AbdealiJK

RCNN Error

Mar 31st, 2015
WARNING: Logging before InitGoogleLogging() is written to STDERR
E0331 20:43:48.882966 3129 upgrade_proto.cpp:618] Attempting to upgrade input file specified using deprecated V1LayerParameter: ./model-defs/rcnn_batch_256_output_fc7.prototxt
I0331 20:43:48.883144 3129 upgrade_proto.cpp:626] Successfully upgraded file specified using deprecated V1LayerParameter
I0331 20:43:48.883358 3129 net.cpp:42] Initializing net from parameters:
input: "data"
input_dim: 256
input_dim: 3
input_dim: 227
input_dim: 227
state {
  phase: TEST
}
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 96
    kernel_size: 11
    stride: 4
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "norm1"
  type: "LRN"
  bottom: "pool1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "norm1"
  top: "conv2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "norm2"
  type: "LRN"
  bottom: "pool2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "norm2"
  top: "conv3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu4"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv4"
  top: "conv5"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu5"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
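[Note: the parameters above are the AlexNet-style R-CNN feature extractor truncated at fc7, run in TEST phase on batches of 256 windows of size 3x227x227. Every activation blob scales with the first input_dim (the batch size), so a lower-memory variant of this prototxt only needs that one value changed. A minimal sketch, where 64 is an illustrative batch size and not a value from this run:

input: "data"
input_dim: 64    # batch size; was 256, activation memory scales linearly with it
input_dim: 3
input_dim: 227
input_dim: 227

All layer definitions would stay exactly as printed above.]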
I0331 20:43:48.884116 3129 net.cpp:340] Input 0 -> data
I0331 20:43:48.891572 3129 layer_factory.hpp:74] Creating layer conv1
I0331 20:43:48.891630 3129 net.cpp:84] Creating Layer conv1
I0331 20:43:48.891639 3129 net.cpp:380] conv1 <- data
I0331 20:43:48.891652 3129 net.cpp:338] conv1 -> conv1
I0331 20:43:48.891667 3129 net.cpp:113] Setting up conv1
I0331 20:43:48.910981 3129 net.cpp:120] Top shape: 256 96 55 55 (74342400)
I0331 20:43:48.911026 3129 layer_factory.hpp:74] Creating layer relu1
I0331 20:43:48.911041 3129 net.cpp:84] Creating Layer relu1
I0331 20:43:48.911051 3129 net.cpp:380] relu1 <- conv1
I0331 20:43:48.911062 3129 net.cpp:327] relu1 -> conv1 (in-place)
I0331 20:43:48.911073 3129 net.cpp:113] Setting up relu1
I0331 20:43:48.911084 3129 net.cpp:120] Top shape: 256 96 55 55 (74342400)
I0331 20:43:48.911093 3129 layer_factory.hpp:74] Creating layer pool1
I0331 20:43:48.911104 3129 net.cpp:84] Creating Layer pool1
I0331 20:43:48.911113 3129 net.cpp:380] pool1 <- conv1
I0331 20:43:48.911121 3129 net.cpp:338] pool1 -> pool1
I0331 20:43:48.911133 3129 net.cpp:113] Setting up pool1
I0331 20:43:48.911149 3129 net.cpp:120] Top shape: 256 96 27 27 (17915904)
I0331 20:43:48.911157 3129 layer_factory.hpp:74] Creating layer norm1
I0331 20:43:48.911684 3129 net.cpp:84] Creating Layer norm1
I0331 20:43:48.911717 3129 net.cpp:380] norm1 <- pool1
I0331 20:43:48.911736 3129 net.cpp:338] norm1 -> norm1
I0331 20:43:48.911757 3129 net.cpp:113] Setting up norm1
I0331 20:43:48.911787 3129 net.cpp:120] Top shape: 256 96 27 27 (17915904)
I0331 20:43:48.911806 3129 layer_factory.hpp:74] Creating layer conv2
I0331 20:43:48.911836 3129 net.cpp:84] Creating Layer conv2
I0331 20:43:48.911852 3129 net.cpp:380] conv2 <- norm1
I0331 20:43:48.911870 3129 net.cpp:338] conv2 -> conv2
I0331 20:43:48.911905 3129 net.cpp:113] Setting up conv2
I0331 20:43:48.930404 3129 net.cpp:120] Top shape: 256 256 27 27 (47775744)
I0331 20:43:48.930447 3129 layer_factory.hpp:74] Creating layer relu2
I0331 20:43:48.930462 3129 net.cpp:84] Creating Layer relu2
I0331 20:43:48.930472 3129 net.cpp:380] relu2 <- conv2
I0331 20:43:48.930482 3129 net.cpp:327] relu2 -> conv2 (in-place)
I0331 20:43:48.930493 3129 net.cpp:113] Setting up relu2
I0331 20:43:48.930508 3129 net.cpp:120] Top shape: 256 256 27 27 (47775744)
I0331 20:43:48.930517 3129 layer_factory.hpp:74] Creating layer pool2
I0331 20:43:48.930527 3129 net.cpp:84] Creating Layer pool2
I0331 20:43:48.930536 3129 net.cpp:380] pool2 <- conv2
I0331 20:43:48.930544 3129 net.cpp:338] pool2 -> pool2
I0331 20:43:48.930554 3129 net.cpp:113] Setting up pool2
I0331 20:43:48.930568 3129 net.cpp:120] Top shape: 256 256 13 13 (11075584)
I0331 20:43:48.930579 3129 layer_factory.hpp:74] Creating layer norm2
I0331 20:43:48.930590 3129 net.cpp:84] Creating Layer norm2
I0331 20:43:48.930600 3129 net.cpp:380] norm2 <- pool2
I0331 20:43:48.930609 3129 net.cpp:338] norm2 -> norm2
I0331 20:43:48.930619 3129 net.cpp:113] Setting up norm2
I0331 20:43:48.930630 3129 net.cpp:120] Top shape: 256 256 13 13 (11075584)
I0331 20:43:48.930639 3129 layer_factory.hpp:74] Creating layer conv3
I0331 20:43:48.930651 3129 net.cpp:84] Creating Layer conv3
I0331 20:43:48.930660 3129 net.cpp:380] conv3 <- norm2
I0331 20:43:48.930670 3129 net.cpp:338] conv3 -> conv3
I0331 20:43:48.930680 3129 net.cpp:113] Setting up conv3
I0331 20:43:48.957094 3129 net.cpp:120] Top shape: 256 384 13 13 (16613376)
I0331 20:43:48.957142 3129 layer_factory.hpp:74] Creating layer relu3
I0331 20:43:48.957159 3129 net.cpp:84] Creating Layer relu3
I0331 20:43:48.957167 3129 net.cpp:380] relu3 <- conv3
I0331 20:43:48.957190 3129 net.cpp:327] relu3 -> conv3 (in-place)
I0331 20:43:48.957217 3129 net.cpp:113] Setting up relu3
I0331 20:43:48.957228 3129 net.cpp:120] Top shape: 256 384 13 13 (16613376)
I0331 20:43:48.957237 3129 layer_factory.hpp:74] Creating layer conv4
I0331 20:43:48.957248 3129 net.cpp:84] Creating Layer conv4
I0331 20:43:48.957262 3129 net.cpp:380] conv4 <- conv3
I0331 20:43:48.957273 3129 net.cpp:338] conv4 -> conv4
I0331 20:43:48.957289 3129 net.cpp:113] Setting up conv4
I0331 20:43:48.977625 3129 net.cpp:120] Top shape: 256 384 13 13 (16613376)
I0331 20:43:48.977668 3129 layer_factory.hpp:74] Creating layer relu4
I0331 20:43:48.977682 3129 net.cpp:84] Creating Layer relu4
I0331 20:43:48.977691 3129 net.cpp:380] relu4 <- conv4
I0331 20:43:48.977701 3129 net.cpp:327] relu4 -> conv4 (in-place)
I0331 20:43:48.977713 3129 net.cpp:113] Setting up relu4
I0331 20:43:48.977723 3129 net.cpp:120] Top shape: 256 384 13 13 (16613376)
I0331 20:43:48.977731 3129 layer_factory.hpp:74] Creating layer conv5
I0331 20:43:48.977743 3129 net.cpp:84] Creating Layer conv5
I0331 20:43:48.977751 3129 net.cpp:380] conv5 <- conv4
I0331 20:43:48.977764 3129 net.cpp:338] conv5 -> conv5
I0331 20:43:48.977777 3129 net.cpp:113] Setting up conv5
I0331 20:43:48.991324 3129 net.cpp:120] Top shape: 256 256 13 13 (11075584)
I0331 20:43:48.991360 3129 layer_factory.hpp:74] Creating layer relu5
I0331 20:43:48.991375 3129 net.cpp:84] Creating Layer relu5
I0331 20:43:48.991384 3129 net.cpp:380] relu5 <- conv5
I0331 20:43:48.991395 3129 net.cpp:327] relu5 -> conv5 (in-place)
I0331 20:43:48.991405 3129 net.cpp:113] Setting up relu5
I0331 20:43:48.991415 3129 net.cpp:120] Top shape: 256 256 13 13 (11075584)
I0331 20:43:48.991423 3129 layer_factory.hpp:74] Creating layer pool5
I0331 20:43:48.991433 3129 net.cpp:84] Creating Layer pool5
I0331 20:43:48.991447 3129 net.cpp:380] pool5 <- conv5
I0331 20:43:48.991458 3129 net.cpp:338] pool5 -> pool5
I0331 20:43:48.991471 3129 net.cpp:113] Setting up pool5
I0331 20:43:48.991483 3129 net.cpp:120] Top shape: 256 256 6 6 (2359296)
I0331 20:43:48.991492 3129 layer_factory.hpp:74] Creating layer fc6
I0331 20:43:48.991508 3129 net.cpp:84] Creating Layer fc6
I0331 20:43:48.991518 3129 net.cpp:380] fc6 <- pool5
I0331 20:43:48.991529 3129 net.cpp:338] fc6 -> fc6
I0331 20:43:48.991540 3129 net.cpp:113] Setting up fc6
I0331 20:43:50.034123 3129 net.cpp:120] Top shape: 256 4096 (1048576)
I0331 20:43:50.034180 3129 layer_factory.hpp:74] Creating layer relu6
I0331 20:43:50.034198 3129 net.cpp:84] Creating Layer relu6
I0331 20:43:50.034207 3129 net.cpp:380] relu6 <- fc6
I0331 20:43:50.034219 3129 net.cpp:327] relu6 -> fc6 (in-place)
I0331 20:43:50.034230 3129 net.cpp:113] Setting up relu6
I0331 20:43:50.034241 3129 net.cpp:120] Top shape: 256 4096 (1048576)
I0331 20:43:50.034250 3129 layer_factory.hpp:74] Creating layer fc7
I0331 20:43:50.034265 3129 net.cpp:84] Creating Layer fc7
I0331 20:43:50.034278 3129 net.cpp:380] fc7 <- fc6
I0331 20:43:50.034288 3129 net.cpp:338] fc7 -> fc7
I0331 20:43:50.034299 3129 net.cpp:113] Setting up fc7
I0331 20:43:50.497591 3129 net.cpp:120] Top shape: 256 4096 (1048576)
I0331 20:43:50.497642 3129 layer_factory.hpp:74] Creating layer relu7
I0331 20:43:50.497656 3129 net.cpp:84] Creating Layer relu7
I0331 20:43:50.497666 3129 net.cpp:380] relu7 <- fc7
I0331 20:43:50.497680 3129 net.cpp:327] relu7 -> fc7 (in-place)
I0331 20:43:50.497692 3129 net.cpp:113] Setting up relu7
I0331 20:43:50.497704 3129 net.cpp:120] Top shape: 256 4096 (1048576)
I0331 20:43:50.497711 3129 net.cpp:169] relu7 does not need backward computation.
I0331 20:43:50.497720 3129 net.cpp:169] fc7 does not need backward computation.
I0331 20:43:50.497727 3129 net.cpp:169] relu6 does not need backward computation.
I0331 20:43:50.497735 3129 net.cpp:169] fc6 does not need backward computation.
I0331 20:43:50.497743 3129 net.cpp:169] pool5 does not need backward computation.
I0331 20:43:50.497751 3129 net.cpp:169] relu5 does not need backward computation.
I0331 20:43:50.497758 3129 net.cpp:169] conv5 does not need backward computation.
I0331 20:43:50.497766 3129 net.cpp:169] relu4 does not need backward computation.
I0331 20:43:50.497774 3129 net.cpp:169] conv4 does not need backward computation.
I0331 20:43:50.497782 3129 net.cpp:169] relu3 does not need backward computation.
I0331 20:43:50.497791 3129 net.cpp:169] conv3 does not need backward computation.
I0331 20:43:50.497798 3129 net.cpp:169] norm2 does not need backward computation.
I0331 20:43:50.497807 3129 net.cpp:169] pool2 does not need backward computation.
I0331 20:43:50.497814 3129 net.cpp:169] relu2 does not need backward computation.
I0331 20:43:50.497822 3129 net.cpp:169] conv2 does not need backward computation.
I0331 20:43:50.497830 3129 net.cpp:169] norm1 does not need backward computation.
I0331 20:43:50.497838 3129 net.cpp:169] pool1 does not need backward computation.
I0331 20:43:50.497846 3129 net.cpp:169] relu1 does not need backward computation.
I0331 20:43:50.497854 3129 net.cpp:169] conv1 does not need backward computation.
I0331 20:43:50.497861 3129 net.cpp:205] This network produces output fc7
I0331 20:43:50.497879 3129 net.cpp:447] Collecting Learning Rate and Weight Decay.
I0331 20:43:50.497895 3129 net.cpp:217] Network initialization done.
I0331 20:43:50.497905 3129 net.cpp:218] Memory required for data: 1589510144
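[Note: 1,589,510,144 bytes (about 1.48 GiB) is the sum of the top blob sizes printed above (397,377,536 floats x 4 bytes), with in-place ReLU tops counted twice. It does not include the input blob or the parameters; the fc6 weights alone are 9216 x 4096 floats, roughly another 144 MiB, so the real host footprint is noticeably larger than this figure.]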
E0331 20:43:53.882091 3129 upgrade_proto.cpp:609] Attempting to upgrade input file specified using deprecated transformation parameters: ./data/caffe_nets/finetune_voc_2012_train_iter_70k
I0331 20:43:53.882133 3129 upgrade_proto.cpp:612] Successfully upgraded file specified using deprecated data transformation parameters.
E0331 20:43:53.882143 3129 upgrade_proto.cpp:614] Note that future Caffe releases will only support transform_param messages for transformation fields.
E0331 20:43:53.882150 3129 upgrade_proto.cpp:618] Attempting to upgrade input file specified using deprecated V1LayerParameter: ./data/caffe_nets/finetune_voc_2012_train_iter_70k
I0331 20:43:54.077909 3129 upgrade_proto.cpp:626] Successfully upgraded file specified using deprecated V1LayerParameter
F0331 20:44:24.422291 3129 syncedmem.hpp:27] Check failed: *ptr host allocation of size 44302336 failed
*** Check failure stack trace: ***
./matlab: line 1: 2990 Killed LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libfreetype.so.6 matlab
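[Diagnosis, for context: the fatal check at syncedmem.hpp:27 fires when a plain host malloc returns NULL. The failed size, 44,302,336 bytes, is exactly one 256x256x13x13 float32 blob (11,075,584 values x 4 bytes), i.e. a mid-network activation requested after the finetune_voc_2012_train_iter_70k weights were loaded, so the machine ran out of host RAM rather than hitting a Caffe bug; the shell's "Killed" line is consistent with the kernel OOM killer ending the MATLAB session. Cutting the batch size in the prototxt, as sketched after the net definition above, or freeing up RAM/swap are the usual remedies.]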