sina@sina-Z170X-UD3:~$ cd code/caffe/
sina@sina-Z170X-UD3:~/code/caffe$ ./build/tools/caffe train --solver=examples/alexnetfinetune/solver_sina.prototxt
I0803 12:10:35.137758 2175 caffe.cpp:218] Using GPUs 0
I0803 12:10:35.359722 2175 caffe.cpp:223] GPU 0: GeForce GTX 1070
I0803 12:10:38.265421 2175 solver.cpp:44] Initializing solver from parameters:
test_iter: 280
test_interval: 660
base_lr: 0.001
display: 20
max_iter: 5000
lr_policy: "step"
gamma: 0.1
momentum: 0.9
weight_decay: 0.0005
stepsize: 100000
snapshot: 4999
snapshot_prefix: "models/bvlc_alexnet/caffe_alexnet_sinatrain"
solver_mode: GPU
device_id: 0
net: "examples/alexnetfinetune/alexnetsade-bn.prototxt"
train_state {
  level: 0
  stage: ""
}
type: "SGD"
I0803 12:10:38.265714 2175 solver.cpp:87] Creating training net from net file: examples/alexnetfinetune/alexnetsade-bn.prototxt
I0803 12:10:38.286698 2175 upgrade_proto.cpp:77] Attempting to upgrade batch norm layers using deprecated params: examples/alexnetfinetune/alexnetsade-bn.prototxt
I0803 12:10:38.286759 2175 upgrade_proto.cpp:80] Successfully upgraded batch norm layers using deprecated params.
I0803 12:10:38.286895 2175 net.cpp:296] The NetState phase (0) differed from the phase (1) specified by a rule in layer data
I0803 12:10:38.286932 2175 net.cpp:296] The NetState phase (0) differed from the phase (1) specified by a rule in layer accuracy
I0803 12:10:38.287377 2175 net.cpp:53] Initializing net from parameters:
name: "AlexNet"
state {
  phase: TRAIN
  level: 0
  stage: ""
}
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TRAIN
  }
  transform_param {
    mirror: true
    crop_size: 227
    mean_file: "examples/Mydataset_train_lmdb/mean_imagetest.binaryproto"
  }
  data_param {
    source: "examples/Mydataset_train_lmdb"
    batch_size: 10
    backend: LMDB
  }
}
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1
  }
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "bn1"
  type: "BatchNorm"
  bottom: "conv1"
  top: "conv1"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "scale1"
  type: "Scale"
  bottom: "conv1"
  top: "conv1"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "norm1"
  type: "LRN"
  bottom: "conv1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "norm1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "norm2"
  type: "LRN"
  bottom: "conv2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "norm2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu4"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv4"
  top: "conv5"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu5"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc8"
  type: "InnerProduct"
  bottom: "fc7"
  top: "fc8"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 1000
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "fc8"
  bottom: "label"
  top: "loss"
}
I0803 12:10:38.288094 2175 layer_factory.hpp:77] Creating layer data
I0803 12:10:38.463691 2175 db_lmdb.cpp:35] Opened lmdb examples/Mydataset_train_lmdb
I0803 12:10:38.506412 2175 net.cpp:86] Creating Layer data
I0803 12:10:38.506475 2175 net.cpp:382] data -> data
I0803 12:10:38.506530 2175 net.cpp:382] data -> label
I0803 12:10:38.506580 2175 data_transformer.cpp:25] Loading mean file from: examples/Mydataset_train_lmdb/mean_imagetest.binaryproto
I0803 12:10:38.548476 2175 data_layer.cpp:45] output data size: 10,3,227,227
I0803 12:10:38.560668 2175 net.cpp:124] Setting up data
I0803 12:10:38.560693 2175 net.cpp:131] Top shape: 10 3 227 227 (1545870)
I0803 12:10:38.560699 2175 net.cpp:131] Top shape: 10 (10)
I0803 12:10:38.560703 2175 net.cpp:139] Memory required for data: 6183520
I0803 12:10:38.560712 2175 layer_factory.hpp:77] Creating layer conv1
I0803 12:10:38.560735 2175 net.cpp:86] Creating Layer conv1
I0803 12:10:38.560742 2175 net.cpp:408] conv1 <- data
I0803 12:10:38.560756 2175 net.cpp:382] conv1 -> conv1
I0803 12:10:40.714188 2175 net.cpp:124] Setting up conv1
I0803 12:10:40.714244 2175 net.cpp:131] Top shape: 10 64 227 227 (32978560)
I0803 12:10:40.714278 2175 net.cpp:139] Memory required for data: 138097760
I0803 12:10:40.714332 2175 layer_factory.hpp:77] Creating layer bn1
I0803 12:10:40.714362 2175 net.cpp:86] Creating Layer bn1
I0803 12:10:40.714376 2175 net.cpp:408] bn1 <- conv1
I0803 12:10:40.714392 2175 net.cpp:369] bn1 -> conv1 (in-place)
I0803 12:10:40.715009 2175 net.cpp:124] Setting up bn1
I0803 12:10:40.715030 2175 net.cpp:131] Top shape: 10 64 227 227 (32978560)
I0803 12:10:40.715041 2175 net.cpp:139] Memory required for data: 270012000
I0803 12:10:40.715067 2175 layer_factory.hpp:77] Creating layer scale1
I0803 12:10:40.715128 2175 net.cpp:86] Creating Layer scale1
I0803 12:10:40.715138 2175 net.cpp:408] scale1 <- conv1
I0803 12:10:40.715153 2175 net.cpp:369] scale1 -> conv1 (in-place)
I0803 12:10:40.715266 2175 layer_factory.hpp:77] Creating layer scale1
I0803 12:10:40.715734 2175 net.cpp:124] Setting up scale1
I0803 12:10:40.715755 2175 net.cpp:131] Top shape: 10 64 227 227 (32978560)
I0803 12:10:40.715765 2175 net.cpp:139] Memory required for data: 401926240
I0803 12:10:40.715781 2175 layer_factory.hpp:77] Creating layer relu1
I0803 12:10:40.715798 2175 net.cpp:86] Creating Layer relu1
I0803 12:10:40.715809 2175 net.cpp:408] relu1 <- conv1
I0803 12:10:40.715822 2175 net.cpp:369] relu1 -> conv1 (in-place)
I0803 12:10:40.716305 2175 net.cpp:124] Setting up relu1
I0803 12:10:40.716326 2175 net.cpp:131] Top shape: 10 64 227 227 (32978560)
I0803 12:10:40.716336 2175 net.cpp:139] Memory required for data: 533840480
I0803 12:10:40.716346 2175 layer_factory.hpp:77] Creating layer norm1
I0803 12:10:40.716363 2175 net.cpp:86] Creating Layer norm1
I0803 12:10:40.716374 2175 net.cpp:408] norm1 <- conv1
I0803 12:10:40.716389 2175 net.cpp:382] norm1 -> norm1
I0803 12:10:40.717486 2175 net.cpp:124] Setting up norm1
I0803 12:10:40.717516 2175 net.cpp:131] Top shape: 10 64 227 227 (32978560)
I0803 12:10:40.717525 2175 net.cpp:139] Memory required for data: 665754720
I0803 12:10:40.717535 2175 layer_factory.hpp:77] Creating layer pool1
I0803 12:10:40.717552 2175 net.cpp:86] Creating Layer pool1
I0803 12:10:40.717563 2175 net.cpp:408] pool1 <- norm1
I0803 12:10:40.717581 2175 net.cpp:382] pool1 -> pool1
I0803 12:10:40.717699 2175 net.cpp:124] Setting up pool1
I0803 12:10:40.717720 2175 net.cpp:131] Top shape: 10 64 113 113 (8172160)
I0803 12:10:40.717730 2175 net.cpp:139] Memory required for data: 698443360
I0803 12:10:40.717738 2175 layer_factory.hpp:77] Creating layer conv2
I0803 12:10:40.717762 2175 net.cpp:86] Creating Layer conv2
I0803 12:10:40.717774 2175 net.cpp:408] conv2 <- pool1
I0803 12:10:40.717787 2175 net.cpp:382] conv2 -> conv2
I0803 12:10:40.733384 2175 net.cpp:124] Setting up conv2
I0803 12:10:40.733410 2175 net.cpp:131] Top shape: 10 256 113 113 (32688640)
I0803 12:10:40.733415 2175 net.cpp:139] Memory required for data: 829197920
I0803 12:10:40.733430 2175 layer_factory.hpp:77] Creating layer relu2
I0803 12:10:40.733443 2175 net.cpp:86] Creating Layer relu2
I0803 12:10:40.733448 2175 net.cpp:408] relu2 <- conv2
I0803 12:10:40.733456 2175 net.cpp:369] relu2 -> conv2 (in-place)
I0803 12:10:40.733969 2175 net.cpp:124] Setting up relu2
I0803 12:10:40.733983 2175 net.cpp:131] Top shape: 10 256 113 113 (32688640)
I0803 12:10:40.733985 2175 net.cpp:139] Memory required for data: 959952480
I0803 12:10:40.733990 2175 layer_factory.hpp:77] Creating layer norm2
I0803 12:10:40.733999 2175 net.cpp:86] Creating Layer norm2
I0803 12:10:40.734002 2175 net.cpp:408] norm2 <- conv2
I0803 12:10:40.734007 2175 net.cpp:382] norm2 -> norm2
I0803 12:10:40.734232 2175 net.cpp:124] Setting up norm2
I0803 12:10:40.734241 2175 net.cpp:131] Top shape: 10 256 113 113 (32688640)
I0803 12:10:40.734244 2175 net.cpp:139] Memory required for data: 1090707040
I0803 12:10:40.734248 2175 layer_factory.hpp:77] Creating layer pool2
I0803 12:10:40.734269 2175 net.cpp:86] Creating Layer pool2
I0803 12:10:40.734274 2175 net.cpp:408] pool2 <- norm2
I0803 12:10:40.734280 2175 net.cpp:382] pool2 -> pool2
I0803 12:10:40.734321 2175 net.cpp:124] Setting up pool2
I0803 12:10:40.734329 2175 net.cpp:131] Top shape: 10 256 56 56 (8028160)
I0803 12:10:40.734333 2175 net.cpp:139] Memory required for data: 1122819680
I0803 12:10:40.734335 2175 layer_factory.hpp:77] Creating layer conv3
I0803 12:10:40.734345 2175 net.cpp:86] Creating Layer conv3
I0803 12:10:40.734349 2175 net.cpp:408] conv3 <- pool2
I0803 12:10:40.734355 2175 net.cpp:382] conv3 -> conv3
I0803 12:10:40.748950 2175 net.cpp:124] Setting up conv3
I0803 12:10:40.748970 2175 net.cpp:131] Top shape: 10 384 56 56 (12042240)
I0803 12:10:40.748973 2175 net.cpp:139] Memory required for data: 1170988640
I0803 12:10:40.749014 2175 layer_factory.hpp:77] Creating layer relu3
I0803 12:10:40.749023 2175 net.cpp:86] Creating Layer relu3
I0803 12:10:40.749027 2175 net.cpp:408] relu3 <- conv3
I0803 12:10:40.749034 2175 net.cpp:369] relu3 -> conv3 (in-place)
I0803 12:10:40.749212 2175 net.cpp:124] Setting up relu3
I0803 12:10:40.749219 2175 net.cpp:131] Top shape: 10 384 56 56 (12042240)
I0803 12:10:40.749222 2175 net.cpp:139] Memory required for data: 1219157600
I0803 12:10:40.749224 2175 layer_factory.hpp:77] Creating layer conv4
I0803 12:10:40.749233 2175 net.cpp:86] Creating Layer conv4
I0803 12:10:40.749235 2175 net.cpp:408] conv4 <- conv3
I0803 12:10:40.749240 2175 net.cpp:382] conv4 -> conv4
I0803 12:10:40.756110 2175 net.cpp:124] Setting up conv4
I0803 12:10:40.756124 2175 net.cpp:131] Top shape: 10 384 56 56 (12042240)
I0803 12:10:40.756127 2175 net.cpp:139] Memory required for data: 1267326560
I0803 12:10:40.756134 2175 layer_factory.hpp:77] Creating layer relu4
I0803 12:10:40.756139 2175 net.cpp:86] Creating Layer relu4
I0803 12:10:40.756141 2175 net.cpp:408] relu4 <- conv4
I0803 12:10:40.756145 2175 net.cpp:369] relu4 -> conv4 (in-place)
I0803 12:10:40.756290 2175 net.cpp:124] Setting up relu4
I0803 12:10:40.756297 2175 net.cpp:131] Top shape: 10 384 56 56 (12042240)
I0803 12:10:40.756299 2175 net.cpp:139] Memory required for data: 1315495520
I0803 12:10:40.756301 2175 layer_factory.hpp:77] Creating layer conv5
I0803 12:10:40.756307 2175 net.cpp:86] Creating Layer conv5
I0803 12:10:40.756310 2175 net.cpp:408] conv5 <- conv4
I0803 12:10:40.756314 2175 net.cpp:382] conv5 -> conv5
I0803 12:10:40.761378 2175 net.cpp:124] Setting up conv5
I0803 12:10:40.761390 2175 net.cpp:131] Top shape: 10 256 56 56 (8028160)
I0803 12:10:40.761394 2175 net.cpp:139] Memory required for data: 1347608160
I0803 12:10:40.761399 2175 layer_factory.hpp:77] Creating layer relu5
I0803 12:10:40.761406 2175 net.cpp:86] Creating Layer relu5
I0803 12:10:40.761409 2175 net.cpp:408] relu5 <- conv5
I0803 12:10:40.761414 2175 net.cpp:369] relu5 -> conv5 (in-place)
I0803 12:10:40.761597 2175 net.cpp:124] Setting up relu5
I0803 12:10:40.761603 2175 net.cpp:131] Top shape: 10 256 56 56 (8028160)
I0803 12:10:40.761605 2175 net.cpp:139] Memory required for data: 1379720800
I0803 12:10:40.761607 2175 layer_factory.hpp:77] Creating layer pool5
I0803 12:10:40.761612 2175 net.cpp:86] Creating Layer pool5
I0803 12:10:40.761615 2175 net.cpp:408] pool5 <- conv5
I0803 12:10:40.761618 2175 net.cpp:382] pool5 -> pool5
I0803 12:10:40.761677 2175 net.cpp:124] Setting up pool5
I0803 12:10:40.761682 2175 net.cpp:131] Top shape: 10 256 28 28 (2007040)
I0803 12:10:40.761698 2175 net.cpp:139] Memory required for data: 1387748960
I0803 12:10:40.761700 2175 layer_factory.hpp:77] Creating layer fc6
I0803 12:10:40.761706 2175 net.cpp:86] Creating Layer fc6
I0803 12:10:40.761709 2175 net.cpp:408] fc6 <- pool5
I0803 12:10:40.761713 2175 net.cpp:382] fc6 -> fc6
I0803 12:10:47.059787 2175 net.cpp:124] Setting up fc6
I0803 12:10:47.059804 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:47.059806 2175 net.cpp:139] Memory required for data: 1387912800
I0803 12:10:47.059830 2175 layer_factory.hpp:77] Creating layer relu6
I0803 12:10:47.059855 2175 net.cpp:86] Creating Layer relu6
I0803 12:10:47.059859 2175 net.cpp:408] relu6 <- fc6
I0803 12:10:47.059864 2175 net.cpp:369] relu6 -> fc6 (in-place)
I0803 12:10:47.060097 2175 net.cpp:124] Setting up relu6
I0803 12:10:47.060103 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:47.060106 2175 net.cpp:139] Memory required for data: 1388076640
I0803 12:10:47.060108 2175 layer_factory.hpp:77] Creating layer drop6
I0803 12:10:47.060132 2175 net.cpp:86] Creating Layer drop6
I0803 12:10:47.060135 2175 net.cpp:408] drop6 <- fc6
I0803 12:10:47.060139 2175 net.cpp:369] drop6 -> fc6 (in-place)
I0803 12:10:47.060207 2175 net.cpp:124] Setting up drop6
I0803 12:10:47.060211 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:47.060256 2175 net.cpp:139] Memory required for data: 1388240480
I0803 12:10:47.060261 2175 layer_factory.hpp:77] Creating layer fc7
I0803 12:10:47.060281 2175 net.cpp:86] Creating Layer fc7
I0803 12:10:47.060283 2175 net.cpp:408] fc7 <- fc6
I0803 12:10:47.060287 2175 net.cpp:382] fc7 -> fc7
I0803 12:10:47.189757 2175 net.cpp:124] Setting up fc7
I0803 12:10:47.189774 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:47.189776 2175 net.cpp:139] Memory required for data: 1388404320
I0803 12:10:47.189797 2175 layer_factory.hpp:77] Creating layer relu7
I0803 12:10:47.189805 2175 net.cpp:86] Creating Layer relu7
I0803 12:10:47.189807 2175 net.cpp:408] relu7 <- fc7
I0803 12:10:47.189826 2175 net.cpp:369] relu7 -> fc7 (in-place)
I0803 12:10:47.190233 2175 net.cpp:124] Setting up relu7
I0803 12:10:47.190240 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:47.190243 2175 net.cpp:139] Memory required for data: 1388568160
I0803 12:10:47.190244 2175 layer_factory.hpp:77] Creating layer drop7
I0803 12:10:47.190269 2175 net.cpp:86] Creating Layer drop7
I0803 12:10:47.190274 2175 net.cpp:408] drop7 <- fc7
I0803 12:10:47.190299 2175 net.cpp:369] drop7 -> fc7 (in-place)
I0803 12:10:47.190349 2175 net.cpp:124] Setting up drop7
I0803 12:10:47.190354 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:47.190356 2175 net.cpp:139] Memory required for data: 1388732000
I0803 12:10:47.190358 2175 layer_factory.hpp:77] Creating layer fc8
I0803 12:10:47.190362 2175 net.cpp:86] Creating Layer fc8
I0803 12:10:47.190364 2175 net.cpp:408] fc8 <- fc7
I0803 12:10:47.190369 2175 net.cpp:382] fc8 -> fc8
I0803 12:10:47.222895 2175 net.cpp:124] Setting up fc8
I0803 12:10:47.222913 2175 net.cpp:131] Top shape: 10 1000 (10000)
I0803 12:10:47.222915 2175 net.cpp:139] Memory required for data: 1388772000
I0803 12:10:47.222936 2175 layer_factory.hpp:77] Creating layer loss
I0803 12:10:47.222942 2175 net.cpp:86] Creating Layer loss
I0803 12:10:47.222946 2175 net.cpp:408] loss <- fc8
I0803 12:10:47.222950 2175 net.cpp:408] loss <- label
I0803 12:10:47.222970 2175 net.cpp:382] loss -> loss
I0803 12:10:47.222978 2175 layer_factory.hpp:77] Creating layer loss
I0803 12:10:47.223251 2175 net.cpp:124] Setting up loss
I0803 12:10:47.223258 2175 net.cpp:131] Top shape: (1)
I0803 12:10:47.223259 2175 net.cpp:134] with loss weight 1
I0803 12:10:47.223287 2175 net.cpp:139] Memory required for data: 1388772004
I0803 12:10:47.223291 2175 net.cpp:200] loss needs backward computation.
I0803 12:10:47.223296 2175 net.cpp:200] fc8 needs backward computation.
I0803 12:10:47.223299 2175 net.cpp:200] drop7 needs backward computation.
I0803 12:10:47.223316 2175 net.cpp:200] relu7 needs backward computation.
I0803 12:10:47.223320 2175 net.cpp:200] fc7 needs backward computation.
I0803 12:10:47.223321 2175 net.cpp:200] drop6 needs backward computation.
I0803 12:10:47.223341 2175 net.cpp:200] relu6 needs backward computation.
I0803 12:10:47.223343 2175 net.cpp:200] fc6 needs backward computation.
I0803 12:10:47.223345 2175 net.cpp:200] pool5 needs backward computation.
I0803 12:10:47.223348 2175 net.cpp:200] relu5 needs backward computation.
I0803 12:10:47.223363 2175 net.cpp:200] conv5 needs backward computation.
I0803 12:10:47.223366 2175 net.cpp:200] relu4 needs backward computation.
I0803 12:10:47.223368 2175 net.cpp:200] conv4 needs backward computation.
I0803 12:10:47.223386 2175 net.cpp:200] relu3 needs backward computation.
I0803 12:10:47.223389 2175 net.cpp:200] conv3 needs backward computation.
I0803 12:10:47.223392 2175 net.cpp:200] pool2 needs backward computation.
I0803 12:10:47.223395 2175 net.cpp:200] norm2 needs backward computation.
I0803 12:10:47.223398 2175 net.cpp:200] relu2 needs backward computation.
I0803 12:10:47.223402 2175 net.cpp:200] conv2 needs backward computation.
I0803 12:10:47.223404 2175 net.cpp:200] pool1 needs backward computation.
I0803 12:10:47.223407 2175 net.cpp:200] norm1 needs backward computation.
I0803 12:10:47.223409 2175 net.cpp:200] relu1 needs backward computation.
I0803 12:10:47.223428 2175 net.cpp:200] scale1 needs backward computation.
I0803 12:10:47.223430 2175 net.cpp:200] bn1 needs backward computation.
I0803 12:10:47.223433 2175 net.cpp:200] conv1 needs backward computation.
I0803 12:10:47.223436 2175 net.cpp:202] data does not need backward computation.
I0803 12:10:47.223439 2175 net.cpp:244] This network produces output loss
I0803 12:10:47.223453 2175 net.cpp:257] Network initialization done.
I0803 12:10:47.223675 2175 upgrade_proto.cpp:77] Attempting to upgrade batch norm layers using deprecated params: examples/alexnetfinetune/alexnetsade-bn.prototxt
I0803 12:10:47.223681 2175 upgrade_proto.cpp:80] Successfully upgraded batch norm layers using deprecated params.
I0803 12:10:47.223701 2175 solver.cpp:173] Creating test net (#0) specified by net file: examples/alexnetfinetune/alexnetsade-bn.prototxt
I0803 12:10:47.223724 2175 net.cpp:296] The NetState phase (1) differed from the phase (0) specified by a rule in layer data
I0803 12:10:47.223853 2175 net.cpp:53] Initializing net from parameters:
name: "AlexNet"
state {
  phase: TEST
}
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TEST
  }
  transform_param {
    mirror: false
    crop_size: 227
    mean_file: "examples/Mydataset_test_lmdb/mean_imagetest.binaryproto"
  }
  data_param {
    source: "examples/Mydataset_test_lmdb"
    batch_size: 10
    backend: LMDB
  }
}
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1
  }
  convolution_param {
    num_output: 64
    bias_term: true
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "bn1"
  type: "BatchNorm"
  bottom: "conv1"
  top: "conv1"
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
}
layer {
  name: "scale1"
  type: "Scale"
  bottom: "conv1"
  top: "conv1"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "norm1"
  type: "LRN"
  bottom: "conv1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "norm1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "norm2"
  type: "LRN"
  bottom: "conv2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "norm2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu4"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv4"
  top: "conv5"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu5"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc8"
  type: "InnerProduct"
  bottom: "fc7"
  top: "fc8"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 1000
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "accuracy"
  type: "Accuracy"
  bottom: "fc8"
  bottom: "label"
  top: "accuracy"
  include {
    phase: TEST
  }
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "fc8"
  bottom: "label"
  top: "loss"
}
I0803 12:10:47.224076 2175 layer_factory.hpp:77] Creating layer data
I0803 12:10:47.269927 2175 db_lmdb.cpp:35] Opened lmdb examples/Mydataset_test_lmdb
I0803 12:10:47.280226 2175 net.cpp:86] Creating Layer data
I0803 12:10:47.280279 2175 net.cpp:382] data -> data
I0803 12:10:47.280303 2175 net.cpp:382] data -> label
I0803 12:10:47.280324 2175 data_transformer.cpp:25] Loading mean file from: examples/Mydataset_test_lmdb/mean_imagetest.binaryproto
I0803 12:10:47.313472 2175 data_layer.cpp:45] output data size: 10,3,227,227
I0803 12:10:47.334664 2175 net.cpp:124] Setting up data
I0803 12:10:47.334702 2175 net.cpp:131] Top shape: 10 3 227 227 (1545870)
I0803 12:10:47.334712 2175 net.cpp:131] Top shape: 10 (10)
I0803 12:10:47.334718 2175 net.cpp:139] Memory required for data: 6183520
I0803 12:10:47.334731 2175 layer_factory.hpp:77] Creating layer label_data_1_split
I0803 12:10:47.334753 2175 net.cpp:86] Creating Layer label_data_1_split
I0803 12:10:47.334761 2175 net.cpp:408] label_data_1_split <- label
I0803 12:10:47.334791 2175 net.cpp:382] label_data_1_split -> label_data_1_split_0
I0803 12:10:47.334811 2175 net.cpp:382] label_data_1_split -> label_data_1_split_1
I0803 12:10:47.335036 2175 net.cpp:124] Setting up label_data_1_split
I0803 12:10:47.335067 2175 net.cpp:131] Top shape: 10 (10)
I0803 12:10:47.335075 2175 net.cpp:131] Top shape: 10 (10)
I0803 12:10:47.335080 2175 net.cpp:139] Memory required for data: 6183600
I0803 12:10:47.335088 2175 layer_factory.hpp:77] Creating layer conv1
I0803 12:10:47.335110 2175 net.cpp:86] Creating Layer conv1
I0803 12:10:47.335119 2175 net.cpp:408] conv1 <- data
I0803 12:10:47.335131 2175 net.cpp:382] conv1 -> conv1
I0803 12:10:47.338120 2175 net.cpp:124] Setting up conv1
I0803 12:10:47.338152 2175 net.cpp:131] Top shape: 10 64 227 227 (32978560)
I0803 12:10:47.338197 2175 net.cpp:139] Memory required for data: 138097840
I0803 12:10:47.338222 2175 layer_factory.hpp:77] Creating layer bn1
I0803 12:10:47.338238 2175 net.cpp:86] Creating Layer bn1
I0803 12:10:47.338245 2175 net.cpp:408] bn1 <- conv1
I0803 12:10:47.338284 2175 net.cpp:369] bn1 -> conv1 (in-place)
I0803 12:10:47.338843 2175 net.cpp:124] Setting up bn1
I0803 12:10:47.338861 2175 net.cpp:131] Top shape: 10 64 227 227 (32978560)
I0803 12:10:47.338870 2175 net.cpp:139] Memory required for data: 270012080
I0803 12:10:47.338889 2175 layer_factory.hpp:77] Creating layer scale1
I0803 12:10:47.338907 2175 net.cpp:86] Creating Layer scale1
I0803 12:10:47.338914 2175 net.cpp:408] scale1 <- conv1
I0803 12:10:47.338927 2175 net.cpp:369] scale1 -> conv1 (in-place)
I0803 12:10:47.339010 2175 layer_factory.hpp:77] Creating layer scale1
I0803 12:10:47.339423 2175 net.cpp:124] Setting up scale1
I0803 12:10:47.339439 2175 net.cpp:131] Top shape: 10 64 227 227 (32978560)
I0803 12:10:47.339448 2175 net.cpp:139] Memory required for data: 401926320
I0803 12:10:47.339462 2175 layer_factory.hpp:77] Creating layer relu1
I0803 12:10:47.339473 2175 net.cpp:86] Creating Layer relu1
I0803 12:10:47.339485 2175 net.cpp:408] relu1 <- conv1
I0803 12:10:47.339496 2175 net.cpp:369] relu1 -> conv1 (in-place)
I0803 12:10:47.339871 2175 net.cpp:124] Setting up relu1
I0803 12:10:47.339887 2175 net.cpp:131] Top shape: 10 64 227 227 (32978560)
I0803 12:10:47.339893 2175 net.cpp:139] Memory required for data: 533840560
I0803 12:10:47.339901 2175 layer_factory.hpp:77] Creating layer norm1
I0803 12:10:47.339912 2175 net.cpp:86] Creating Layer norm1
I0803 12:10:47.339921 2175 net.cpp:408] norm1 <- conv1
I0803 12:10:47.339933 2175 net.cpp:382] norm1 -> norm1
I0803 12:10:47.340322 2175 net.cpp:124] Setting up norm1
I0803 12:10:47.340337 2175 net.cpp:131] Top shape: 10 64 227 227 (32978560)
I0803 12:10:47.340343 2175 net.cpp:139] Memory required for data: 665754800
I0803 12:10:47.340349 2175 layer_factory.hpp:77] Creating layer pool1
I0803 12:10:47.340360 2175 net.cpp:86] Creating Layer pool1
I0803 12:10:47.340366 2175 net.cpp:408] pool1 <- norm1
I0803 12:10:47.340379 2175 net.cpp:382] pool1 -> pool1
I0803 12:10:47.340472 2175 net.cpp:124] Setting up pool1
I0803 12:10:47.340487 2175 net.cpp:131] Top shape: 10 64 113 113 (8172160)
I0803 12:10:47.340492 2175 net.cpp:139] Memory required for data: 698443440
I0803 12:10:47.340498 2175 layer_factory.hpp:77] Creating layer conv2
I0803 12:10:47.340517 2175 net.cpp:86] Creating Layer conv2
I0803 12:10:47.340523 2175 net.cpp:408] conv2 <- pool1
I0803 12:10:47.340538 2175 net.cpp:382] conv2 -> conv2
I0803 12:10:47.347864 2175 net.cpp:124] Setting up conv2
I0803 12:10:47.347887 2175 net.cpp:131] Top shape: 10 256 113 113 (32688640)
I0803 12:10:47.347894 2175 net.cpp:139] Memory required for data: 829198000
I0803 12:10:47.347909 2175 layer_factory.hpp:77] Creating layer relu2
I0803 12:10:47.347920 2175 net.cpp:86] Creating Layer relu2
I0803 12:10:47.347924 2175 net.cpp:408] relu2 <- conv2
I0803 12:10:47.347932 2175 net.cpp:369] relu2 -> conv2 (in-place)
I0803 12:10:47.348479 2175 net.cpp:124] Setting up relu2
I0803 12:10:47.348491 2175 net.cpp:131] Top shape: 10 256 113 113 (32688640)
I0803 12:10:47.348496 2175 net.cpp:139] Memory required for data: 959952560
I0803 12:10:47.348502 2175 layer_factory.hpp:77] Creating layer norm2
I0803 12:10:47.348511 2175 net.cpp:86] Creating Layer norm2
I0803 12:10:47.348516 2175 net.cpp:408] norm2 <- conv2
I0803 12:10:47.348523 2175 net.cpp:382] norm2 -> norm2
I0803 12:10:47.348768 2175 net.cpp:124] Setting up norm2
I0803 12:10:47.348778 2175 net.cpp:131] Top shape: 10 256 113 113 (32688640)
I0803 12:10:47.348783 2175 net.cpp:139] Memory required for data: 1090707120
I0803 12:10:47.348786 2175 layer_factory.hpp:77] Creating layer pool2
I0803 12:10:47.348793 2175 net.cpp:86] Creating Layer pool2
I0803 12:10:47.348798 2175 net.cpp:408] pool2 <- norm2
I0803 12:10:47.348803 2175 net.cpp:382] pool2 -> pool2
I0803 12:10:47.348870 2175 net.cpp:124] Setting up pool2
I0803 12:10:47.348878 2175 net.cpp:131] Top shape: 10 256 56 56 (8028160)
I0803 12:10:47.348881 2175 net.cpp:139] Memory required for data: 1122819760
I0803 12:10:47.348886 2175 layer_factory.hpp:77] Creating layer conv3
I0803 12:10:47.348894 2175 net.cpp:86] Creating Layer conv3
I0803 12:10:47.348899 2175 net.cpp:408] conv3 <- pool2
I0803 12:10:47.348906 2175 net.cpp:382] conv3 -> conv3
I0803 12:10:47.360764 2175 net.cpp:124] Setting up conv3
I0803 12:10:47.360780 2175 net.cpp:131] Top shape: 10 384 56 56 (12042240)
I0803 12:10:47.360783 2175 net.cpp:139] Memory required for data: 1170988720
I0803 12:10:47.360791 2175 layer_factory.hpp:77] Creating layer relu3
I0803 12:10:47.360798 2175 net.cpp:86] Creating Layer relu3
I0803 12:10:47.360801 2175 net.cpp:408] relu3 <- conv3
I0803 12:10:47.360808 2175 net.cpp:369] relu3 -> conv3 (in-place)
I0803 12:10:47.360967 2175 net.cpp:124] Setting up relu3
I0803 12:10:47.360975 2175 net.cpp:131] Top shape: 10 384 56 56 (12042240)
I0803 12:10:47.360977 2175 net.cpp:139] Memory required for data: 1219157680
I0803 12:10:47.360980 2175 layer_factory.hpp:77] Creating layer conv4
I0803 12:10:47.360988 2175 net.cpp:86] Creating Layer conv4
I0803 12:10:47.360991 2175 net.cpp:408] conv4 <- conv3
I0803 12:10:47.360996 2175 net.cpp:382] conv4 -> conv4
I0803 12:10:47.368546 2175 net.cpp:124] Setting up conv4
I0803 12:10:47.368559 2175 net.cpp:131] Top shape: 10 384 56 56 (12042240)
I0803 12:10:47.368561 2175 net.cpp:139] Memory required for data: 1267326640
I0803 12:10:47.368582 2175 layer_factory.hpp:77] Creating layer relu4
I0803 12:10:47.368605 2175 net.cpp:86] Creating Layer relu4
I0803 12:10:47.368609 2175 net.cpp:408] relu4 <- conv4
I0803 12:10:47.368614 2175 net.cpp:369] relu4 -> conv4 (in-place)
I0803 12:10:47.368779 2175 net.cpp:124] Setting up relu4
I0803 12:10:47.368785 2175 net.cpp:131] Top shape: 10 384 56 56 (12042240)
I0803 12:10:47.368801 2175 net.cpp:139] Memory required for data: 1315495600
I0803 12:10:47.368803 2175 layer_factory.hpp:77] Creating layer conv5
I0803 12:10:47.368827 2175 net.cpp:86] Creating Layer conv5
I0803 12:10:47.368831 2175 net.cpp:408] conv5 <- conv4
I0803 12:10:47.368835 2175 net.cpp:382] conv5 -> conv5
I0803 12:10:47.375073 2175 net.cpp:124] Setting up conv5
I0803 12:10:47.375088 2175 net.cpp:131] Top shape: 10 256 56 56 (8028160)
I0803 12:10:47.375092 2175 net.cpp:139] Memory required for data: 1347608240
I0803 12:10:47.375113 2175 layer_factory.hpp:77] Creating layer relu5
I0803 12:10:47.375118 2175 net.cpp:86] Creating Layer relu5
I0803 12:10:47.375135 2175 net.cpp:408] relu5 <- conv5
I0803 12:10:47.375141 2175 net.cpp:369] relu5 -> conv5 (in-place)
I0803 12:10:47.375282 2175 net.cpp:124] Setting up relu5
I0803 12:10:47.375288 2175 net.cpp:131] Top shape: 10 256 56 56 (8028160)
I0803 12:10:47.375289 2175 net.cpp:139] Memory required for data: 1379720880
I0803 12:10:47.375291 2175 layer_factory.hpp:77] Creating layer pool5
I0803 12:10:47.375310 2175 net.cpp:86] Creating Layer pool5
I0803 12:10:47.375313 2175 net.cpp:408] pool5 <- conv5
I0803 12:10:47.375331 2175 net.cpp:382] pool5 -> pool5
I0803 12:10:47.375396 2175 net.cpp:124] Setting up pool5
I0803 12:10:47.375399 2175 net.cpp:131] Top shape: 10 256 28 28 (2007040)
I0803 12:10:47.375401 2175 net.cpp:139] Memory required for data: 1387749040
I0803 12:10:47.375403 2175 layer_factory.hpp:77] Creating layer fc6
I0803 12:10:47.375408 2175 net.cpp:86] Creating Layer fc6
I0803 12:10:47.375411 2175 net.cpp:408] fc6 <- pool5
I0803 12:10:47.375414 2175 net.cpp:382] fc6 -> fc6
I0803 12:10:53.939147 2175 net.cpp:124] Setting up fc6
I0803 12:10:53.970624 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:53.970654 2175 net.cpp:139] Memory required for data: 1387912880
I0803 12:10:54.020473 2175 layer_factory.hpp:77] Creating layer relu6
I0803 12:10:54.029845 2175 net.cpp:86] Creating Layer relu6
I0803 12:10:54.029889 2175 net.cpp:408] relu6 <- fc6
I0803 12:10:54.029919 2175 net.cpp:369] relu6 -> fc6 (in-place)
I0803 12:10:54.952005 2175 net.cpp:124] Setting up relu6
I0803 12:10:54.952059 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:54.952069 2175 net.cpp:139] Memory required for data: 1388076720
I0803 12:10:54.952081 2175 layer_factory.hpp:77] Creating layer drop6
I0803 12:10:54.952183 2175 net.cpp:86] Creating Layer drop6
I0803 12:10:54.952199 2175 net.cpp:408] drop6 <- fc6
I0803 12:10:54.952221 2175 net.cpp:369] drop6 -> fc6 (in-place)
I0803 12:10:54.952363 2175 net.cpp:124] Setting up drop6
I0803 12:10:54.952380 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:54.952388 2175 net.cpp:139] Memory required for data: 1388240560
I0803 12:10:54.952399 2175 layer_factory.hpp:77] Creating layer fc7
I0803 12:10:54.952422 2175 net.cpp:86] Creating Layer fc7
I0803 12:10:54.952433 2175 net.cpp:408] fc7 <- fc6
I0803 12:10:54.952450 2175 net.cpp:382] fc7 -> fc7
I0803 12:10:55.168879 2175 net.cpp:124] Setting up fc7
I0803 12:10:55.168905 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:55.168921 2175 net.cpp:139] Memory required for data: 1388404400
I0803 12:10:55.168929 2175 layer_factory.hpp:77] Creating layer relu7
I0803 12:10:55.168952 2175 net.cpp:86] Creating Layer relu7
I0803 12:10:55.168956 2175 net.cpp:408] relu7 <- fc7
I0803 12:10:55.168962 2175 net.cpp:369] relu7 -> fc7 (in-place)
I0803 12:10:55.213413 2175 net.cpp:124] Setting up relu7
I0803 12:10:55.213459 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:55.213466 2175 net.cpp:139] Memory required for data: 1388568240
I0803 12:10:55.213479 2175 layer_factory.hpp:77] Creating layer drop7
I0803 12:10:55.213507 2175 net.cpp:86] Creating Layer drop7
I0803 12:10:55.213522 2175 net.cpp:408] drop7 <- fc7
I0803 12:10:55.213541 2175 net.cpp:369] drop7 -> fc7 (in-place)
I0803 12:10:55.213641 2175 net.cpp:124] Setting up drop7
I0803 12:10:55.213655 2175 net.cpp:131] Top shape: 10 4096 (40960)
I0803 12:10:55.213662 2175 net.cpp:139] Memory required for data: 1388732080
I0803 12:10:55.213670 2175 layer_factory.hpp:77] Creating layer fc8
I0803 12:10:55.213688 2175 net.cpp:86] Creating Layer fc8
I0803 12:10:55.213696 2175 net.cpp:408] fc8 <- fc7
I0803 12:10:55.213712 2175 net.cpp:382] fc8 -> fc8
I0803 12:10:55.271644 2175 net.cpp:124] Setting up fc8
I0803 12:10:55.271661 2175 net.cpp:131] Top shape: 10 1000 (10000)
I0803 12:10:55.271664 2175 net.cpp:139] Memory required for data: 1388772080
I0803 12:10:55.271672 2175 layer_factory.hpp:77] Creating layer fc8_fc8_0_split
I0803 12:10:55.322099 2175 net.cpp:86] Creating Layer fc8_fc8_0_split
I0803 12:10:55.322120 2175 net.cpp:408] fc8_fc8_0_split <- fc8
I0803 12:10:55.322129 2175 net.cpp:382] fc8_fc8_0_split -> fc8_fc8_0_split_0
I0803 12:10:55.322139 2175 net.cpp:382] fc8_fc8_0_split -> fc8_fc8_0_split_1
I0803 12:10:55.322221 2175 net.cpp:124] Setting up fc8_fc8_0_split
I0803 12:10:55.322228 2175 net.cpp:131] Top shape: 10 1000 (10000)
I0803 12:10:55.322232 2175 net.cpp:131] Top shape: 10 1000 (10000)
I0803 12:10:55.322234 2175 net.cpp:139] Memory required for data: 1388852080
I0803 12:10:55.322237 2175 layer_factory.hpp:77] Creating layer accuracy
I0803 12:10:55.327911 2175 net.cpp:86] Creating Layer accuracy
I0803 12:10:55.327934 2175 net.cpp:408] accuracy <- fc8_fc8_0_split_0
I0803 12:10:55.327941 2175 net.cpp:408] accuracy <- label_data_1_split_0
I0803 12:10:55.327951 2175 net.cpp:382] accuracy -> accuracy
I0803 12:10:55.327967 2175 net.cpp:124] Setting up accuracy
I0803 12:10:55.327975 2175 net.cpp:131] Top shape: (1)
I0803 12:10:55.327978 2175 net.cpp:139] Memory required for data: 1388852084
I0803 12:10:55.327983 2175 layer_factory.hpp:77] Creating layer loss
I0803 12:10:55.336427 2175 net.cpp:86] Creating Layer loss
I0803 12:10:55.336453 2175 net.cpp:408] loss <- fc8_fc8_0_split_1
I0803 12:10:55.336462 2175 net.cpp:408] loss <- label_data_1_split_1
I0803 12:10:55.336469 2175 net.cpp:382] loss -> loss
I0803 12:10:55.341104 2175 layer_factory.hpp:77] Creating layer loss
I0803 12:10:55.341733 2175 net.cpp:124] Setting up loss
I0803 12:10:55.341776 2175 net.cpp:131] Top shape: (1)
I0803 12:10:55.341780 2175 net.cpp:134] with loss weight 1
I0803 12:10:55.353785 2175 net.cpp:139] Memory required for data: 1388852088
I0803 12:10:55.353804 2175 net.cpp:200] loss needs backward computation.
I0803 12:10:55.353821 2175 net.cpp:202] accuracy does not need backward computation.
I0803 12:10:55.353832 2175 net.cpp:200] fc8_fc8_0_split needs backward computation.
I0803 12:10:55.353842 2175 net.cpp:200] fc8 needs backward computation.
I0803 12:10:55.353850 2175 net.cpp:200] drop7 needs backward computation.
I0803 12:10:55.353859 2175 net.cpp:200] relu7 needs backward computation.
I0803 12:10:55.353868 2175 net.cpp:200] fc7 needs backward computation.
I0803 12:10:55.353873 2175 net.cpp:200] drop6 needs backward computation.
I0803 12:10:55.353881 2175 net.cpp:200] relu6 needs backward computation.
I0803 12:10:55.353888 2175 net.cpp:200] fc6 needs backward computation.
I0803 12:10:55.353895 2175 net.cpp:200] pool5 needs backward computation.
I0803 12:10:55.353904 2175 net.cpp:200] relu5 needs backward computation.
I0803 12:10:55.353911 2175 net.cpp:200] conv5 needs backward computation.
I0803 12:10:55.353920 2175 net.cpp:200] relu4 needs backward computation.
I0803 12:10:55.353927 2175 net.cpp:200] conv4 needs backward computation.
I0803 12:10:55.353935 2175 net.cpp:200] relu3 needs backward computation.
I0803 12:10:55.353943 2175 net.cpp:200] conv3 needs backward computation.
I0803 12:10:55.353955 2175 net.cpp:200] pool2 needs backward computation.
I0803 12:10:55.353962 2175 net.cpp:200] norm2 needs backward computation.
I0803 12:10:55.353971 2175 net.cpp:200] relu2 needs backward computation.
I0803 12:10:55.353978 2175 net.cpp:200] conv2 needs backward computation.
I0803 12:10:55.353988 2175 net.cpp:200] pool1 needs backward computation.
I0803 12:10:55.354003 2175 net.cpp:200] norm1 needs backward computation.
I0803 12:10:55.354012 2175 net.cpp:200] relu1 needs backward computation.
I0803 12:10:55.354019 2175 net.cpp:200] scale1 needs backward computation.
I0803 12:10:55.354027 2175 net.cpp:200] bn1 needs backward computation.
I0803 12:10:55.354035 2175 net.cpp:200] conv1 needs backward computation.
I0803 12:10:55.354045 2175 net.cpp:202] label_data_1_split does not need backward computation.
I0803 12:10:55.354055 2175 net.cpp:202] data does not need backward computation.
I0803 12:10:55.354063 2175 net.cpp:244] This network produces output accuracy
I0803 12:10:55.354075 2175 net.cpp:244] This network produces output loss
I0803 12:10:55.354123 2175 net.cpp:257] Network initialization done.
I0803 12:10:55.385907 2175 solver.cpp:56] Solver scaffolding done.
I0803 12:10:55.402374 2175 caffe.cpp:248] Starting Optimization
I0803 12:10:55.402422 2175 solver.cpp:273] Solving AlexNet
I0803 12:10:55.402431 2175 solver.cpp:274] Learning Rate Policy: step
I0803 12:10:55.789188 2175 solver.cpp:331] Iteration 0, Testing net (#0)
I0803 12:10:57.610282 2175 blocking_queue.cpp:49] Waiting for data
I0803 12:11:36.445051 2228 data_layer.cpp:73] Restarting data prefetching from start.
I0803 12:11:36.613025 2175 solver.cpp:398] Test net output #0: accuracy = 0
I0803 12:11:36.613050 2175 solver.cpp:398] Test net output #1: loss = 6.80939 (* 1 = 6.80939 loss)
F0803 12:11:36.613711 2175 syncedmem.cpp:71] Check failed: error == cudaSuccess (2 vs. 0) out of memory
*** Check failure stack trace: ***
    @ 0x7f8d2b64d5cd google::LogMessage::Fail()
    @ 0x7f8d2b64f433 google::LogMessage::SendToLog()
    @ 0x7f8d2b64d15b google::LogMessage::Flush()
    @ 0x7f8d2b64fe1e google::LogMessageFatal::~LogMessageFatal()
    @ 0x7f8d2bc84ad8 caffe::SyncedMemory::mutable_gpu_data()
    @ 0x7f8d2bdf90f2 caffe::Blob<>::mutable_gpu_data()
    @ 0x7f8d2be2ce48 caffe::CuDNNConvolutionLayer<>::Forward_gpu()
    @ 0x7f8d2bc938b1 caffe::Net<>::ForwardFromTo()
    @ 0x7f8d2bc939b7 caffe::Net<>::Forward()
    @ 0x7f8d2bdef348 caffe::Solver<>::Step()
    @ 0x7f8d2bdefeea caffe::Solver<>::Solve()
    @ 0x40aeb4 train()
    @ 0x4075a8 main
    @ 0x7f8d29de4830 __libc_start_main
    @ 0x407e79 _start
    @ (nil) (unknown)
Aborted (core dumped)
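
Note on the failure above: the solver builds both nets, completes the iteration-0 test pass (accuracy = 0, loss = 6.80939, close to ln(1000) ≈ 6.91, i.e. chance level for a 1000-way softmax), and then aborts inside caffe::CuDNNConvolutionLayer<>::Forward_gpu() with "Check failed: error == cudaSuccess (2 vs. 0) out of memory" on the first training forward pass, so the GeForce GTX 1070 runs out of GPU memory. The shape log shows why this net is so heavy: conv1 here is 3x3 with stride 1 rather than the reference AlexNet's strided first convolution, so conv1/bn1/scale1/relu1/norm1 each carry 10x64x227x227 blobs, and each net instance already needs roughly 1.39 GB for activations alone ("Memory required for data: 1388852088"), before weights, gradients, and cuDNN convolution workspaces are added, with the train and test nets both resident at once.

A minimal sketch of one possible fix, assuming memory is the only problem (the values below are illustrative assumptions, not taken from this run): shrink batch_size in both data layers of alexnetsade-bn.prototxt, e.g.

  data_param {
    source: "examples/Mydataset_train_lmdb"
    batch_size: 5  # assumed value; was 10, and halving the batch roughly halves activation memory
    backend: LMDB
  }

Restoring a strided conv1 (kernel_size: 11, stride: 4, as in the reference bvlc_alexnet, giving 55x55 instead of 227x227 feature maps) would cut the early activation memory far more aggressively. After either change, rerun the same command:

  ./build/tools/caffe train --solver=examples/alexnetfinetune/solver_sina.prototxt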