Advertisement
shayegh

log file

Apr 12th, 2018
140
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 54.67 KB | None | 0 0
  1. ./caffe train --solver C:/Users/mahtab/Desktop/finetune_mitindoor67/solver.prototxt --weights C:/Users/mahtab/Desktop/finetune_mitindoor67/places205CNN_iter_300000_upgraded.caffemodel 2>&1 | tee -a C:/Users/mahtab/Desktop/finetune_mitindoor67/modify_caffenet.log
  3. >>
  4. ./caffe : I0412 04:12:22.239899 4704 caffe.cpp:212] Use CPU.
  5. At line:1 char:1
  6. + ./caffe train --solver C:/Users/mahtab/Desktop/finetune_mitindoor67/s ...
  7. + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
  8. + CategoryInfo : NotSpecified: (I0412 04:12:22....p:212] Use CPU.:String) [], Remote
  9. Exception
  10. + FullyQualifiedErrorId : NativeCommandError
  11.  
  12. I0412 04:12:22.239899 4704 solver.cpp:44] Initializing solver from parameters:
  13. test_iter: 134
  14. test_interval: 268
  15. base_lr: 1e-07
  16. display: 50
  17. max_iter: 100000
  18. lr_policy: "step"
  19. gamma: 0.1
  20. momentum: 0.9
  21. weight_decay: 0.0005
  22. stepsize: 20000
  23. snapshot: 10000
  24. snapshot_prefix: "C:/Users/mahtab/Desktop/finetune_mitindoor67"
  25. solver_mode: CPU
  26. net: "C:/Users/mahtab/Desktop/finetune_mitindoor67/SceneRecognitionCaffeNet.prototxt"
  27. train_state {
  28. level: 0
  29. stage: ""
  30. }
  31. I0412 04:12:22.239899 4704 solver.cpp:87] Creating training net from net file:
  32. C:/Users/mahtab/Desktop/finetune_mitindoor67/SceneRecognitionCaffeNet.prototxt
  33. I0412 04:12:22.239899 4704 net.cpp:294] The NetState phase (0) differed from the phase (1)
  34. specified by a rule in layer MITindoor67
  35. I0412 04:12:22.239899 4704 net.cpp:294] The NetState phase (0) differed from the phase (1)
  36. specified by a rule in layer accuracy
  37. I0412 04:12:22.239899 4704 net.cpp:51] Initializing net from parameters:
  38. name: "SceneRecognitionCaffeNet"
  39. state {
  40. phase: TRAIN
  41. level: 0
  42. stage: ""
  43. }
  44. layer {
  45. name: "MITindoor67"
  46. type: "Data"
  47. top: "data"
  48. top: "label"
  49. include {
  50. phase: TRAIN
  51. }
  52. transform_param {
  53. scale: 0.8
  54. mirror: true
  55. crop_size: 227
  56. mean_file: "mean.binaryproto"
  57. }
  58. data_param {
  59. source: "mydataset256_train_lmdb"
  60. batch_size: 20
  61. backend: LMDB
  62. }
  63. }
  64. layer {
  65. name: "conv1"
  66. type: "Convolution"
  67. bottom: "data"
  68. top: "conv1"
  69. param {
  70. lr_mult: 0
  71. decay_mult: 0
  72. }
  73. param {
  74. lr_mult: 0
  75. decay_mult: 0
  76. }
  77. convolution_param {
  78. num_output: 96
  79. kernel_size: 11
  80. stride: 4
  81. weight_filler {
  82. type: "gaussian"
  83. std: 0.01
  84. }
  85. bias_filler {
  86. type: "constant"
  87. value: 0
  88. }
  89. }
  90. }
  91. layer {
  92. name: "relu1"
  93. type: "ReLU"
  94. bottom: "conv1"
  95. top: "conv1"
  96. }
  97. layer {
  98. name: "pool1"
  99. type: "Pooling"
  100. bottom: "conv1"
  101. top: "pool1"
  102. pooling_param {
  103. pool: MAX
  104. kernel_size: 3
  105. stride: 2
  106. }
  107. }
  108. layer {
  109. name: "norm1"
  110. type: "LRN"
  111. bottom: "pool1"
  112. top: "norm1"
  113. lrn_param {
  114. local_size: 5
  115. alpha: 0.0001
  116. beta: 0.75
  117. }
  118. }
  119. layer {
  120. name: "conv2"
  121. type: "Convolution"
  122. bottom: "norm1"
  123. top: "conv2"
  124. param {
  125. lr_mult: 0
  126. decay_mult: 0
  127. }
  128. param {
  129. lr_mult: 0
  130. decay_mult: 0
  131. }
  132. convolution_param {
  133. num_output: 256
  134. pad: 2
  135. kernel_size: 5
  136. group: 2
  137. weight_filler {
  138. type: "gaussian"
  139. std: 0.01
  140. }
  141. bias_filler {
  142. type: "constant"
  143. value: 1
  144. }
  145. }
  146. }
  147. layer {
  148. name: "relu2"
  149. type: "ReLU"
  150. bottom: "conv2"
  151. top: "conv2"
  152. }
  153. layer {
  154. name: "pool2"
  155. type: "Pooling"
  156. bottom: "conv2"
  157. top: "pool2"
  158. pooling_param {
  159. pool: MAX
  160. kernel_size: 3
  161. stride: 2
  162. }
  163. }
  164. layer {
  165. name: "norm2"
  166. type: "LRN"
  167. bottom: "pool2"
  168. top: "norm2"
  169. lrn_param {
  170. local_size: 5
  171. alpha: 0.0001
  172. beta: 0.75
  173. }
  174. }
  175. layer {
  176. name: "conv3"
  177. type: "Convolution"
  178. bottom: "norm2"
  179. top: "conv3"
  180. param {
  181. lr_mult: 0
  182. decay_mult: 0
  183. }
  184. param {
  185. lr_mult: 0
  186. decay_mult: 0
  187. }
  188. convolution_param {
  189. num_output: 384
  190. pad: 1
  191. kernel_size: 3
  192. weight_filler {
  193. type: "gaussian"
  194. std: 0.01
  195. }
  196. bias_filler {
  197. type: "constant"
  198. value: 0
  199. }
  200. }
  201. }
  202. layer {
  203. name: "relu3"
  204. type: "ReLU"
  205. bottom: "conv3"
  206. top: "conv3"
  207. }
  208. layer {
  209. name: "conv4"
  210. type: "Convolution"
  211. bottom: "conv3"
  212. top: "conv4"
  213. param {
  214. lr_mult: 0
  215. decay_mult: 0
  216. }
  217. param {
  218. lr_mult: 0
  219. decay_mult: 0
  220. }
  221. convolution_param {
  222. num_output: 384
  223. pad: 1
  224. kernel_size: 3
  225. group: 2
  226. weight_filler {
  227. type: "gaussian"
  228. std: 0.01
  229. }
  230. bias_filler {
  231. type: "constant"
  232. value: 1
  233. }
  234. }
  235. }
  236. layer {
  237. name: "relu4"
  238. type: "ReLU"
  239. bottom: "conv4"
  240. top: "conv4"
  241. }
  242. layer {
  243. name: "conv5"
  244. type: "Convolution"
  245. bottom: "conv4"
  246. top: "conv5"
  247. param {
  248. lr_mult: 0
  249. decay_mult: 0
  250. }
  251. param {
  252. lr_mult: 0
  253. decay_mult: 0
  254. }
  255. convolution_param {
  256. num_output: 256
  257. pad: 1
  258. kernel_size: 3
  259. group: 2
  260. weight_filler {
  261. type: "gaussian"
  262. std: 0.01
  263. }
  264. bias_filler {
  265. type: "constant"
  266. value: 1
  267. }
  268. }
  269. }
  270. layer {
  271. name: "relu5"
  272. type: "ReLU"
  273. bottom: "conv5"
  274. top: "conv5"
  275. }
  276. layer {
  277. name: "pool5"
  278. type: "Pooling"
  279. bottom: "conv5"
  280. top: "pool5"
  281. pooling_param {
  282. pool: MAX
  283. kernel_size: 3
  284. stride: 2
  285. }
  286. }
  287. layer {
  288. name: "pool5_flatten"
  289. type: "Flatten"
  290. bottom: "pool5"
  291. top: "pool5_flatten"
  292. }
  293. layer {
  294. name: "fc6"
  295. type: "Convolution"
  296. bottom: "pool5_flatten"
  297. top: "fc6"
  298. param {
  299. lr_mult: 0
  300. decay_mult: 0
  301. }
  302. param {
  303. lr_mult: 0
  304. decay_mult: 0
  305. }
  306. convolution_param {
  307. num_output: 4096
  308. pad: 0
  309. kernel_size: 7
  311. stride: 1
  312. }
  313. }
  314. layer {
  315. name: "relu6"
  316. type: "ReLU"
  317. bottom: "fc6"
  318. top: "fc6"
  319. }
  320. layer {
  321. name: "drop6"
  322. type: "Dropout"
  323. bottom: "fc6"
  324. top: "fc6"
  325. dropout_param {
  326. dropout_ratio: 0.5
  327. }
  328. }
  329. layer {
  330. name: "fc7"
  331. type: "Convolution"
  332. bottom: "fc6"
  333. top: "fc7"
  334. param {
  335. lr_mult: 0
  336. decay_mult: 0
  337. }
  338. param {
  339. lr_mult: 0
  340. decay_mult: 0
  341. }
  342. convolution_param {
  343. num_output: 4096
  344. kernel_size: 1
  345. stride: 1
  346. }
  347. }
  348. layer {
  349. name: "relu7"
  350. type: "ReLU"
  351. bottom: "fc7"
  352. top: "fc7"
  353. }
  354. layer {
  355. name: "drop7"
  356. type: "Dropout"
  357. bottom: "fc7"
  358. top: "fc7"
  359. dropout_param {
  360. dropout_ratio: 0.5
  361. }
  362. }
  363. layer {
  364. name: "fc8_mitindoor67"
  365. type: "Convolution"
  366. bottom: "fc7"
  367. top: "fc8_mitindoor67"
  368. param {
  369. lr_mult: 10
  370. decay_mult: 0
  371. }
  372. param {
  373. lr_mult: 20
  374. decay_mult: 0
  375. }
  376. convolution_param {
  377. num_output: 67
  378. kernel_size: 1
  379. stride: 1
  380. }
  381. }
  382. layer {
  383. name: "loss"
  384. type: "SoftmaxWithLoss"
  385. bottom: "fc8_mitindoor67"
  386. bottom: "label"
  387. top: "loss"
  388. }
  389. I0412 04:12:22.239899 4704 layer_factory.cpp:58] Creating layer MITindoor67
  390. I0412 04:12:22.239899 4704 db_lmdb.cpp:40] Opened lmdb mydataset256_train_lmdb
  391. I0412 04:12:22.239899 4704 net.cpp:84] Creating Layer MITindoor67
  392. I0412 04:12:22.239899 4704 net.cpp:380] MITindoor67 -> data
  393. I0412 04:12:22.239899 4704 net.cpp:380] MITindoor67 -> label
  394. I0412 04:12:22.239899 4704 data_transformer.cpp:25] Loading mean file from: mean.binaryproto
  395. I0412 04:12:22.257618 4704 common.cpp:36] System entropy source not available, using fallback
  396. algorithm to generate seed instead.
  397. I0412 04:12:22.257618 4704 data_layer.cpp:45] output data size: 20,3,227,227
  398. I0412 04:12:22.270828 4704 net.cpp:122] Setting up MITindoor67
  399. I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 3 227 227 (3091740)
  400. I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 (20)
  401. I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 12367040
  402. I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer conv1
  403. I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer conv1
  404. I0412 04:12:22.270828 4704 net.cpp:406] conv1 <- data
  405. I0412 04:12:22.270828 4704 net.cpp:380] conv1 -> conv1
  406. I0412 04:12:22.270828 4704 net.cpp:122] Setting up conv1
  407. I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 96 55 55 (5808000)
  408. I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 35599040
  409. I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer relu1
  410. I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer relu1
  411. I0412 04:12:22.270828 4704 net.cpp:406] relu1 <- conv1
  412. I0412 04:12:22.270828 4704 net.cpp:367] relu1 -> conv1 (in-place)
  413. I0412 04:12:22.270828 4704 net.cpp:122] Setting up relu1
  414. I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 96 55 55 (5808000)
  415. I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 58831040
  416. I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer pool1
  417. I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer pool1
  418. I0412 04:12:22.270828 4704 net.cpp:406] pool1 <- conv1
  419. I0412 04:12:22.270828 4704 net.cpp:380] pool1 -> pool1
  420. I0412 04:12:22.270828 4704 net.cpp:122] Setting up pool1
  421. I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 96 27 27 (1399680)
  422. I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 64429760
  423. I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer norm1
  424. I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer norm1
  425. I0412 04:12:22.270828 4704 net.cpp:406] norm1 <- pool1
  426. I0412 04:12:22.270828 4704 net.cpp:380] norm1 -> norm1
  427. I0412 04:12:22.270828 4704 net.cpp:122] Setting up norm1
  428. I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 96 27 27 (1399680)
  429. I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 70028480
  430. I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer conv2
  431. I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer conv2
  432. I0412 04:12:22.270828 4704 net.cpp:406] conv2 <- norm1
  433. I0412 04:12:22.270828 4704 net.cpp:380] conv2 -> conv2
  434. I0412 04:12:22.270828 4704 net.cpp:122] Setting up conv2
  435. I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 256 27 27 (3732480)
  436. I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 84958400
  437. I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer relu2
  438. I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer relu2
  439. I0412 04:12:22.270828 4704 net.cpp:406] relu2 <- conv2
  440. I0412 04:12:22.270828 4704 net.cpp:367] relu2 -> conv2 (in-place)
  441. I0412 04:12:22.270828 4704 net.cpp:122] Setting up relu2
  442. I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 256 27 27 (3732480)
  443. I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 99888320
  444. I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer pool2
  445. I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer pool2
  446. I0412 04:12:22.270828 4704 net.cpp:406] pool2 <- conv2
  447. I0412 04:12:22.270828 4704 net.cpp:380] pool2 -> pool2
  448. I0412 04:12:22.270828 4704 net.cpp:122] Setting up pool2
  449. I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 256 13 13 (865280)
  450. I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 103349440
  451. I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer norm2
  452. I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer norm2
  453. I0412 04:12:22.270828 4704 net.cpp:406] norm2 <- pool2
  454. I0412 04:12:22.270828 4704 net.cpp:380] norm2 -> norm2
  455. I0412 04:12:22.270828 4704 net.cpp:122] Setting up norm2
  456. I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 256 13 13 (865280)
  457. I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 106810560
  458. I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer conv3
  459. I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer conv3
  460. I0412 04:12:22.270828 4704 net.cpp:406] conv3 <- norm2
  461. I0412 04:12:22.270828 4704 net.cpp:380] conv3 -> conv3
  462. I0412 04:12:22.286504 4704 net.cpp:122] Setting up conv3
  463. I0412 04:12:22.286504 4704 net.cpp:129] Top shape: 20 384 13 13 (1297920)
  464. I0412 04:12:22.286504 4704 net.cpp:137] Memory required for data: 112002240
  465. I0412 04:12:22.286504 4704 layer_factory.cpp:58] Creating layer relu3
  466. I0412 04:12:22.286504 4704 net.cpp:84] Creating Layer relu3
  467. I0412 04:12:22.286504 4704 net.cpp:406] relu3 <- conv3
  468. I0412 04:12:22.286504 4704 net.cpp:367] relu3 -> conv3 (in-place)
  469. I0412 04:12:22.286504 4704 net.cpp:122] Setting up relu3
  470. I0412 04:12:22.286504 4704 net.cpp:129] Top shape: 20 384 13 13 (1297920)
  471. I0412 04:12:22.286504 4704 net.cpp:137] Memory required for data: 117193920
  472. I0412 04:12:22.286504 4704 layer_factory.cpp:58] Creating layer conv4
  473. I0412 04:12:22.286504 4704 net.cpp:84] Creating Layer conv4
  474. I0412 04:12:22.286504 4704 net.cpp:406] conv4 <- conv3
  475. I0412 04:12:22.286504 4704 net.cpp:380] conv4 -> conv4
  476. I0412 04:12:22.286504 4704 net.cpp:122] Setting up conv4
  477. I0412 04:12:22.286504 4704 net.cpp:129] Top shape: 20 384 13 13 (1297920)
  478. I0412 04:12:22.286504 4704 net.cpp:137] Memory required for data: 122385600
  479. I0412 04:12:22.286504 4704 layer_factory.cpp:58] Creating layer relu4
  480. I0412 04:12:22.286504 4704 net.cpp:84] Creating Layer relu4
  481. I0412 04:12:22.286504 4704 net.cpp:406] relu4 <- conv4
  482. I0412 04:12:22.286504 4704 net.cpp:367] relu4 -> conv4 (in-place)
  483. I0412 04:12:22.286504 4704 net.cpp:122] Setting up relu4
  484. I0412 04:12:22.286504 4704 net.cpp:129] Top shape: 20 384 13 13 (1297920)
  485. I0412 04:12:22.286504 4704 net.cpp:137] Memory required for data: 127577280
  486. I0412 04:12:22.286504 4704 layer_factory.cpp:58] Creating layer conv5
  487. I0412 04:12:22.286504 4704 net.cpp:84] Creating Layer conv5
  488. I0412 04:12:22.286504 4704 net.cpp:406] conv5 <- conv4
  489. I0412 04:12:22.286504 4704 net.cpp:380] conv5 -> conv5
  490. I0412 04:12:22.302098 4704 net.cpp:122] Setting up conv5
  491. I0412 04:12:22.302098 4704 net.cpp:129] Top shape: 20 256 13 13 (865280)
  492. I0412 04:12:22.302098 4704 net.cpp:137] Memory required for data: 131038400
  493. I0412 04:12:22.302098 4704 layer_factory.cpp:58] Creating layer relu5
  494. I0412 04:12:22.302098 4704 net.cpp:84] Creating Layer relu5
  495. I0412 04:12:22.302098 4704 net.cpp:406] relu5 <- conv5
  496. I0412 04:12:22.302098 4704 net.cpp:367] relu5 -> conv5 (in-place)
  497. I0412 04:12:22.302098 4704 net.cpp:122] Setting up relu5
  498. I0412 04:12:22.302098 4704 net.cpp:129] Top shape: 20 256 13 13 (865280)
  499. I0412 04:12:22.302098 4704 net.cpp:137] Memory required for data: 134499520
  500. I0412 04:12:22.302098 4704 layer_factory.cpp:58] Creating layer pool5
  501. I0412 04:12:22.302098 4704 net.cpp:84] Creating Layer pool5
  502. I0412 04:12:22.302098 4704 net.cpp:406] pool5 <- conv5
  503. I0412 04:12:22.302098 4704 net.cpp:380] pool5 -> pool5
  504. I0412 04:12:22.302098 4704 net.cpp:122] Setting up pool5
  505. I0412 04:12:22.302098 4704 net.cpp:129] Top shape: 20 256 6 6 (184320)
  506. I0412 04:12:22.302098 4704 net.cpp:137] Memory required for data: 135236800
  507. I0412 04:12:22.302098 4704 layer_factory.cpp:58] Creating layer pool5_flatten
  508. I0412 04:12:22.302098 4704 net.cpp:84] Creating Layer pool5_flatten
  509. I0412 04:12:22.302098 4704 net.cpp:406] pool5_flatten <- pool5
  510. I0412 04:12:22.302098 4704 net.cpp:380] pool5_flatten -> pool5_flatten
  511. I0412 04:12:22.302098 4704 net.cpp:122] Setting up pool5_flatten
  512. I0412 04:12:22.302098 4704 net.cpp:129] Top shape: 20 9216 (184320)
  513. I0412 04:12:22.302098 4704 net.cpp:137] Memory required for data: 135974080
  514. I0412 04:12:22.302098 4704 layer_factory.cpp:58] Creating layer fc6
  515. I0412 04:12:22.302098 4704 net.cpp:84] Creating Layer fc6
  516. I0412 04:12:22.302098 4704 net.cpp:406] fc6 <- pool5_flatten
  517. I0412 04:12:22.302098 4704 net.cpp:380] fc6 -> fc6
  518. I0412 04:12:22.333617 4704 net.cpp:122] Setting up fc6
  519. I0412 04:12:22.333617 4704 net.cpp:129] Top shape: 20 4096 (81920)
  520. I0412 04:12:22.333617 4704 net.cpp:137] Memory required for data: 136301760
  521. I0412 04:12:22.333617 4704 layer_factory.cpp:58] Creating layer relu6
  522. I0412 04:12:22.333617 4704 net.cpp:84] Creating Layer relu6
  523. I0412 04:12:22.333617 4704 net.cpp:406] relu6 <- fc6
  524. I0412 04:12:22.333617 4704 net.cpp:367] relu6 -> fc6 (in-place)
  525. I0412 04:12:22.333617 4704 net.cpp:122] Setting up relu6
  526. I0412 04:12:22.333617 4704 net.cpp:129] Top shape: 20 4096 (81920)
  527. I0412 04:12:22.333617 4704 net.cpp:137] Memory required for data: 136629440
  528. I0412 04:12:22.333617 4704 layer_factory.cpp:58] Creating layer drop6
  529. I0412 04:12:22.333617 4704 net.cpp:84] Creating Layer drop6
  530. I0412 04:12:22.333617 4704 net.cpp:406] drop6 <- fc6
  531. I0412 04:12:22.333617 4704 net.cpp:367] drop6 -> fc6 (in-place)
  532. I0412 04:12:22.333617 4704 net.cpp:122] Setting up drop6
  533. I0412 04:12:22.333617 4704 net.cpp:129] Top shape: 20 4096 (81920)
  534. I0412 04:12:22.333617 4704 net.cpp:137] Memory required for data: 136957120
  535. I0412 04:12:22.333617 4704 layer_factory.cpp:58] Creating layer fc7
  536. I0412 04:12:22.333617 4704 net.cpp:84] Creating Layer fc7
  537. I0412 04:12:22.333617 4704 net.cpp:406] fc7 <- fc6
  538. I0412 04:12:22.333617 4704 net.cpp:380] fc7 -> fc7
  539. I0412 04:12:22.364234 4704 net.cpp:122] Setting up fc7
  540. I0412 04:12:22.364234 4704 net.cpp:129] Top shape: 20 4096 (81920)
  541. I0412 04:12:22.364234 4704 net.cpp:137] Memory required for data: 137284800
  542. I0412 04:12:22.364234 4704 layer_factory.cpp:58] Creating layer relu7
  543. I0412 04:12:22.364234 4704 net.cpp:84] Creating Layer relu7
  544. I0412 04:12:22.364234 4704 net.cpp:406] relu7 <- fc7
  545. I0412 04:12:22.364234 4704 net.cpp:367] relu7 -> fc7 (in-place)
  546. I0412 04:12:22.364234 4704 net.cpp:122] Setting up relu7
  547. I0412 04:12:22.364234 4704 net.cpp:129] Top shape: 20 4096 (81920)
  548. I0412 04:12:22.364234 4704 net.cpp:137] Memory required for data: 137612480
  549. I0412 04:12:22.364234 4704 layer_factory.cpp:58] Creating layer drop7
  550. I0412 04:12:22.364234 4704 net.cpp:84] Creating Layer drop7
  551. I0412 04:12:22.364234 4704 net.cpp:406] drop7 <- fc7
  552. I0412 04:12:22.364234 4704 net.cpp:367] drop7 -> fc7 (in-place)
  553. I0412 04:12:22.364234 4704 net.cpp:122] Setting up drop7
  554. I0412 04:12:22.364234 4704 net.cpp:129] Top shape: 20 4096 (81920)
  555. I0412 04:12:22.364234 4704 net.cpp:137] Memory required for data: 137940160
  556. I0412 04:12:22.364234 4704 layer_factory.cpp:58] Creating layer fc8_mitindoor67
  557. I0412 04:12:22.364234 4704 net.cpp:84] Creating Layer fc8_mitindoor67
  558. I0412 04:12:22.364234 4704 net.cpp:406] fc8_mitindoor67 <- fc7
  559. I0412 04:12:22.364234 4704 net.cpp:380] fc8_mitindoor67 -> fc8_mitindoor67
  560. I0412 04:12:22.364234 4704 net.cpp:122] Setting up fc8_mitindoor67
  561. I0412 04:12:22.364234 4704 net.cpp:129] Top shape: 20 67 (1340)
  562. I0412 04:12:22.364234 4704 net.cpp:137] Memory required for data: 137945520
  563. I0412 04:12:22.364234 4704 layer_factory.cpp:58] Creating layer loss
  564. I0412 04:12:22.364234 4704 net.cpp:84] Creating Layer loss
  565. I0412 04:12:22.364234 4704 net.cpp:406] loss <- fc8_mitindoor67
  566. I0412 04:12:22.364234 4704 net.cpp:406] loss <- label
  567. I0412 04:12:22.364234 4704 net.cpp:380] loss -> loss
  568. I0412 04:12:22.364234 4704 layer_factory.cpp:58] Creating layer loss
  569. I0412 04:12:22.364234 4704 net.cpp:122] Setting up loss
  570. I0412 04:12:22.364234 4704 net.cpp:129] Top shape: (1)
  571. I0412 04:12:22.364234 4704 net.cpp:132] with loss weight 1
  572. I0412 04:12:22.364234 4704 net.cpp:137] Memory required for data: 137945524
  573. I0412 04:12:22.364234 4704 net.cpp:198] loss needs backward computation.
  574. I0412 04:12:22.364234 4704 net.cpp:198] fc8_mitindoor67 needs backward computation.
  575. I0412 04:12:22.364234 4704 net.cpp:200] drop7 does not need backward computation.
  576. I0412 04:12:22.364234 4704 net.cpp:200] relu7 does not need backward computation.
  577. I0412 04:12:22.364234 4704 net.cpp:200] fc7 does not need backward computation.
  578. I0412 04:12:22.364234 4704 net.cpp:200] drop6 does not need backward computation.
  579. I0412 04:12:22.364234 4704 net.cpp:200] relu6 does not need backward computation.
  580. I0412 04:12:22.364234 4704 net.cpp:200] fc6 does not need backward computation.
  581. I0412 04:12:22.364234 4704 net.cpp:200] pool5_flatten does not need backward computation.
  582. I0412 04:12:22.364234 4704 net.cpp:200] pool5 does not need backward computation.
  583. I0412 04:12:22.364234 4704 net.cpp:200] relu5 does not need backward computation.
  584. I0412 04:12:22.364234 4704 net.cpp:200] conv5 does not need backward computation.
  585. I0412 04:12:22.364234 4704 net.cpp:200] relu4 does not need backward computation.
  586. I0412 04:12:22.364234 4704 net.cpp:200] conv4 does not need backward computation.
  587. I0412 04:12:22.364234 4704 net.cpp:200] relu3 does not need backward computation.
  588. I0412 04:12:22.364234 4704 net.cpp:200] conv3 does not need backward computation.
  589. I0412 04:12:22.364234 4704 net.cpp:200] norm2 does not need backward computation.
  590. I0412 04:12:22.364234 4704 net.cpp:200] pool2 does not need backward computation.
  591. I0412 04:12:22.364234 4704 net.cpp:200] relu2 does not need backward computation.
  592. I0412 04:12:22.364234 4704 net.cpp:200] conv2 does not need backward computation.
  593. I0412 04:12:22.364234 4704 net.cpp:200] norm1 does not need backward computation.
  594. I0412 04:12:22.364234 4704 net.cpp:200] pool1 does not need backward computation.
  595. I0412 04:12:22.364234 4704 net.cpp:200] relu1 does not need backward computation.
  596. I0412 04:12:22.364234 4704 net.cpp:200] conv1 does not need backward computation.
  597. I0412 04:12:22.364234 4704 net.cpp:200] MITindoor67 does not need backward computation.
  599. I0412 04:12:22.536056 4704 net.cpp:242] This network produces output loss
  600. I0412 04:12:22.536056 4704 net.cpp:255] Network initialization done.
  601. I0412 04:12:22.536056 4704 solver.cpp:172] Creating test net (#0) specified by net file:
  602. C:/Users/mahtab/Desktop/finetune_mitindoor67/SceneRecognitionCaffeNet.prototxt
  603. I0412 04:12:22.536056 4704 net.cpp:294] The NetState phase (1) differed from the phase (0)
  604. specified by a rule in layer MITindoor67
  605. I0412 04:12:22.536056 4704 net.cpp:51] Initializing net from parameters:
  606. name: "SceneRecognitionCaffeNet"
  607. state {
  608. phase: TEST
  609. }
  610. layer {
  611. name: "MITindoor67"
  612. type: "Data"
  613. top: "data"
  614. top: "label"
  615. include {
  616. phase: TEST
  617. }
  618. transform_param {
  619. mirror: false
  620. crop_size: 227
  621. mean_file: "mean.binaryproto"
  622. }
  623. data_param {
  624. source: "mydataset256_test_lmdb"
  625. batch_size: 10
  626. backend: LMDB
  627. }
  628. }
  629. layer {
  630. name: "conv1"
  631. type: "Convolution"
  632. bottom: "data"
  633. top: "conv1"
  634. param {
  635. lr_mult: 0
  636. decay_mult: 0
  637. }
  638. param {
  639. lr_mult: 0
  640. decay_mult: 0
  641. }
  642. convolution_param {
  643. num_output: 96
  644. kernel_size: 11
  645. stride: 4
  646. weight_filler {
  647. type: "gaussian"
  648. std: 0.01
  649. }
  650. bias_filler {
  651. type: "constant"
  652. value: 0
  653. }
  654. }
  655. }
  656. layer {
  657. name: "relu1"
  658. type: "ReLU"
  659. bottom: "conv1"
  660. top: "conv1"
  661. }
  662. layer {
  663. name: "pool1"
  664. type: "Pooling"
  665. bottom: "conv1"
  666. top: "pool1"
  667. pooling_param {
  668. pool: MAX
  669. kernel_size: 3
  670. stride: 2
  671. }
  672. }
  673. layer {
  674. name: "norm1"
  675. type: "LRN"
  676. bottom: "pool1"
  677. top: "norm1"
  678. lrn_param {
  679. local_size: 5
  680. alpha: 0.0001
  681. beta: 0.75
  682. }
  683. }
  684. layer {
  685. name: "conv2"
  686. type: "Convolution"
  687. bottom: "norm1"
  688. top: "conv2"
  689. param {
  690. lr_mult: 0
  691. decay_mult: 0
  692. }
  693. param {
  694. lr_mult: 0
  695. decay_mult: 0
  696. }
  697. convolution_param {
  698. num_output: 256
  699. pad: 2
  700. kernel_size: 5
  701. group: 2
  702. weight_filler {
  703. type: "gaussian"
  704. std: 0.01
  705. }
  706. bias_filler {
  707. type: "constant"
  708. value: 1
  709. }
  710. }
  711. }
  712. layer {
  713. name: "relu2"
  714. type: "ReLU"
  715. bottom: "conv2"
  716. top: "conv2"
  717. }
  718. layer {
  719. name: "pool2"
  720. type: "Pooling"
  721. bottom: "conv2"
  722. top: "pool2"
  723. pooling_param {
  724. pool: MAX
  725. kernel_size: 3
  726. stride: 2
  727. }
  728. }
  729. layer {
  730. name: "norm2"
  731. type: "LRN"
  732. bottom: "pool2"
  733. top: "norm2"
  734. lrn_param {
  735. local_size: 5
  736. alpha: 0.0001
  737. beta: 0.75
  738. }
  739. }
  740. layer {
  741. name: "conv3"
  742. type: "Convolution"
  743. bottom: "norm2"
  744. top: "conv3"
  745. param {
  746. lr_mult: 0
  747. decay_mult: 0
  748. }
  749. param {
  750. lr_mult: 0
  751. decay_mult: 0
  752. }
  753. convolution_param {
  754. num_output: 384
  755. pad: 1
  756. kernel_size: 3
  757. weight_filler {
  758. type: "gaussian"
  759. std: 0.01
  760. }
  761. bias_filler {
  762. type: "constant"
  763. value: 0
  764. }
  765. }
  766. }
  767. layer {
  768. name: "relu3"
  769. type: "ReLU"
  770. bottom: "conv3"
  771. top: "conv3"
  772. }
  773. layer {
  774. name: "conv4"
  775. type: "Convolution"
  776. bottom: "conv3"
  777. top: "conv4"
  778. param {
  779. lr_mult: 0
  780. decay_mult: 0
  781. }
  782. param {
  783. lr_mult: 0
  784. decay_mult: 0
  785. }
  786. convolution_param {
  787. num_output: 384
  788. pad: 1
  789. kernel_size: 3
  790. group: 2
  791. weight_filler {
  792. type: "gaussian"
  793. std: 0.01
  794. }
  795. bias_filler {
  796. type: "constant"
  797. value: 1
  798. }
  799. }
  800. }
  801. layer {
  802. name: "relu4"
  803. type: "ReLU"
  804. bottom: "conv4"
  805. top: "conv4"
  806. }
  807. layer {
  808. name: "conv5"
  809. type: "Convolution"
  810. bottom: "conv4"
  811. top: "conv5"
  812. param {
  813. lr_mult: 0
  814. decay_mult: 0
  815. }
  816. param {
  817. lr_mult: 0
  818. decay_mult: 0
  819. }
  820. convolution_param {
  821. num_output: 256
  822. pad: 1
  823. kernel_size: 3
  824. group: 2
  825. weight_filler {
  826. type: "gaussian"
  827. std: 0.01
  828. }
  829. bias_filler {
  830. type: "constant"
  831. value: 1
  832. }
  833. }
  834. }
  835. layer {
  836. name: "relu5"
  837. type: "ReLU"
  838. bottom: "conv5"
  839. top: "conv5"
  840. }
  841. layer {
  842. name: "pool5"
  843. type: "Pooling"
  844. bottom: "conv5"
  845. top: "pool5"
  846. pooling_param {
  847.  
  848. pool: MAX
  849. kernel_size: 3
  850. stride: 2
  851. }
  852. }
  853. layer {
  854. name: "pool5_flatten"
  855. type: "Flatten"
  856. bottom: "pool5"
  857. top: "pool5_flatten"
  858. }
  859. layer {
  860. name: "fc6"
  861. type: "Convolution"
  862. bottom: "pool5_flatten"
  863. top: "fc6"
  864. param {
  865. lr_mult: 0
  866. decay_mult: 0
  867. }
  868. param {
  869. lr_mult: 0
  870. decay_mult: 0
  871. }
  872. convolution_param {
  873. num_output: 4096
  874. pad: 0
  875. kernel_size: 7
  876. stride: 1
  877. }
  878. }
  879. layer {
  880. name: "relu6"
  881. type: "ReLU"
  882. bottom: "fc6"
  883. top: "fc6"
  884. }
  885. layer {
  886. name: "drop6"
  887. type: "Dropout"
  888. bottom: "fc6"
  889. top: "fc6"
  890. dropout_param {
  891. dropout_ratio: 0.5
  892. }
  893. }
  894. layer {
  895. name: "fc7"
  896. type: "Convolution"
  897. bottom: "fc6"
  898. top: "fc7"
  899. param {
  900. lr_mult: 0
  901. decay_mult: 0
  902. }
  903. param {
  904. lr_mult: 0
  905. decay_mult: 0
  906. }
  907. convolution_param {
  908. num_output: 4096
  909. kernel_size: 1
  910. stride: 1
  911. }
  912. }
  913. layer {
  914. name: "relu7"
  915. type: "ReLU"
  916. bottom: "fc7"
  917. top: "fc7"
  918. }
  919. layer {
  920. name: "drop7"
  921. type: "Dropout"
  922. bottom: "fc7"
  923. top: "fc7"
  924. dropout_param {
  925. dropout_ratio: 0.5
  926. }
  927. }
  928. layer {
  929. name: "fc8_mitindoor67"
  930. type: "Convolution"
  931. bottom: "fc7"
  932. top: "fc8_mitindoor67"
  933. param {
  934. lr_mult: 10
  935. decay_mult: 0
  936. }
  937. param {
  938. lr_mult: 20
  939. decay_mult: 0
  940. }
  941. convolution_param {
  942. num_output: 67
  943. kernel_size: 1
  944. stride: 1
  945. }
  946. }
  947. layer {
  948. name: "loss"
  949. type: "SoftmaxWithLoss"
  950. bottom: "fc8_mitindoor67"
  951. bottom: "label"
  952. top: "loss"
  953. }
  954. layer {
  955. name: "accuracy"
  956. type: "Accuracy"
  957. bottom: "fc8_mitindoor67"
  958. bottom: "label"
  959. top: "accuracy"
  960. include {
  961. phase: TEST
  962. }
  963. }
  964. I0412 04:12:22.598542 4704 layer_factory.cpp:58] Creating layer MITindoor67
  965. I0412 04:12:22.598542 4704 db_lmdb.cpp:40] Opened lmdb mydataset256_test_lmdb
  966. I0412 04:12:22.598542 4704 net.cpp:84] Creating Layer MITindoor67
  967. I0412 04:12:22.598542 4704 net.cpp:380] MITindoor67 -> data
  968. I0412 04:12:22.598542 4704 net.cpp:380] MITindoor67 -> label
  969. I0412 04:12:22.598542 4704 data_transformer.cpp:25] Loading mean file from: mean.binaryproto
  970. I0412 04:12:22.598542 4704 data_layer.cpp:45] output data size: 10,3,227,227
  971. I0412 04:12:22.614207 4704 net.cpp:122] Setting up MITindoor67
  972. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 3 227 227 (1545870)
  973. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 (10)
  974. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 6183520
  975. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer label_MITindoor67_1_split
  976. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer label_MITindoor67_1_split
  977. I0412 04:12:22.614207 4704 net.cpp:406] label_MITindoor67_1_split <- label
  978. I0412 04:12:22.614207 4704 net.cpp:380] label_MITindoor67_1_split -> label_MITindoor67_1_split_0
  979. I0412 04:12:22.614207 4704 net.cpp:380] label_MITindoor67_1_split -> label_MITindoor67_1_split_1
  980. I0412 04:12:22.614207 4704 net.cpp:122] Setting up label_MITindoor67_1_split
  981. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 (10)
  982. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 (10)
  983. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 6183600
  984. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer conv1
  985. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer conv1
  986. I0412 04:12:22.614207 4704 net.cpp:406] conv1 <- data
  987. I0412 04:12:22.614207 4704 net.cpp:380] conv1 -> conv1
  988. I0412 04:12:22.614207 4704 net.cpp:122] Setting up conv1
  989. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 96 55 55 (2904000)
  990. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 17799600
  991. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer relu1
  992. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer relu1
  993. I0412 04:12:22.614207 4704 net.cpp:406] relu1 <- conv1
  994. I0412 04:12:22.614207 4704 net.cpp:367] relu1 -> conv1 (in-place)
  995. I0412 04:12:22.614207 4704 net.cpp:122] Setting up relu1
  996. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 96 55 55 (2904000)
  997. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 29415600
  998. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer pool1
  999. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer pool1
  1000. I0412 04:12:22.614207 4704 net.cpp:406] pool1 <- conv1
  1001. I0412 04:12:22.614207 4704 net.cpp:380] pool1 -> pool1
  1002. I0412 04:12:22.614207 4704 net.cpp:122] Setting up pool1
  1003. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 96 27 27 (699840)
  1004. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 32214960
  1005. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer norm1
  1006. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer norm1
  1007. I0412 04:12:22.614207 4704 net.cpp:406] norm1 <- pool1
  1008. I0412 04:12:22.614207 4704 net.cpp:380] norm1 -> norm1
  1009. I0412 04:12:22.614207 4704 net.cpp:122] Setting up norm1
  1010. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 96 27 27 (699840)
  1011. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 35014320
  1012. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer conv2
  1013. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer conv2
  1014. I0412 04:12:22.614207 4704 net.cpp:406] conv2 <- norm1
  1015. I0412 04:12:22.614207 4704 net.cpp:380] conv2 -> conv2
  1016. I0412 04:12:22.614207 4704 net.cpp:122] Setting up conv2
  1017. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 256 27 27 (1866240)
  1018. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 42479280
  1019. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer relu2
  1020. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer relu2
  1021. I0412 04:12:22.614207 4704 net.cpp:406] relu2 <- conv2
  1022. I0412 04:12:22.614207 4704 net.cpp:367] relu2 -> conv2 (in-place)
  1023. I0412 04:12:22.614207 4704 net.cpp:122] Setting up relu2
  1024. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 256 27 27 (1866240)
  1025. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 49944240
  1026. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer pool2
  1027. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer pool2
  1028. I0412 04:12:22.614207 4704 net.cpp:406] pool2 <- conv2
  1029. I0412 04:12:22.614207 4704 net.cpp:380] pool2 -> pool2
  1030. I0412 04:12:22.614207 4704 net.cpp:122] Setting up pool2
  1031. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 256 13 13 (432640)
  1032. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 51674800
  1033. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer norm2
  1034. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer norm2
  1035. I0412 04:12:22.614207 4704 net.cpp:406] norm2 <- pool2
  1036. I0412 04:12:22.614207 4704 net.cpp:380] norm2 -> norm2
  1037. I0412 04:12:22.614207 4704 net.cpp:122] Setting up norm2
  1038. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 256 13 13 (432640)
  1039. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 53405360
  1040. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer conv3
  1041. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer conv3
  1042. I0412 04:12:22.614207 4704 net.cpp:406] conv3 <- norm2
  1043. I0412 04:12:22.614207 4704 net.cpp:380] conv3 -> conv3
  1044. I0412 04:12:22.614207 4704 net.cpp:122] Setting up conv3
  1045. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 384 13 13 (648960)
  1046. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 56001200
  1047. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer relu3
  1048. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer relu3
  1049. I0412 04:12:22.614207 4704 net.cpp:406] relu3 <- conv3
  1050. I0412 04:12:22.614207 4704 net.cpp:367] relu3 -> conv3 (in-place)
  1051. I0412 04:12:22.614207 4704 net.cpp:122] Setting up relu3
  1052. I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 384 13 13 (648960)
  1053. I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 58597040
  1054. I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer conv4
  1055. I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer conv4
  1056. I0412 04:12:22.614207 4704 net.cpp:406] conv4 <- conv3
  1057. I0412 04:12:22.614207 4704 net.cpp:380] conv4 -> conv4
  1058. I0412 04:12:22.629875 4704 net.cpp:122] Setting up conv4
  1059. I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 384 13 13 (648960)
  1060. I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 61192880
  1061. I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer relu4
  1062. I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer relu4
  1063. I0412 04:12:22.629875 4704 net.cpp:406] relu4 <- conv4
  1064. I0412 04:12:22.629875 4704 net.cpp:367] relu4 -> conv4 (in-place)
  1065. I0412 04:12:22.629875 4704 net.cpp:122] Setting up relu4
  1066. I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 384 13 13 (648960)
  1067. I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 63788720
  1068. I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer conv5
  1069. I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer conv5
  1070. I0412 04:12:22.629875 4704 net.cpp:406] conv5 <- conv4
  1071. I0412 04:12:22.629875 4704 net.cpp:380] conv5 -> conv5
  1072. I0412 04:12:22.629875 4704 net.cpp:122] Setting up conv5
  1073. I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 256 13 13 (432640)
  1074. I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 65519280
  1075. I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer relu5
  1076. I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer relu5
  1077. I0412 04:12:22.629875 4704 net.cpp:406] relu5 <- conv5
  1078. I0412 04:12:22.629875 4704 net.cpp:367] relu5 -> conv5 (in-place)
  1079. I0412 04:12:22.629875 4704 net.cpp:122] Setting up relu5
  1080. I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 256 13 13 (432640)
  1081. I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 67249840
  1082. I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer pool5
  1083. I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer pool5
  1084. I0412 04:12:22.629875 4704 net.cpp:406] pool5 <- conv5
  1085. I0412 04:12:22.629875 4704 net.cpp:380] pool5 -> pool5
  1086. I0412 04:12:22.629875 4704 net.cpp:122] Setting up pool5
  1087. I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 256 6 6 (92160)
  1088. I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 67618480
  1089. I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer pool5_flatten
  1090. I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer pool5_flatten
  1091. I0412 04:12:22.629875 4704 net.cpp:406] pool5_flatten <- pool5
  1092. I0412 04:12:22.629875 4704 net.cpp:380] pool5_flatten -> pool5_flatten
  1093. I0412 04:12:22.629875 4704 net.cpp:122] Setting up pool5_flatten
  1094. I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 9216 (92160)
  1095. I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 67987120
  1096. I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer fc6
  1097. I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer fc6
  1098. I0412 04:12:22.629875 4704 net.cpp:406] fc6 <- pool5_flatten
  1099. I0412 04:12:22.629875 4704 net.cpp:380] fc6 -> fc6
  1100. I0412 04:12:22.677767 4704 net.cpp:122] Setting up fc6
  1101. I0412 04:12:22.677767 4704 net.cpp:129] Top shape: 10 4096 (40960)
  1102. I0412 04:12:22.677767 4704 net.cpp:137] Memory required for data: 68150960
  1103. I0412 04:12:22.677767 4704 layer_factory.cpp:58] Creating layer relu6
  1104. I0412 04:12:22.677767 4704 net.cpp:84] Creating Layer relu6
  1105. I0412 04:12:22.677767 4704 net.cpp:406] relu6 <- fc6
  1106. I0412 04:12:22.677767 4704 net.cpp:367] relu6 -> fc6 (in-place)
  1107. I0412 04:12:22.677767 4704 net.cpp:122] Setting up relu6
  1108. I0412 04:12:22.677767 4704 net.cpp:129] Top shape: 10 4096 (40960)
  1109. I0412 04:12:22.677767 4704 net.cpp:137] Memory required for data: 68314800
  1110. I0412 04:12:22.677767 4704 layer_factory.cpp:58] Creating layer drop6
  1111. I0412 04:12:22.677767 4704 net.cpp:84] Creating Layer drop6
  1112. I0412 04:12:22.677767 4704 net.cpp:406] drop6 <- fc6
  1113. I0412 04:12:22.677767 4704 net.cpp:367] drop6 -> fc6 (in-place)
  1114. I0412 04:12:22.677767 4704 net.cpp:122] Setting up drop6
  1115. I0412 04:12:22.677767 4704 net.cpp:129] Top shape: 10 4096 (40960)
  1116. I0412 04:12:22.677767 4704 net.cpp:137] Memory required for data: 68478640
  1117. I0412 04:12:22.677767 4704 layer_factory.cpp:58] Creating layer fc7
  1118. I0412 04:12:22.677767 4704 net.cpp:84] Creating Layer fc7
  1119. I0412 04:12:22.677767 4704 net.cpp:406] fc7 <- fc6
  1120. I0412 04:12:22.677767 4704 net.cpp:380] fc7 -> fc7
  1121. I0412 04:12:22.692293 4704 net.cpp:122] Setting up fc7
  1122. I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 4096 (40960)
  1123. I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68642480
  1124. I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer relu7
  1125. I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer relu7
  1126. I0412 04:12:22.692293 4704 net.cpp:406] relu7 <- fc7
  1127. I0412 04:12:22.692293 4704 net.cpp:367] relu7 -> fc7 (in-place)
  1128. I0412 04:12:22.692293 4704 net.cpp:122] Setting up relu7
  1129. I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 4096 (40960)
  1130. I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68806320
  1131. I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer drop7
  1132. I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer drop7
  1133. I0412 04:12:22.692293 4704 net.cpp:406] drop7 <- fc7
  1134. I0412 04:12:22.692293 4704 net.cpp:367] drop7 -> fc7 (in-place)
  1135. I0412 04:12:22.692293 4704 net.cpp:122] Setting up drop7
  1136. I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 4096 (40960)
  1137. I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68970160
  1138. I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer fc8_mitindoor67
  1139. I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer fc8_mitindoor67
  1140. I0412 04:12:22.692293 4704 net.cpp:406] fc8_mitindoor67 <- fc7
  1141. I0412 04:12:22.692293 4704 net.cpp:380] fc8_mitindoor67 -> fc8_mitindoor67
  1142. I0412 04:12:22.692293 4704 net.cpp:122] Setting up fc8_mitindoor67
  1143. I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 67 (670)
  1144. I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68972840
  1145. I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer
  1146. fc8_mitindoor67_fc8_mitindoor67_0_split
  1147. I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer fc8_mitindoor67_fc8_mitindoor67_0_split
  1148. I0412 04:12:22.692293 4704 net.cpp:406] fc8_mitindoor67_fc8_mitindoor67_0_split <-
  1149. fc8_mitindoor67
  1150. I0412 04:12:22.692293 4704 net.cpp:380] fc8_mitindoor67_fc8_mitindoor67_0_split ->
  1151. fc8_mitindoor67_fc8_mitindoor67_0_split_0
  1152. I0412 04:12:22.692293 4704 net.cpp:380] fc8_mitindoor67_fc8_mitindoor67_0_split ->
  1153. fc8_mitindoor67_fc8_mitindoor67_0_split_1
  1154. I0412 04:12:22.692293 4704 net.cpp:122] Setting up fc8_mitindoor67_fc8_mitindoor67_0_split
  1155. I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 67 (670)
  1156. I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 67 (670)
  1157. I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68978200
  1158. I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer loss
  1159. I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer loss
  1160. I0412 04:12:22.692293 4704 net.cpp:406] loss <- fc8_mitindoor67_fc8_mitindoor67_0_split_0
  1161. I0412 04:12:22.692293 4704 net.cpp:406] loss <- label_MITindoor67_1_split_0
  1162. I0412 04:12:22.692293 4704 net.cpp:380] loss -> loss
  1163. I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer loss
  1164. I0412 04:12:22.692293 4704 net.cpp:122] Setting up loss
  1165. I0412 04:12:22.692293 4704 net.cpp:129] Top shape: (1)
  1166. I0412 04:12:22.692293 4704 net.cpp:132] with loss weight 1
  1167. I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68978204
  1168. I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer accuracy
  1169. I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer accuracy
  1170. I0412 04:12:22.692293 4704 net.cpp:406] accuracy <- fc8_mitindoor67_fc8_mitindoor67_0_split_1
  1171. I0412 04:12:22.692293 4704 net.cpp:406] accuracy <- label_MITindoor67_1_split_1
  1172. I0412 04:12:22.692293 4704 net.cpp:380] accuracy -> accuracy
  1173. I0412 04:12:22.692293 4704 net.cpp:122] Setting up accuracy
  1174. I0412 04:12:22.707957 4704 net.cpp:129] Top shape: (1)
  1175. I0412 04:12:22.707957 4704 net.cpp:137] Memory required for data: 68978208
  1176. I0412 04:12:22.707957 4704 net.cpp:200] accuracy does not need backward computation.
  1177. I0412 04:12:22.707957 4704 net.cpp:198] loss needs backward computation.
  1178. I0412 04:12:22.707957 4704 net.cpp:198] fc8_mitindoor67_fc8_mitindoor67_0_split needs backward
  1179. computation.
  1180. I0412 04:12:22.707957 4704 net.cpp:198] fc8_mitindoor67 needs backward computation.
  1181. I0412 04:12:22.707957 4704 net.cpp:200] drop7 does not need backward computation.
  1182. I0412 04:12:22.707957 4704 net.cpp:200] relu7 does not need backward computation.
  1183. I0412 04:12:22.707957 4704 net.cpp:200] fc7 does not need backward computation.
  1184. I0412 04:12:22.707957 4704 net.cpp:200] drop6 does not need backward computation.
  1185. I0412 04:12:22.707957 4704 net.cpp:200] relu6 does not need backward computation.
  1186. I0412 04:12:22.707957 4704 net.cpp:200] fc6 does not need backward computation.
  1187. I0412 04:12:22.707957 4704 net.cpp:200] pool5_flatten does not need backward computation.
  1188. I0412 04:12:22.707957 4704 net.cpp:200] pool5 does not need backward computation.
  1189. I0412 04:12:22.707957 4704 net.cpp:200] relu5 does
  1190. not need backward computation.
  1191. I0412 04:12:22.707957 4704 net.cpp:200] conv5 does not need backward computation.
  1192. I0412 04:12:22.707957 4704 net.cpp:200] relu4 does not need backward computation.
  1193. I0412 04:12:22.707957 4704 net.cpp:200] conv4 does not need backward computation.
  1194. I0412 04:12:22.707957 4704 net.cpp:200] relu3 does not need backward computation.
  1195. I0412 04:12:22.707957 4704 net.cpp:200] conv3 does not need backward computation.
  1196. I0412 04:12:22.707957 4704 net.cpp:200] norm2 does not need backward computation.
  1197. I0412 04:12:22.707957 4704 net.cpp:200] pool2 does not need backward computation.
  1198. I0412 04:12:22.707957 4704 net.cpp:200] relu2 does not need backward computation.
  1199. I0412 04:12:22.707957 4704 net.cpp:200] conv2 does not need backward computation.
  1200. I0412 04:12:22.707957 4704 net.cpp:200] norm1 does not need backward computation.
  1201. I0412 04:12:22.707957 4704 net.cpp:200] pool1 does not need backward computation.
  1202. I0412 04:12:22.707957 4704 net.cpp:200] relu1 does not need backward computation.
  1203. I0412 04:12:22.707957 4704 net.cpp:200] conv1 does not need backward computation.
  1204. I0412 04:12:22.707957 4704 net.cpp:200] label_MITindoor67_1_split does not need backward
  1205. computation.
  1206. I0412 04:12:22.707957 4704 net.cpp:200] MITindoor67 does not need backward computation.
  1207. I0412 04:12:22.707957 4704 net.cpp:242] This network produces output accuracy
  1208. I0412 04:12:22.707957 4704 net.cpp:242] This network produces output loss
  1209. I0412 04:12:22.707957 4704 net.cpp:255] Network initialization done.
  1210. I0412 04:12:22.707957 4704 solver.cpp:56] Solver scaffolding done.
  1211. I0412 04:12:22.707957 4704 caffe.cpp:155] Finetuning from
  1212. C:/Users/mahtab/Desktop/finetune_mitindoor67/places205CNN_iter_300000_upgraded.caffemodel
  1213. I0412 04:12:22.895382 4704 upgrade_proto.cpp:53] Attempting to upgrade input file specified
  1214. using deprecated V1LayerParameter:
  1215. C:/Users/mahtab/Desktop/finetune_mitindoor67/places205CNN_iter_300000_upgraded.caffemodel
  1216. I0412 04:12:23.145401 4704 upgrade_proto.cpp:61] Successfully upgraded file specified using
  1217. deprecated V1LayerParameter
  1218. I0412 04:12:23.161070 4704 net.cpp:744] Ignoring source layer data
  1219. I0412 04:12:23.192430 4704 net.cpp:744] Ignoring source layer fc8
  1220. I0412 04:12:23.427315 4704 upgrade_proto.cpp:53] Attempting to upgrade input file specified
  1221. using deprecated V1LayerParameter:
  1222. C:/Users/mahtab/Desktop/finetune_mitindoor67/places205CNN_iter_300000_upgraded.caffemodel
  1223. I0412 04:12:23.630440 4704 upgrade_proto.cpp:61] Successfully upgraded file specified using
  1224. deprecated V1LayerParameter
  1225. I0412 04:12:23.646035 4704 net.cpp:744] Ignoring source layer data
  1226. I0412 04:12:23.677067 4704 net.cpp:744] Ignoring source layer fc8
  1227. I0412 04:12:23.708406 4704 caffe.cpp:249] Starting Optimization
  1228. I0412 04:12:23.708406 4704 solver.cpp:272] Solving SceneRecognitionCaffeNet
  1229. I0412 04:12:23.708406 4704 solver.cpp:273] Learning Rate Policy: step
  1230. I0412 04:12:23.770778 4704 solver.cpp:330] Iteration 0, Testing net (#0)
  1231. I0412 04:13:47.208680 4232 data_layer.cpp:73] Restarting data prefetching from start.
  1232. I0412 04:13:49.770946 4704 solver.cpp:397] Test net output #0: accuracy = 0
  1233. I0412 04:13:49.770946 4704 solver.cpp:397] Test net output #1: loss = 4.20469 (* 1 = 4.20469
  1234. loss)
  1235. I0412 04:13:51.082967 4704 solver.cpp:218] Iteration 0 (0 iter/s, 87.374s/50 iters), loss =4.20469
  1236. I0412 04:13:51.082967 4704 solver.cpp:237] Train net output #0: loss = 4.20469 (* 1 =4.20469 loss)
  1237. I0412 04:13:51.082967 4704 sgd_solver.cpp:105] Iteration 0, lr = 1e-07
  1238. I0412 04:14:58.489819 4704 solver.cpp:218] Iteration 50 (0.741774 iter/s, 67.406s/50 iters),loss = 4.04563
  1239. I0412 04:14:58.489819 4704 solver.cpp:237] Train net output #0: loss = 4.04563 (* 1 =4.04563 loss)
  1240. I0412 04:14:58.489819 4704 sgd_solver.cpp:105] Iteration 50, lr = 1e-07
  1241. I0412 04:16:05.783648 4704 solver.cpp:218] Iteration 100 (0.743019 iter/s, 67.293s/50 iters),loss = 3.89188
  1242. I0412 04:16:05.783648 4704 solver.cpp:237] Train net output #0: loss = 3.89188 (* 1 =3.89188 loss)
  1243. I0412 04:16:05.783648 4704 sgd_solver.cpp:105] Iteration 100, lr = 1e-07
  1244. I0412 04:17:13.099241 4704 solver.cpp:218] Iteration 150 (0.742776 iter/s, 67.315s/50 iters),loss = 3.69574
  1245. I0412 04:17:13.099241 4704 solver.cpp:237] Train net output #0: loss = 3.69574 (* 1 =3.69574 loss)
  1246. I0412 04:17:13.099241 4704 sgd_solver.cpp:105] Iteration 150, lr = 1e-07
  1247. I0412 04:18:20.946854 4704 solver.cpp:218] Iteration 200 (0.736952 iter/s, 67.847s/50 iters),loss = 3.53715
  1248. I0412 04:18:20.946854 4704 solver.cpp:237] Train net output #0: loss = 3.53715 (* 1 =3.53715 loss)
  1249. I0412 04:18:20.946854 4704 sgd_solver.cpp:105] Iteration 200, lr = 1e-07
  1250. I0412 04:19:28.125959 4704 solver.cpp:218] Iteration 250 (0.74428 iter/s, 67.179s/50 iters),loss = 3.3488
  1251. I0412 04:19:28.125959 4704 solver.cpp:237] Train net output #0: loss = 3.3488 (* 1 = 3.3488loss)
  1252. I0412 04:19:28.125959 4704 sgd_solver.cpp:105] Iteration 250, lr = 1e-07
  1253. I0412 04:19:51.017532 4704 solver.cpp:330] Iteration 268, Testing net (#0)
  1254. I0412 04:21:16.677172 4704 solver.cpp:397] Test net output #0: accuracy = 1
  1255. I0412 04:21:16.677172 4704 solver.cpp:397] Test net output #1: loss = 3.3788 (* 1 = 3.3788loss)
  1256. I0412 04:22:01.020563 4704 solver.cpp:218] Iteration 300 (0.327024 iter/s, 152.894s/50 iters),loss = 3.14588
  1257. I0412 04:22:01.020563 4704 solver.cpp:237] Train net output #0: loss = 3.14588 (* 1 =3.14588 loss)
  1258. I0412 04:22:01.020563 4704 sgd_solver.cpp:105] Iteration 300, lr = 1e-07
  1259. I0412 04:23:08.178277 4704 solver.cpp:218] Iteration 350 (0.744524 iter/s, 67.157s/50 iters),loss = 3.00576
  1260. I0412 04:23:08.178277 4704 solver.cpp:237] Train net output #0: loss = 3.00576 (* 1 =3.00576 loss)
  1261. I0412 04:23:08.178277 4704 sgd_solver.cpp:105] Iteration 350, lr = 1e-07
  1262. I0412 04:24:15.349581 4704 solver.cpp:218] Iteration 400 (0.744369 iter/s, 67.171s/50 iters),loss = 2.83526
  1263. I0412 04:24:15.349581 4704 solver.cpp:237] Train net output #0: loss = 2.83526 (* 1 =2.83526 loss)
  1264. I0412 04:24:15.349581 4704 sgd_solver.cpp:105] Iteration 400, lr = 1e-07
  1265. I0412 04:25:22.693616 4704 solver.cpp:218] Iteration 450 (0.742457 iter/s, 67.344s/50 iters),loss = 2.59777
  1266. I0412 04:25:22.693616 4704 solver.cpp:237] Train net output #0: loss = 2.59777 (* 1 =2.59777 loss)
  1267. I0412 04:25:22.693616 4704 sgd_solver.cpp:105] Iteration 450, lr = 1e-07
  1268. I0412 04:26:29.989542 4704 solver.cpp:218] Iteration 500 (0.742997 iter/s, 67.295s/50 iters),loss = 2.58771
  1269. I0412 04:26:29.989542 4704 solver.cpp:237] Train net output #0: loss = 2.58771 (* 1 =2.58771 loss)
  1270. I0412 04:26:29.989542 4704 sgd_solver.cpp:105] Iteration 500, lr = 1e-07
  1271. I0412 04:27:17.091591 4704 solver.cpp:330] Iteration 536, Testing net (#0)
  1272. I0412 04:28:42.646239 4704 solver.cpp:397] Test net output #0: accuracy = 1
  1273. I0412 04:28:42.646239 4704 solver.cpp:397] Test net output #1: loss = 2.58355 (* 1 = 2.58355loss)
  1274. I0412 04:29:02.723795 4704 solver.cpp:218] Iteration 550 (0.327367 iter/s, 152.734s/50 iters),loss = 2.41916
  1275. I0412 04:29:02.723795 4704 solver.cpp:237] Train net output #0: loss = 2.41916 (* 1 =2.41916 loss)
  1276. I0412 04:29:02.723795 4704 sgd_solver.cpp:105] Iteration 550, lr = 1e-07
  1277. I0412 04:30:10.005543 4704 solver.cpp:218] Iteration 600 (0.743152 iter/s, 67.281s/50 iters),loss = 2.17474
  1278. I0412 04:30:10.005543 4704 solver.cpp:237] Train net output #0: loss = 2.17474 (* 1 =2.17474 loss)
  1279. I0412 04:30:10.005543 4704 sgd_solver.cpp:105] Iteration 600, lr = 1e-07
  1280. I0412 04:31:17.192559 4704 solver.cpp:218] Iteration 650 (0.744192 iter/s, 67.187s/50 iters),loss = 2.06964
  1281. I0412 04:31:17.192559 4704 solver.cpp:237] Train net output #0: loss = 2.06964 (* 1 =2.06964 loss)
  1282. I0412 04:31:17.192559 4704 sgd_solver.cpp:105] Iteration 650, lr = 1e-07
  1283. I0412 04:32:24.360908 4704 solver.cpp:218] Iteration 700 (0.744402 iter/s, 67.168s/50 iters),loss = 1.89071
  1284. I0412 04:32:24.360908 4704 solver.cpp:237] Train net output #0: loss = 1.89071 (* 1 =1.89071 loss)
  1285. I0412 04:32:24.360908 4704 sgd_solver.cpp:105] Iteration 700, lr = 1e-07
  1286. I0412 04:33:31.505738 4704 solver.cpp:218] Iteration 750 (0.744668 iter/s, 67.144s/50 iters),loss = 1.87087
  1287. I0412 04:33:31.505738 4704 solver.cpp:237] Train net output #0: loss = 1.87087 (* 1 =1.87087 loss)
  1288. I0412 04:33:31.505738 4704 sgd_solver.cpp:105] Iteration 750, lr = 1e-07
  1289. I0412 04:34:38.818370 4704 solver.cpp:218] Iteration 800 (0.74281 iter/s, 67.312s/50 iters),loss = 1.69662
  1290. I0412 04:34:38.818370 4704 solver.cpp:237] Train net output #0: loss = 1.69662 (* 1 =1.69662 loss)
  1291. I0412 04:34:38.818370 4704 sgd_solver.cpp:105] Iteration 800, lr = 1e-07
  1292. I0412 04:34:42.958595 4704 solver.cpp:330] Iteration 804, Testing net (#0)
  1293. I0412 04:36:08.349354 4704 solver.cpp:397] Test net output #0: accuracy = 1
  1294. I0412 04:36:08.365051 4704 solver.cpp:397] Test net output #1: loss = 1.90856 (* 1 = 1.90856loss)
  1295. I0412 04:37:11.380419 4704 solver.cpp:218] Iteration 850 (0.327736 iter/s, 152.562s/50 iters),loss = 1.68572
  1296. I0412 04:37:11.380419 4704 solver.cpp:237] Train net output #0: loss = 1.68572 (* 1 =1.68572 loss)
  1297. I0412 04:37:11.380419 4704 sgd_solver.cpp:105] Iteration 850, lr = 1e-07
  1298. I0412 04:38:18.349567 4704 solver.cpp:218] Iteration 900 (0.746614 iter/s, 66.969s/50 iters),loss = 1.5322
  1299. I0412 04:38:18.349567 4704 solver.cpp:237] Train net output #0: loss = 1.5322 (* 1 = 1.5322loss)
  1300. I0412 04:38:18.349567 4704 sgd_solver.cpp:105] Iteration 900, lr = 1e-07
  1301. I0412 04:39:25.224520 4704 solver.cpp:218] Iteration 950 (0.747675 iter/s, 66.874s/50 iters),loss = 1.35434
  1302. I0412 04:39:25.224520 4704 solver.cpp:237] Train net output #0: loss = 1.35434 (* 1 =1.35434 loss)
  1303. I0412 04:39:25.224520 4704 sgd_solver.cpp:105] Iteration 950, lr = 1e-07
  1304. I0412 04:40:32.130380 4704 solver.cpp:218] Iteration 1000 (0.747328 iter/s, 66.905s/50 iters),loss = 1.30102
  1305. I0412 04:40:32.130380 4704 solver.cpp:237] Train net output #0: loss = 1.30102 (* 1 =1.30102 loss)
  1306. I0412 04:40:32.130380 4704 sgd_solver.cpp:105] Iteration 1000, lr = 1e-07
  1307. I0412 04:41:40.880671 4704 solver.cpp:218] Iteration 1050 (0.727273 iter/s, 68.75s/50 iters),loss = 1.24691
  1308. I0412 04:41:40.880671 4704 solver.cpp:237] Train net output #0: loss = 1.24691 (* 1 =1.24691 loss)
  1309. I0412 04:41:40.880671 4704 sgd_solver.cpp:105] Iteration 1050, lr = 1e-07
  1310. I0412 04:42:09.927634 4704 solver.cpp:330] Iteration 1072, Testing net (#0)
  1311. I0412 04:43:38.349730 4704 solver.cpp:397] Test net output #0: accuracy = 1
  1312. I0412 04:43:38.349730 4704 solver.cpp:397] Test net output #1: loss = 1.40134 (* 1 = 1.40134loss)
  1313. I0412 04:44:18.818953 4704 solver.cpp:218] Iteration 1100 (0.31658 iter/s, 157.938s/50 iters),loss = 1.18656
  1314. I0412 04:44:18.818953 4704 solver.cpp:237] Train net output #0: loss = 1.18656 (* 1 =1.18656 loss)
  1315. I0412 04:44:18.818953 4704 sgd_solver.cpp:105] Iteration 1100, lr = 1e-07
  1316. I0412 04:45:29.631006 4704 solver.cpp:218] Iteration 1150 (0.706095 iter/s, 70.812s/50 iters),loss = 1.06573
  1317. I0412 04:45:29.631006 4704 solver.cpp:237] Train net output #0: loss = 1.06573 (* 1 =1.06573 loss)
  1318. I0412 04:45:29.631006 4704 sgd_solver.cpp:105] Iteration 1150, lr = 1e-07
  1319. I0412 04:46:36.709095 4704 solver.cpp:218] Iteration 1200 (0.745401 iter/s, 67.078s/50 iters),loss = 1.0079
  1320. I0412 04:46:36.709095 4704 solver.cpp:237] Train net output #0: loss = 1.0079 (* 1 = 1.0079loss)
  1321. I0412 04:46:36.709095 4704 sgd_solver.cpp:105] Iteration 1200, lr = 1e-07
  1322. I0412 04:47:43.599740 4704 solver.cpp:218] Iteration 1250 (0.747496 iter/s, 66.89s/50 iters),loss = 0.896948
  1323. I0412 04:47:43.599740 4704 solver.cpp:237] Train net output #0: loss = 0.896948 (* 1 =0.896948 loss)
  1324. I0412 04:47:43.599740 4704 sgd_solver.cpp:105] Iteration 1250, lr = 1e-07
  1325. I0412 04:48:50.458986 4704 solver.cpp:218] Iteration 1300 (0.747842 iter/s, 66.859s/50 iters),loss = 0.760583
  1326. I0412 04:48:50.458986 4704 solver.cpp:237] Train net output #0: loss = 0.760583 (* 1 =0.760583 loss)
  1327. I0412 04:48:50.458986 4704 sgd_solver.cpp:105] Iteration 1300, lr = 1e-07
  1328. I0412 04:49:42.693409 4704 solver.cpp:330] Iteration 1340, Testing net (#0)
  1329. I0412 04:51:07.927647 4704 solver.cpp:397] Test net output #0: accuracy = 1
  1330. I0412 04:51:07.927647 4704 solver.cpp:397] Test net output #1: loss = 1.05483 (* 1 = 1.05483loss)
  1331. I0412 04:51:22.568167 4704 solver.cpp:218] Iteration 1350 (0.328746 iter/s, 152.093s/50 iters),loss = 0.775195
  1332. I0412 04:51:22.568167 4704 solver.cpp:237] Train net output #0: loss = 0.775195 (* 1 =0.775195 loss)
  1333. I0412 04:51:22.568167 4704 sgd_solver.cpp:105] Iteration 1350, lr = 1e-07
  1334. I0412 04:52:29.552525 4704 solver.cpp:218] Iteration 1400 (0.746447 iter/s, 66.984s/50 iters),loss = 0.718551
  1335. I0412 04:52:29.552525 4704 solver.cpp:237] Train net output #0: loss = 0.718551 (* 1 =0.718551 loss)
  1336. I0412 04:52:29.552525 4704 sgd_solver.cpp:105] Iteration 1400, lr = 1e-07
  1337. I0412 04:53:37.411788 4704 solver.cpp:218] Iteration 1450 (0.736822 iter/s, 67.859s/50 iters),loss = 0.691536
  1338. I0412 04:53:37.411788 4704 solver.cpp:237] Train net output #0: loss = 0.691536 (* 1 =0.691536 loss)
  1339. I0412 04:53:37.411788 4704 sgd_solver.cpp:105] Iteration 1450, lr = 1e-07
  1340. I0412 04:54:44.599256 4704 solver.cpp:218] Iteration 1500 (0.744192 iter/s, 67.187s/50 iters),loss = 0.834102
  1341. I0412 04:54:44.599256 4704 solver.cpp:237] Train net output #0: loss = 0.834102 (* 1 =0.834102 loss)
  1342. I0412 04:54:44.599256 4704 sgd_solver.cpp:105] Iteration 1500, lr = 1e-07
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement