name: "NN2"
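# Data layers: training and validation inputs are read from LMDB databases with
# per-pixel mean subtraction; training images are additionally mirrored.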
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TRAIN
  }
  transform_param {
    mirror: true
    mean_file: "/home/jiayi-wei/caffe/examples/NN2/image_train_mean.binaryproto"
  }
  data_param {
    source: "/home/jiayi-wei/caffe/examples/NN2/img_train_lmdb"
    batch_size: 30
    backend: LMDB
  }
}
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TEST
  }
  transform_param {
    mirror: false
    mean_file: "/home/jiayi-wei/caffe/examples/NN2/image_train_mean.binaryproto"
  }
  data_param {
    source: "/home/jiayi-wei/caffe/examples/NN2/img_val_lmdb"
    batch_size: 11
    backend: LMDB
  }
}
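# Convolution trunk: five blocks of 3x3 convolutions, each block ending in a
# 2x2 pooling layer (MAX for pool1-pool4, AVE for pool5).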
# first layers
layer {
  name: "conv11"
  type: "Convolution"
  bottom: "data"
  top: "conv11"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu11"
  type: "ReLU"
  bottom: "conv11"
  top: "conv11"
}
layer {
  name: "conv12"
  type: "Convolution"
  bottom: "conv11"
  top: "conv12"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu12"
  type: "ReLU"
  bottom: "conv12"
  top: "conv12"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv12"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
# second layers
layer {
  name: "conv21"
  type: "Convolution"
  bottom: "pool1"
  top: "conv21"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu21"
  type: "ReLU"
  bottom: "conv21"
  top: "conv21"
}
layer {
  name: "conv22"
  type: "Convolution"
  bottom: "conv21"
  top: "conv22"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu22"
  type: "ReLU"
  bottom: "conv22"
  top: "conv22"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv22"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
# third layers
layer {
  name: "conv31"
  type: "Convolution"
  bottom: "pool2"
  top: "conv31"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu31"
  type: "ReLU"
  bottom: "conv31"
  top: "conv31"
}
layer {
  name: "conv32"
  type: "Convolution"
  bottom: "conv31"
  top: "conv32"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu32"
  type: "ReLU"
  bottom: "conv32"
  top: "conv32"
}
layer {
  name: "pool3"
  type: "Pooling"
  bottom: "conv32"
  top: "pool3"
  pooling_param {
    pool: MAX
    pad: 1
    kernel_size: 2
    stride: 2
  }
}
# fourth layers
layer {
  name: "conv41"
  type: "Convolution"
  bottom: "pool3"
  top: "conv41"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu41"
  type: "ReLU"
  bottom: "conv41"
  top: "conv41"
}
layer {
  name: "conv42"
  type: "Convolution"
  bottom: "conv41"
  top: "conv42"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu42"
  type: "ReLU"
  bottom: "conv42"
  top: "conv42"
}
layer {
  name: "conv43"
  type: "Convolution"
  bottom: "conv42"
  top: "conv43"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu43"
  type: "ReLU"
  bottom: "conv43"
  top: "conv43"
}
layer {
  name: "pool4"
  type: "Pooling"
  bottom: "conv43"
  top: "pool4"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
# fifth layers
layer {
  name: "conv51"
  type: "Convolution"
  bottom: "pool4"
  top: "conv51"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu51"
  type: "ReLU"
  bottom: "conv51"
  top: "conv51"
}
layer {
  name: "conv52"
  type: "Convolution"
  bottom: "conv51"
  top: "conv52"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu52"
  type: "ReLU"
  bottom: "conv52"
  top: "conv52"
}
layer {
  name: "conv53"
  type: "Convolution"
  bottom: "conv52"
  top: "conv53"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv53"
  top: "pool5"
  pooling_param {
    pool: AVE
    pad: 1
    kernel_size: 2
    stride: 2
  }
}

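# Classifier head: dropout regularizes pool5 before two fully-connected layers;
# fc7 produces one score per class.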
# drop_Fc
layer {
  name: "dropout"
  type: "Dropout"
  bottom: "pool5"
  top: "pool5"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 1000
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}

layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 10575
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
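# Evaluation and loss: accuracy is reported only in the TEST phase;
# SoftmaxWithLoss over fc7 drives training.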
layer {
  name: "accuracy"
  type: "Accuracy"
  bottom: "fc7"
  bottom: "label"
  top: "accuracy"
  include {
    phase: TEST
  }
}
layer {
  name: "SoftMax"
  type: "SoftmaxWithLoss"
  bottom: "fc7"
  bottom: "label"
  top: "SoftMax"
}

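# Solver settings. In a standard Caffe setup these live in their own
# solver.prototxt; the net: field below points back to the network definition above.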
net: "train_val.prototxt"
test_iter: 10000
test_interval: 1000
base_lr: 0.01
lr_policy: "step"
gamma: 0.1
stepsize: 100000
display: 20
max_iter: 450000
momentum: 0.9
weight_decay: 0.0005
snapshot: 10000
snapshot_prefix: "/home/jiayi-wei/caffe/examples/NN2"
solver_mode: GPU
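# Usage sketch (assumption: the network above is saved as train_val.prototxt and
# the solver block as solver.prototxt, matching the net: path):
#   caffe train --solver=solver.prototxt --gpu 0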