Advertisement
Guest User

Untitled

a guest
Feb 23rd, 2017
97
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 6.68 KB | None | 0 0
# Fisher Yu's dilated-convolution frontend (VGG-16 based) — training net for
# semantic segmentation on PASCAL VOC 2012.
name: "Fisher Yu's Test Net @ PASCAL VOC 2012"
# Input layer. "ImageLabelData" is a custom layer type (not in stock Caffe) —
# presumably from the author's fork; it produces image crops ("data") and
# dense label maps ("label").
layer {
  name: "data"
  type: "ImageLabelData"
  top: "data"
  top: "label"
  transform_param {
    mirror: false      # no random horizontal flipping
    crop_size: 500
    # Per-channel mean subtraction. NOTE(review): values look like the usual
    # BGR channel means — confirm channel order against the layer implementation.
    mean_value: 102.93
    mean_value: 111.36
    mean_value: 116.52
  }
  image_label_data_param {
    # NOTE(review): both paths point at the same directory rather than list
    # files — verify this is what the custom layer expects.
    image_list_path: "/home/liuyufei/ssd/PASCAL_VOC2012"
    label_list_path: "/home/liuyufei/ssd/PASCAL_VOC2012"
    batch_size: 2
    shuffle: true
    # Labels are sampled as a 16x16 grid, stride 8 in each dimension,
    # offset 186 — presumably aligned with the net's output stride and
    # the margin lost to unpadded convolutions; TODO confirm.
    label_slice {
      dim: 16
      dim: 16
      stride: 8
      stride: 8
      offset: 186
      offset: 186
    }
    padding: REFLECT   # reflect-pad input images instead of zero-padding
  }
}
# --- Stage 1 (VGG-16 style): two 3x3 convolutions, 64 channels, 2x2 max pool.
# No pad is declared, so each 3x3 conv shrinks the map by 2 pixels per side pair.
layer {
  name: "conv1_1"
  type: "Convolution"
  bottom: "data"
  top: "conv1_1"
  # First param block = weights (lr x1, weight decay on);
  # second = bias (lr x2, no decay) — Caffe's positional convention.
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    kernel_size: 3
  }
}
layer {
  name: "relu1_1"
  type: "ReLU"
  bottom: "conv1_1"
  top: "conv1_1"    # in-place activation
}
layer {
  name: "conv1_2"
  type: "Convolution"
  bottom: "conv1_1"
  top: "conv1_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    kernel_size: 3
  }
}
layer {
  name: "relu1_2"
  type: "ReLU"
  bottom: "conv1_2"
  top: "conv1_2"    # in-place activation
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1_2"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2       # halves spatial resolution
  }
}
# --- Stage 2: two 3x3 convolutions, 128 channels, 2x2 max pool.
layer {
  name: "conv2_1"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2_1"
  # weights: lr x1 with decay; bias: lr x2, no decay (positional convention)
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    kernel_size: 3
  }
}
layer {
  name: "relu2_1"
  type: "ReLU"
  bottom: "conv2_1"
  top: "conv2_1"    # in-place activation
}
layer {
  name: "conv2_2"
  type: "Convolution"
  bottom: "conv2_1"
  top: "conv2_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    kernel_size: 3
  }
}
layer {
  name: "relu2_2"
  type: "ReLU"
  bottom: "conv2_2"
  top: "conv2_2"    # in-place activation
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2_2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2       # halves spatial resolution
  }
}
# --- Stage 3: three 3x3 convolutions, 256 channels, 2x2 max pool.
layer {
  name: "conv3_1"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3_1"
  # weights: lr x1 with decay; bias: lr x2, no decay (positional convention)
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    kernel_size: 3
  }
}
layer {
  name: "relu3_1"
  type: "ReLU"
  bottom: "conv3_1"
  top: "conv3_1"    # in-place activation
}
layer {
  name: "conv3_2"
  type: "Convolution"
  bottom: "conv3_1"
  top: "conv3_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    kernel_size: 3
  }
}
layer {
  name: "relu3_2"
  type: "ReLU"
  bottom: "conv3_2"
  top: "conv3_2"    # in-place activation
}
layer {
  name: "conv3_3"
  type: "Convolution"
  bottom: "conv3_2"
  top: "conv3_3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    kernel_size: 3
  }
}
layer {
  name: "relu3_3"
  type: "ReLU"
  bottom: "conv3_3"
  top: "conv3_3"    # in-place activation
}
layer {
  name: "pool3"
  type: "Pooling"
  bottom: "conv3_3"
  top: "pool3"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2       # last pooling layer in the net — output stride fixed here
  }
}
# --- Stage 4: three 3x3 convolutions, 512 channels.
# Unlike plain VGG-16 there is NO pool4: conv5_1 consumes conv4_3 directly,
# preserving spatial resolution for the dilated stage that follows.
layer {
  name: "conv4_1"
  type: "Convolution"
  bottom: "pool3"
  top: "conv4_1"
  # weights: lr x1 with decay; bias: lr x2, no decay (positional convention)
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    kernel_size: 3
  }
}
layer {
  name: "relu4_1"
  type: "ReLU"
  bottom: "conv4_1"
  top: "conv4_1"    # in-place activation
}
layer {
  name: "conv4_2"
  type: "Convolution"
  bottom: "conv4_1"
  top: "conv4_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    kernel_size: 3
  }
}
layer {
  name: "relu4_2"
  type: "ReLU"
  bottom: "conv4_2"
  top: "conv4_2"    # in-place activation
}
layer {
  name: "conv4_3"
  type: "Convolution"
  bottom: "conv4_2"
  top: "conv4_3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    kernel_size: 3
  }
}
layer {
  name: "relu4_3"
  type: "ReLU"
  bottom: "conv4_3"
  top: "conv4_3"    # in-place activation
}
# --- Stage 5: three 3x3 convolutions, 512 channels, each with dilation 2.
# Dilation substitutes for the pooling removed after stage 4, enlarging the
# receptive field without further downsampling.
layer {
  name: "conv5_1"
  type: "Convolution"
  bottom: "conv4_3"
  top: "conv5_1"
  # weights: lr x1 with decay; bias: lr x2, no decay (positional convention)
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    kernel_size: 3
    dilation: 2
  }
}
layer {
  name: "relu5_1"
  type: "ReLU"
  bottom: "conv5_1"
  top: "conv5_1"    # in-place activation
}
layer {
  name: "conv5_2"
  type: "Convolution"
  bottom: "conv5_1"
  top: "conv5_2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    kernel_size: 3
    dilation: 2
  }
}
layer {
  name: "relu5_2"
  type: "ReLU"
  bottom: "conv5_2"
  top: "conv5_2"    # in-place activation
}
layer {
  name: "conv5_3"
  type: "Convolution"
  bottom: "conv5_2"
  top: "conv5_3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    kernel_size: 3
    dilation: 2
  }
}
layer {
  name: "relu5_3"
  type: "ReLU"
  bottom: "conv5_3"
  top: "conv5_3"    # in-place activation
}
# --- Classifier head: VGG's fc6/fc7 recast as convolutions so the net stays
# fully convolutional (fc6 as a 7x7 dilated conv, fc7 as a 1x1 conv),
# each followed by in-place ReLU and 50% dropout.
layer {
  name: "fc6"
  type: "Convolution"
  bottom: "conv5_3"
  top: "fc6"
  # weights: lr x1 with decay; bias: lr x2, no decay (positional convention)
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 4096
    kernel_size: 7
    dilation: 4     # 7x7 kernel dilated x4 — large receptive field, no downsampling
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"        # in-place activation
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"        # in-place dropout, active only during training
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc7"
  type: "Convolution"
  bottom: "fc6"
  top: "fc7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 4096
    kernel_size: 1  # pointwise — the fully-connected analogue
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"        # in-place activation
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc7"        # in-place dropout, active only during training
  dropout_param {
    dropout_ratio: 0.5
  }
}
# --- Final per-pixel classifier: 1x1 conv producing 21 score maps
# (PASCAL VOC's 20 object classes + background).
layer {
  name: "final"
  type: "Convolution"
  bottom: "fc7"
  top: "final"
  # weights: lr x1 with decay; bias: lr x2, no decay (positional convention)
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 21
    kernel_size: 1
    # This layer is trained from scratch (fillers given), unlike the
    # VGG-derived layers above which carry no fillers — presumably
    # initialized from pretrained weights; TODO confirm with the solver.
    weight_filler {
      type: "gaussian"
      std: 0.001    # small init keeps initial logits near zero
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
# --- Training objective and metric.
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "final"
  bottom: "label"
  top: "loss"
  loss_param {
    ignore_label: 255          # 255 marks void/ignore pixels in the labels
    normalization: VALID       # average loss over non-ignored pixels only
  }
}
layer {
  name: "accuracy"
  type: "Accuracy"
  bottom: "final"
  bottom: "label"
  top: "accuracy"
  accuracy_param {
    ignore_label: 255          # exclude void pixels from the accuracy count
  }
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement