Advertisement
Guest User

prototxt

a guest
Oct 26th, 2015
280
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 8.23 KB | None | 0 0
# Two-path convolutional network, Caffe V1 prototxt syntax
# (`layers`, enum layer types, `blobs_lr` — predates the V2 `layer`/`lr_mult` form).
name: "CNN"
# Input: pre-built HDF5 batches; train.txt lists the .h5 file paths.
# Produces two tops: "data" (features) and "label" (targets).
layers {
  name: "data"
  type: HDF5_DATA
  top: "data"
  top: "label"
  hdf5_data_param {
    source: ".../data/train.txt"
    batch_size: 256
    # NOTE(review): shuffling is disabled for what looks like training data —
    # confirm this is intentional (ordered batches can bias SGD).
    shuffle: false
  }
}
# hzvt path: horizontal -> vertical
# (four wide 1-row kernels scan horizontally, then hzvt5 applies a tall 1-column kernel).
layers {
  name: "hzvt1"
  type: CONVOLUTION
  bottom: "data"
  top: "hzvt1"
  blobs_lr: 1  # learning-rate multiplier for weights
  blobs_lr: 2  # learning-rate multiplier for bias
  convolution_param {
    num_output: 16
    kernel_w: 6  # 6-wide x 1-tall kernel: horizontal receptive field
    kernel_h: 1
    stride_w: 1
    stride_h: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"  # no explicit value: Caffe's constant filler defaults to 0
    }
  }
}
# In-place leaky ReLU; the same negative slope (0.2417) is used after every layer.
layers {
  name: "relu_hzvt1"
  type: RELU
  relu_param {
    negative_slope: 0.2417
  }
  bottom: "hzvt1"
  top: "hzvt1"
}
# hzvt stage 2: 7x1 horizontal convolution, 16 -> 22 channels.
layers {
  name: "hzvt2"
  type: CONVOLUTION
  bottom: "hzvt1"
  top: "hzvt2"
  blobs_lr: 1  # weights LR multiplier
  blobs_lr: 2  # bias LR multiplier
  convolution_param {
    num_output: 22
    kernel_w: 7
    kernel_h: 1
    stride_w: 1
    stride_h: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
    }
  }
}
# In-place leaky ReLU.
layers {
  name: "relu_hzvt2"
  type: RELU
  relu_param {
    negative_slope: 0.2417
  }
  bottom: "hzvt2"
  top: "hzvt2"
}
# hzvt stage 3: 7x1 horizontal convolution, 22 -> 30 channels.
layers {
  name: "hzvt3"
  type: CONVOLUTION
  bottom: "hzvt2"
  top: "hzvt3"
  blobs_lr: 1  # weights LR multiplier
  blobs_lr: 2  # bias LR multiplier
  convolution_param {
    num_output: 30
    kernel_w: 7
    kernel_h: 1
    stride_w: 1
    stride_h: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
    }
  }
}
# In-place leaky ReLU.
layers {
  name: "relu_hzvt3"
  type: RELU
  relu_param {
    negative_slope: 0.2417
  }
  bottom: "hzvt3"
  top: "hzvt3"
}
# hzvt stage 4: 7x1 horizontal convolution, 30 -> 41 channels.
layers {
  name: "hzvt4"
  type: CONVOLUTION
  bottom: "hzvt3"
  top: "hzvt4"
  blobs_lr: 1  # weights LR multiplier
  blobs_lr: 2  # bias LR multiplier
  convolution_param {
    num_output: 41
    kernel_w: 7
    kernel_h: 1
    stride_w: 1
    stride_h: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
    }
  }
}
# In-place leaky ReLU.
layers {
  name: "relu_hzvt4"
  type: RELU
  relu_param {
    negative_slope: 0.2417
  }
  bottom: "hzvt4"
  top: "hzvt4"
}
# hzvt stage 5: orientation flips here — 1x7 VERTICAL kernel, 41 -> 400 channels.
# This is the "-> vertical" half of the hzvt path.
layers {
  name: "hzvt5"
  type: CONVOLUTION
  bottom: "hzvt4"
  top: "hzvt5"
  blobs_lr: 1  # weights LR multiplier
  blobs_lr: 2  # bias LR multiplier
  convolution_param {
    num_output: 400
    kernel_w: 1  # 1-wide x 7-tall kernel: vertical receptive field
    kernel_h: 7
    stride_w: 1
    stride_h: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
    }
  }
}
# In-place leaky ReLU.
layers {
  name: "relu_hzvt5"
  type: RELU
  relu_param {
    negative_slope: 0.2417
  }
  bottom: "hzvt5"
  top: "hzvt5"
}
# Dropout on the hzvt path output before it is concatenated with the vthz path.
layers {
  name: "drop_hzvt5"
  type: DROPOUT
  bottom: "hzvt5"
  top: "hzvt5"
  dropout_param {
    dropout_ratio: 0.44
  }
}
# vthz path: vertical -> horizontal
# (mirror of hzvt: vthz1 applies a tall 1-column kernel first, then 1-row kernels).
# Reads the same "data" bottom as hzvt1, so the two paths run in parallel.
layers {
  name: "vthz1"
  type: CONVOLUTION
  bottom: "data"
  top: "vthz1"
  blobs_lr: 1  # weights LR multiplier
  blobs_lr: 2  # bias LR multiplier
  convolution_param {
    num_output: 16
    kernel_w: 1  # 1-wide x 7-tall kernel: vertical receptive field
    kernel_h: 7
    stride_w: 1
    stride_h: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
    }
  }
}
# In-place leaky ReLU.
layers {
  name: "relu_vthz1"
  type: RELU
  relu_param {
    negative_slope: 0.2417
  }
  bottom: "vthz1"
  top: "vthz1"
}
# vthz stage 2: 6x1 horizontal convolution, 16 -> 22 channels.
layers {
  name: "vthz2"
  type: CONVOLUTION
  bottom: "vthz1"
  top: "vthz2"
  blobs_lr: 1  # weights LR multiplier
  blobs_lr: 2  # bias LR multiplier
  convolution_param {
    num_output: 22
    kernel_w: 6
    kernel_h: 1
    stride_w: 1
    stride_h: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
    }
  }
}
# In-place leaky ReLU.
layers {
  name: "relu_vthz2"
  type: RELU
  relu_param {
    negative_slope: 0.2417
  }
  bottom: "vthz2"
  top: "vthz2"
}
# vthz stage 3: 7x1 horizontal convolution, 22 -> 30 channels.
layers {
  name: "vthz3"
  type: CONVOLUTION
  bottom: "vthz2"
  top: "vthz3"
  blobs_lr: 1  # weights LR multiplier
  blobs_lr: 2  # bias LR multiplier
  convolution_param {
    num_output: 30
    kernel_w: 7
    kernel_h: 1
    stride_w: 1
    stride_h: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
    }
  }
}
# In-place leaky ReLU.
layers {
  name: "relu_vthz3"
  type: RELU
  relu_param {
    negative_slope: 0.2417
  }
  bottom: "vthz3"
  top: "vthz3"
}
# vthz stage 4: 7x1 horizontal convolution, 30 -> 41 channels.
layers {
  name: "vthz4"
  type: CONVOLUTION
  bottom: "vthz3"
  top: "vthz4"
  blobs_lr: 1  # weights LR multiplier
  blobs_lr: 2  # bias LR multiplier
  convolution_param {
    num_output: 41
    kernel_w: 7
    kernel_h: 1
    stride_w: 1
    stride_h: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
    }
  }
}
# In-place leaky ReLU.
layers {
  name: "relu_vthz4"
  type: RELU
  relu_param {
    negative_slope: 0.2417
  }
  bottom: "vthz4"
  top: "vthz4"
}
# vthz stage 5: 7x1 horizontal convolution, 41 -> 400 channels.
# NOTE(review): this stays horizontal (kernel_w:7, kernel_h:1), whereas the
# mirrored hzvt5 flips to vertical (1x7). The asymmetry may be intentional
# (vthz went vertical at stage 1) — confirm against the intended architecture.
layers {
  name: "vthz5"
  type: CONVOLUTION
  bottom: "vthz4"
  top: "vthz5"
  blobs_lr: 1  # weights LR multiplier
  blobs_lr: 2  # bias LR multiplier
  convolution_param {
    num_output: 400
    kernel_w: 7
    kernel_h: 1
    stride_w: 1
    stride_h: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
    }
  }
}
# In-place leaky ReLU.
layers {
  name: "relu_vthz5"
  type: RELU
  relu_param {
    negative_slope: 0.2417
  }
  bottom: "vthz5"
  top: "vthz5"
}
# Dropout on the vthz path output before concatenation with the hzvt path.
layers {
  name: "drop_vthz5"
  type: DROPOUT
  bottom: "vthz5"
  top: "vthz5"
  dropout_param {
    dropout_ratio: 0.44
  }
}
# merge paths: concatenate the two 400-channel path outputs
# (CONCAT default axis is the channel axis, giving 800 channels).
layers {
  name: "combined"
  type: CONCAT
  bottom: "hzvt5"
  bottom: "vthz5"
  top: "combined"
}
# First fully connected layer on the merged features.
layers {
  name: "fc1"
  type: INNER_PRODUCT
  bottom: "combined"
  top: "fc1"
  blobs_lr: 1     # weights LR multiplier
  blobs_lr: 2     # bias LR multiplier
  weight_decay: 1 # decay applies to weights...
  weight_decay: 0 # ...but not to the bias
  inner_product_param {
    num_output: 400
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
# Leaky ReLU — NOT in-place here: writes to a new top "fc1_mod"
# (unlike every other ReLU in this net, which reuses its bottom name).
layers {
  name: "relu_fc1"
  type: RELU
  relu_param {
    negative_slope: 0.2417
  }
  bottom: "fc1"
  top: "fc1_mod"
}
# Dropout on the FC features (in-place on "fc1_mod").
layers {
  name: "drop_fc1"
  type: DROPOUT
  bottom: "fc1_mod"
  top: "fc1_mod"
  dropout_param {
    dropout_ratio: 0.44
  }
}
# Output layer: 2-way classification logits.
layers {
  name: "final"
  type: INNER_PRODUCT
  bottom: "fc1_mod"
  top: "final"
  blobs_lr: 1     # weights LR multiplier
  blobs_lr: 2     # bias LR multiplier
  weight_decay: 1 # decay on weights only
  weight_decay: 0
  inner_product_param {
    num_output: 2
    weight_filler {
      type: "xavier"  # xavier here vs. msra elsewhere in this net
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
# Explicit softmax producing probabilities in "pred" (for monitoring/inference).
layers {
  name: "pred"
  type: SOFTMAX
  bottom: "final"
  top: "pred"
}
# Training loss: SOFTMAX_LOSS applies its own softmax internally, so it takes
# the raw logits "final" (not "pred") together with "label".
layers {
  name: "loss"
  type: SOFTMAX_LOSS
  bottom: "final"
  bottom: "label"
  top: "loss"
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement