name: "VGG_ILSVRC_16_layers"

input: "data"
input_shape {
  dim: 1
  dim: 3
  dim: 224
  dim: 224
}

input: "im_info"
input_shape {
  dim: 1
  dim: 3
}
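
# Note: the 224x224 "data" shape is only a placeholder; at test time the blob
# is reshaped to the actual preprocessed image size. "im_info" holds one row
# per image, assumed (as in py-faster-rcnn) to be
# [image_height, image_width, scale_factor].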

layer {
  name: "conv1_1"
  type: "Convolution"
  bottom: "data"
  top: "conv1_1"
  convolution_param {
    num_output: 64
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu1_1"
  type: "ReLU"
  bottom: "conv1_1"
  top: "conv1_1"
}
layer {
  name: "conv1_2"
  type: "Convolution"
  bottom: "conv1_1"
  top: "conv1_2"
  convolution_param {
    num_output: 64
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu1_2"
  type: "ReLU"
  bottom: "conv1_2"
  top: "conv1_2"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1_2"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2 stride: 2
  }
}
layer {
  name: "conv2_1"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2_1"
  convolution_param {
    num_output: 128
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu2_1"
  type: "ReLU"
  bottom: "conv2_1"
  top: "conv2_1"
}
layer {
  name: "conv2_2"
  type: "Convolution"
  bottom: "conv2_1"
  top: "conv2_2"
  convolution_param {
    num_output: 128
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu2_2"
  type: "ReLU"
  bottom: "conv2_2"
  top: "conv2_2"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2_2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2 stride: 2
  }
}
layer {
  name: "conv3_1"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3_1"
  convolution_param {
    num_output: 256
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu3_1"
  type: "ReLU"
  bottom: "conv3_1"
  top: "conv3_1"
}
layer {
  name: "conv3_2"
  type: "Convolution"
  bottom: "conv3_1"
  top: "conv3_2"
  convolution_param {
    num_output: 256
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu3_2"
  type: "ReLU"
  bottom: "conv3_2"
  top: "conv3_2"
}
layer {
  name: "conv3_3"
  type: "Convolution"
  bottom: "conv3_2"
  top: "conv3_3"
  convolution_param {
    num_output: 256
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu3_3"
  type: "ReLU"
  bottom: "conv3_3"
  top: "conv3_3"
}
layer {
  name: "pool3"
  type: "Pooling"
  bottom: "conv3_3"
  top: "pool3"
  pooling_param {
    pool: MAX
    kernel_size: 2 stride: 2
  }
}
layer {
  name: "conv4_1"
  type: "Convolution"
  bottom: "pool3"
  top: "conv4_1"
  convolution_param {
    num_output: 512
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu4_1"
  type: "ReLU"
  bottom: "conv4_1"
  top: "conv4_1"
}
layer {
  name: "conv4_2"
  type: "Convolution"
  bottom: "conv4_1"
  top: "conv4_2"
  convolution_param {
    num_output: 512
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu4_2"
  type: "ReLU"
  bottom: "conv4_2"
  top: "conv4_2"
}
layer {
  name: "conv4_3"
  type: "Convolution"
  bottom: "conv4_2"
  top: "conv4_3"
  convolution_param {
    num_output: 512
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu4_3"
  type: "ReLU"
  bottom: "conv4_3"
  top: "conv4_3"
}
layer {
  name: "pool4"
  type: "Pooling"
  bottom: "conv4_3"
  top: "pool4"
  pooling_param {
    pool: MAX
    kernel_size: 2 stride: 2
  }
}
layer {
  name: "conv5_1"
  type: "Convolution"
  bottom: "pool4"
  top: "conv5_1"
  convolution_param {
    num_output: 512
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu5_1"
  type: "ReLU"
  bottom: "conv5_1"
  top: "conv5_1"
}
layer {
  name: "conv5_2"
  type: "Convolution"
  bottom: "conv5_1"
  top: "conv5_2"
  convolution_param {
    num_output: 512
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu5_2"
  type: "ReLU"
  bottom: "conv5_2"
  top: "conv5_2"
}
layer {
  name: "conv5_3"
  type: "Convolution"
  bottom: "conv5_2"
  top: "conv5_3"
  convolution_param {
    num_output: 512
    pad: 1 kernel_size: 3
  }
}
layer {
  name: "relu5_3"
  type: "ReLU"
  bottom: "conv5_3"
  top: "conv5_3"
}
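
# conv5_3 is the last shared feature map. The four 2x2/stride-2 max pools
# above give it an effective stride of 2^4 = 16 pixels relative to the input,
# which matches the 'feat_stride': 16 passed to the proposal layer below.
# The RPN operates directly on conv5_3; VGG16's pool5 and fully connected
# layers are not used here.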

#========= RPN ============

layer {
  name: "rpn_conv/3x3"
  type: "Convolution"
  bottom: "conv5_3"
  top: "rpn/output"
  convolution_param {
    num_output: 512
    kernel_size: 3 pad: 1 stride: 1
  }
}
layer {
  name: "rpn_relu/3x3"
  type: "ReLU"
  bottom: "rpn/output"
  top: "rpn/output"
}

layer {
  name: "rpn_cls_score"
  type: "Convolution"
  bottom: "rpn/output"
  top: "rpn_cls_score"
  convolution_param {
    num_output: 18 # 2(bg/fg) * 9(anchors)
    kernel_size: 1 pad: 0 stride: 1
  }
}
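
# rpn_cls_score: 18 channels = 2 class scores (background/foreground) for each
# of the 9 anchors at every spatial position; in Faster R-CNN the 9 anchors
# are typically 3 scales x 3 aspect ratios.
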
layer {
  name: "rpn_bbox_pred"
  type: "Convolution"
  bottom: "rpn/output"
  top: "rpn_bbox_pred"
  convolution_param {
    num_output: 36 # 4 * 9(anchors)
    kernel_size: 1 pad: 0 stride: 1
  }
}
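
# rpn_bbox_pred: 36 channels = 4 box-regression deltas (dx, dy, dw, dh) for
# each of the same 9 anchors per spatial position.
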
layer {
  name: "rpn_cls_score_reshape"
  type: "Reshape"
  bottom: "rpn_cls_score"
  top: "rpn_cls_score_reshape"
  reshape_param { shape { dim: 0 dim: 2 dim: -1 dim: 0 } }
}
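
# Reshape semantics: dim: 0 copies the corresponding input dimension and
# dim: -1 is inferred, so the (N, 18, H, W) score map becomes (N, 2, 9*H, W).
# The Softmax below then normalizes over the 2 bg/fg channels of each anchor
# independently.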

#========= RoI Proposal ============

layer {
  name: "rpn_cls_prob"
  type: "Softmax"
  bottom: "rpn_cls_score_reshape"
  top: "rpn_cls_prob"
}
layer {
  name: "rpn_cls_prob_reshape"
  type: "Reshape"
  bottom: "rpn_cls_prob"
  top: "rpn_cls_prob_reshape"
  reshape_param { shape { dim: 0 dim: 18 dim: -1 dim: 0 } }
}
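
# rpn_cls_prob_reshape restores the (N, 18, H, W) layout expected by the
# proposal layer, now holding per-anchor probabilities instead of raw scores.
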
layer {
  name: "proposal"
  type: "Python"
  bottom: "rpn_cls_prob_reshape"
  bottom: "rpn_bbox_pred"
  bottom: "im_info"
  top: "rois"
  top: "scores"
  python_param {
    module: "rpn.proposal_layer"
    layer: "ProposalLayer"
    param_str: "'feat_stride': 16"
  }
}
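
# The Python layer requires Caffe built with WITH_PYTHON_LAYER := 1 and
# py-faster-rcnn's lib/ directory on PYTHONPATH so that rpn.proposal_layer can
# be imported. A minimal pycaffe usage sketch follows; the filenames, `im`
# (a preprocessed float32 HxWx3 image), and `scale` are assumptions, not part
# of this file:
#
#   import caffe
#   net = caffe.Net('faster_rcnn_test.pt',
#                   'VGG16_faster_rcnn_final.caffemodel', caffe.TEST)
#   net.blobs['data'].reshape(1, 3, im.shape[0], im.shape[1])
#   net.blobs['data'].data[...] = im.transpose(2, 0, 1)  # HWC -> CHW
#   net.blobs['im_info'].data[...] = [im.shape[0], im.shape[1], scale]
#   out = net.forward()
#   rois, scores = out['rois'], out['scores']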