  1. name: "R-CNN-exploration"
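# Input layers: training and validation data read from LMDB, with the dataset mean subtracted.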
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TRAIN
  }
  transform_param {
    mean_file: "data/exploration/explo_mean.binaryproto"
  }
  data_param {
    source: "data/exploration/explo_train_lmdb"
    batch_size: 32
    backend: LMDB
  }
}
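# Validation input: same mean file, larger batch size (512), reads the validation LMDB; active only in the TEST phase.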
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TEST
  }
  transform_param {
    mean_file: "data/exploration/explo_mean.binaryproto"
  }
  data_param {
    source: "data/exploration/explo_val_lmdb"
    batch_size: 512
    backend: LMDB
  }
}

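# Feature extractor, CaffeNet/AlexNet-style: conv1 (96 x 11x11, stride 4) and conv2 (256 x 5x5, grouped), each followed by ReLU, max pooling, and local response normalization.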
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 96
    kernel_size: 11
    stride: 4
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "norm1"
  type: "LRN"
  bottom: "pool1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "norm1"
  top: "conv2"
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
    group: 2
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "norm2"
  type: "LRN"
  bottom: "pool2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
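# Deeper 3x3 convolutions: conv3 (384), conv4 (384, grouped) and conv5 (256, grouped), followed by a final max pooling (pool5).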
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "norm2"
  top: "conv3"
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4"
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    group: 2
  }
}
layer {
  name: "relu4"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv4"
  top: "conv5"
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    group: 2
  }
}
layer {
  name: "relu5"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
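# Fully connected head: fc6 and fc7 with 4096 units each, ReLU, and 50% dropout.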
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  inner_product_param {
    num_output: 4096
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  inner_product_param {
    num_output: 4096
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
# Classification layer in the style of the R-CNN ILSVRC13 fc-rcnn (SVM) layer, resized to the 36 classes of the exploration task.
layer {
  name: "fc-rcnn"
  type: "InnerProduct"
  bottom: "fc7"
  top: "fc-rcnn"
  inner_product_param {
    num_output: 36
  }
}

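# Evaluation and training targets: accuracy is reported only in the TEST phase; the softmax loss drives training.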
layer {
  name: "accuracy"
  type: "Accuracy"
  bottom: "fc-rcnn"
  bottom: "label"
  top: "accuracy"
  include {
    phase: TEST
  }
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "fc-rcnn"
  bottom: "label"
  top: "loss"
}
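To train this net, Caffe also needs a separate solver definition that points at the file above and is passed to the caffe tool (e.g. caffe train -solver solver.prototxt, optionally with -weights for fine-tuning from pre-trained CaffeNet/R-CNN weights). A minimal SGD solver sketch follows; the net path, test_iter, learning-rate schedule, iteration counts, and snapshot prefix are assumptions and should be adjusted to the actual dataset size and hardware.

# solver.prototxt -- minimal sketch with placeholder values
net: "models/exploration/train_val.prototxt"   # assumed path to the net above
test_iter: 20                                   # validation batches per test pass (placeholder)
test_interval: 1000
base_lr: 0.001
lr_policy: "step"
gamma: 0.1
stepsize: 10000
momentum: 0.9
weight_decay: 0.0005
display: 100
max_iter: 50000
snapshot: 5000
snapshot_prefix: "models/exploration/rcnn_exploration"
solver_mode: GPU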