hybridCNN failed for nvdla_compiler

name: "hybridnet"
input: "data"
input_dim: 10
input_dim: 3
input_dim: 227
input_dim: 227
layers {
  layer {
    name: "conv1"
    type: "conv"
    num_output: 96
    kernelsize: 11
    stride: 4
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0.
    }
    blobs_lr: 1.
    blobs_lr: 2.
    weight_decay: 1.
    weight_decay: 0.
  }
  bottom: "data"
  top: "conv1"
}
layers {
  layer {
    name: "relu1"
    type: "relu"
  }
  bottom: "conv1"
  top: "conv1"
}
layers {
  layer {
    name: "pool1"
    type: "pool"
    pool: MAX
    kernelsize: 3
    stride: 2
  }
  bottom: "conv1"
  top: "pool1"
}
layers {
  layer {
    name: "norm1"
    type: "lrn"
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
  bottom: "pool1"
  top: "norm1"
}
layers {
  layer {
    name: "conv2"
    type: "conv"
    num_output: 256
    group: 2
    kernelsize: 5
    pad: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 1.
    }
    blobs_lr: 1.
    blobs_lr: 2.
    weight_decay: 1.
    weight_decay: 0.
  }
  bottom: "norm1"
  top: "conv2"
}
layers {
  layer {
    name: "relu2"
    type: "relu"
  }
  bottom: "conv2"
  top: "conv2"
}
layers {
  layer {
    name: "pool2"
    type: "pool"
    pool: MAX
    kernelsize: 3
    stride: 2
  }
  bottom: "conv2"
  top: "pool2"
}
layers {
  layer {
    name: "norm2"
    type: "lrn"
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
  bottom: "pool2"
  top: "norm2"
}
layers {
  layer {
    name: "conv3"
    type: "conv"
    num_output: 384
    kernelsize: 3
    pad: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0.
    }
    blobs_lr: 1.
    blobs_lr: 2.
    weight_decay: 1.
    weight_decay: 0.
  }
  bottom: "norm2"
  top: "conv3"
}
layers {
  layer {
    name: "relu3"
    type: "relu"
  }
  bottom: "conv3"
  top: "conv3"
}
layers {
  layer {
    name: "conv4"
    type: "conv"
    num_output: 384
    group: 2
    kernelsize: 3
    pad: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 1.
    }
    blobs_lr: 1.
    blobs_lr: 2.
    weight_decay: 1.
    weight_decay: 0.
  }
  bottom: "conv3"
  top: "conv4"
}
layers {
  layer {
    name: "relu4"
    type: "relu"
  }
  bottom: "conv4"
  top: "conv4"
}
layers {
  layer {
    name: "conv5"
    type: "conv"
    num_output: 256
    group: 2
    kernelsize: 3
    pad: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 1.
    }
    blobs_lr: 1.
    blobs_lr: 2.
    weight_decay: 1.
    weight_decay: 0.
  }
  bottom: "conv4"
  top: "conv5"
}
layers {
  layer {
    name: "relu5"
    type: "relu"
  }
  bottom: "conv5"
  top: "conv5"
}
layers {
  layer {
    name: "pool5"
    type: "pool"
    kernelsize: 3
    pool: MAX
    stride: 2
  }
  bottom: "conv5"
  top: "pool5"
}
layers {
  layer {
    name: "fc6"
    type: "innerproduct"
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 1.
    }
    blobs_lr: 1.
    blobs_lr: 2.
    weight_decay: 1.
    weight_decay: 0.
  }
  bottom: "pool5"
  top: "fc6"
}
layers {
  layer {
    name: "relu6"
    type: "relu"
  }
  bottom: "fc6"
  top: "fc6"
}
layers {
  layer {
    name: "drop6"
    type: "dropout"
    dropout_ratio: 0.5
  }
  bottom: "fc6"
  top: "fc6"
}
layers {
  layer {
    name: "fc7"
    type: "innerproduct"
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 1.
    }
    blobs_lr: 1.
    blobs_lr: 2.
    weight_decay: 1.
    weight_decay: 0.
  }
  bottom: "fc6"
  top: "fc7"
}
layers {
  layer {
    name: "relu7"
    type: "relu"
  }
  bottom: "fc7"
  top: "fc7"
}
layers {
  layer {
    name: "drop7"
    type: "dropout"
    dropout_ratio: 0.5
  }
  bottom: "fc7"
  top: "fc7"
}
layers {
  layer {
    name: "fc8"
    type: "innerproduct"
    num_output: 1183
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
    blobs_lr: 1.
    blobs_lr: 2.
    weight_decay: 1.
    weight_decay: 0.
  }
  bottom: "fc7"
  top: "fc8"
}
layers {
  layer {
    name: "prob"
    type: "softmax"
  }
  bottom: "fc8"
  top: "prob"
}