Advertisement
Guest User

Untitled

a guest
Jul 20th, 2019
119
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 5.33 KB | None | 0 0
  1. input:"data"
  2. input_dim:1
  3. input_dim:3
  4. input_dim:1200
  5. input_dim:1200
  6.  
  7. input:"dataL"
  8. input_dim:1
  9. input_dim:4
  10. input_dim:1200
  11. input_dim:1200
  12.  
  13. input:"dataD"
  14. input_dim:1
  15. input_dim:4
  16. input_dim:1200
  17. input_dim:1200
  18.  
  19. layer {
  20. name: "conv_gA1"
  21. type: "Convolution"
  22. bottom: "dataL"
  23. top: "conv_gA1"
  24. convolution_param {
  25. num_output: 64
  26. kernel_size: 3
  27. stride: 1
  28. pad: 1
  29. weight_filler {
  30. type: "msra"
  31. }
  32. }
  33. }
  34.  
  35. layer {
  36. name: "relu_gA1"
  37. type: "ReLU"
  38. bottom: "conv_gA1"
  39. top: "conv_gA1"
  40. }
  41.  
  42. layer {
  43. name: "conv_gA2"
  44. type: "Convolution"
  45. bottom: "conv_gA1"
  46. top: "conv_gA2"
  47. convolution_param {
  48. num_output: 64
  49. kernel_size: 3
  50. stride: 1
  51. pad: 1
  52. weight_filler {
  53. type: "msra"
  54. }
  55. }
  56. }
  57.  
  58. layer {
  59. name: "relu_gA2"
  60. type: "ReLU"
  61. bottom: "conv_gA2"
  62. top: "conv_gA2"
  63. }
  64.  
  65. layer {
  66. name: "conv_gA3"
  67. type: "Convolution"
  68. bottom: "conv_gA2"
  69. top: "conv_gA3"
  70. convolution_param {
  71. num_output: 64
  72. kernel_size: 3
  73. stride: 1
  74. pad: 1
  75. weight_filler {
  76. type: "msra"
  77. }
  78. }
  79. }
  80.  
  81. layer {
  82. name: "relu_gA3"
  83. type: "ReLU"
  84. bottom: "conv_gA3"
  85. top: "conv_gA3"
  86. }
  87.  
  88. layer {
  89. name: "conv_gA4"
  90. type: "Convolution"
  91. bottom: "conv_gA3"
  92. top: "conv_gA4"
  93. convolution_param {
  94. num_output: 64
  95. kernel_size: 3
  96. stride: 1
  97. pad: 1
  98. weight_filler {
  99. type: "msra"
  100. }
  101. }
  102. }
  103.  
  104. layer {
  105. name: "relu_gA4"
  106. type: "ReLU"
  107. bottom: "conv_gA4"
  108. top: "conv_gA4"
  109. }
  110.  
  111. layer {
  112. name: "conv_gA5"
  113. type: "Convolution"
  114. bottom: "conv_gA4"
  115. top: "conv_gA5"
  116. convolution_param {
  117. num_output: 64
  118. kernel_size: 3
  119. stride: 1
  120. pad: 1
  121. weight_filler {
  122. type: "msra"
  123. }
  124. }
  125. }
  126.  
##############################################
  128.  
  129. layer {
  130. name: "conv_gB1"
  131. type: "Convolution"
  132. bottom: "dataD"
  133. top: "conv_gB1"
  134. convolution_param {
  135. num_output: 64
  136. kernel_size: 3
  137. stride: 1
  138. pad: 1
  139. weight_filler {
  140. type: "msra"
  141. }
  142. }
  143. }
  144.  
  145. layer {
  146. name: "relu_gB1"
  147. type: "ReLU"
  148. bottom: "conv_gB1"
  149. top: "conv_gB1"
  150. }
  151.  
  152. layer {
  153. name: "conv_gB2"
  154. type: "Convolution"
  155. bottom: "conv_gB1"
  156. top: "conv_gB2"
  157. convolution_param {
  158. num_output: 64
  159. kernel_size: 3
  160. stride: 1
  161. pad: 1
  162. weight_filler {
  163. type: "msra"
  164. }
  165. }
  166. }
  167.  
  168. layer {
  169. name: "relu_gB2"
  170. type: "ReLU"
  171. bottom: "conv_gB2"
  172. top: "conv_gB2"
  173. }
  174.  
  175. layer {
  176. name: "conv_gB3"
  177. type: "Convolution"
  178. bottom: "conv_gB2"
  179. top: "conv_gB3"
  180. convolution_param {
  181. num_output: 64
  182. kernel_size: 3
  183. stride: 1
  184. pad: 1
  185. weight_filler {
  186. type: "msra"
  187. }
  188. }
  189. }
  190.  
  191. layer {
  192. name: "relu_gB3"
  193. type: "ReLU"
  194. bottom: "conv_gB3"
  195. top: "conv_gB3"
  196. }
  197.  
  198. layer {
  199. name: "conv_gB4"
  200. type: "Convolution"
  201. bottom: "conv_gB3"
  202. top: "conv_gB4"
  203. convolution_param {
  204. num_output: 64
  205. kernel_size: 3
  206. stride: 1
  207. pad: 1
  208. weight_filler {
  209. type: "msra"
  210. }
  211. }
  212. }
  213.  
  214. layer {
  215. name: "relu_gB4"
  216. type: "ReLU"
  217. bottom: "conv_gB4"
  218. top: "conv_gB4"
  219. }
  220.  
  221. layer {
  222. name: "conv_gB5"
  223. type: "Convolution"
  224. bottom: "conv_gB4"
  225. top: "conv_gB5"
  226. convolution_param {
  227. num_output: 64
  228. kernel_size: 3
  229. stride: 1
  230. pad: 1
  231. weight_filler {
  232. type: "msra"
  233. }
  234. }
  235. }
  236.  
##############################################
  238. layer {
  239. name: "Concat"
  240. type: "Concat"
  241. bottom: "conv_gA5"
  242. bottom: "conv_gB5"
  243. top: "Concat"
  244. }
  245.  
  246. layer {
  247. name: "conv_g6"
  248. type: "Convolution"
  249. bottom: "Concat"
  250. top: "conv_g6"
  251. convolution_param {
  252. num_output: 64
  253. kernel_size: 3
  254. stride: 1
  255. pad: 1
  256. weight_filler {
  257. type: "msra"
  258. }
  259. }
  260. }
  261.  
  262. layer {
  263. name: "conv_g7"
  264. type: "Convolution"
  265. bottom: "conv_g6"
  266. top: "conv_g7"
  267. convolution_param {
  268. num_output: 64
  269. kernel_size: 3
  270. stride: 1
  271. pad: 1
  272. weight_filler {
  273. type: "msra"
  274. }
  275. }
  276. }
  277.  
  278. layer {
  279. name: "relu_g7"
  280. type: "ReLU"
  281. bottom: "conv_g7"
  282. top: "conv_g7"
  283. }
  284.  
  285. layer {
  286. name: "conv_g8"
  287. type: "Convolution"
  288. bottom: "conv_g7"
  289. top: "conv_g8"
  290. convolution_param {
  291. num_output: 64
  292. kernel_size: 3
  293. stride: 1
  294. pad: 1
  295. weight_filler {
  296. type: "msra"
  297. }
  298. }
  299. }
  300.  
  301. layer {
  302. name: "relu_g8"
  303. type: "ReLU"
  304. bottom: "conv_g8"
  305. top: "conv_g8"
  306. }
  307.  
  308. layer {
  309. name: "conv_g9"
  310. type: "Convolution"
  311. bottom: "conv_g8"
  312. top: "conv_g9"
  313. convolution_param {
  314. num_output: 64
  315. kernel_size: 3
  316. stride: 1
  317. pad: 1
  318. weight_filler {
  319. type: "msra"
  320. }
  321. }
  322. }
  323.  
  324. layer {
  325. name: "relu_g9"
  326. type: "ReLU"
  327. bottom: "conv_g9"
  328. top: "conv_g9"
  329. }
  330.  
  331. layer {
  332. name: "conv_g10"
  333. type: "Convolution"
  334. bottom: "conv_g9"
  335. top: "conv_g10"
  336. convolution_param {
  337. num_output: 64
  338. kernel_size: 3
  339. stride: 1
  340. pad: 1
  341. weight_filler {
  342. type: "msra"
  343. }
  344. }
  345. }
  346.  
  347. layer {
  348. name: "relu_g10"
  349. type: "ReLU"
  350. bottom: "conv_g10"
  351. top: "conv_g10"
  352. }
  353.  
  354. layer {
  355. name: "conv_g11"
  356. type: "Convolution"
  357. bottom: "conv_g10"
  358. top: "conv_g11"
  359. convolution_param {
  360. num_output: 64
  361. kernel_size: 3
  362. stride: 1
  363. pad: 1
  364. weight_filler {
  365. type: "msra"
  366. }
  367. }
  368. }
  369.  
  370. layer {
  371. name: "relu_g11"
  372. type: "ReLU"
  373. bottom: "conv_g11"
  374. top: "conv_g11"
  375. }
  376.  
  377. layer {
  378. name: "conv_g12"
  379. type: "Convolution"
  380. bottom: "conv_g11"
  381. top: "conv_g12"
  382. convolution_param {
  383. num_output: 3
  384. kernel_size: 3
  385. stride: 1
  386. pad: 1
  387. weight_filler {
  388. type: "msra"
  389. }
  390. }
  391. }
  392.  
  393. layer {
  394. name: "sum"
  395. type: "Eltwise"
  396. bottom: "data"
  397. bottom: "conv_g12"
  398. top: "sum"
  399. }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement