Advertisement
Guest User

Untitled

a guest
Feb 22nd, 2019
90
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 58.84 KB | None | 0 0
  1. name: "mxnet-model"
  2. layer {
  3. name: "data"
  4. type: "Input"
  5. top: "data"
  6. input_param {
  7. shape: { dim: 1 dim: 3 dim: 112 dim: 112 }
  8. }
  9. }
  10.  
  11. layer {
  12. bottom: "data"
  13. top: "conv_1_conv2d"
  14. name: "conv_1_conv2d"
  15. type: "Convolution"
  16. convolution_param {
  17. num_output: 64
  18. kernel_size: 3
  19. pad: 1
  20. stride: 2
  21. bias_term: false
  22. }
  23. }
  24.  
  25. layer {
  26. bottom: "conv_1_conv2d"
  27. top: "conv_1_batchnorm"
  28. name: "conv_1_batchnorm"
  29. type: "BatchNorm"
  30. batch_norm_param {
  31. use_global_stats: true
  32. moving_average_fraction: 0.9
  33. eps: 0.001
  34. }
  35. }
  36. layer {
  37. bottom: "conv_1_batchnorm"
  38. top: "conv_1_batchnorm"
  39. name: "conv_1_batchnorm_scale"
  40. type: "Scale"
  41. scale_param { bias_term: true }
  42. }
  43.  
  44. layer {
  45. bottom: "conv_1_batchnorm"
  46. top: "conv_1_relu"
  47. name: "conv_1_relu"
  48. type: "PReLU"
  49. }
  50.  
  51. layer {
  52. bottom: "conv_1_relu"
  53. top: "res_2_block0_conv_sep_conv2d"
  54. name: "res_2_block0_conv_sep_conv2d"
  55. type: "Convolution"
  56. convolution_param {
  57. num_output: 64
  58. kernel_size: 1
  59. pad: 0
  60. stride: 1
  61. bias_term: false
  62. }
  63. }
  64.  
  65. layer {
  66. bottom: "res_2_block0_conv_sep_conv2d"
  67. top: "res_2_block0_conv_sep_batchnorm"
  68. name: "res_2_block0_conv_sep_batchnorm"
  69. type: "BatchNorm"
  70. batch_norm_param {
  71. use_global_stats: true
  72. moving_average_fraction: 0.9
  73. eps: 0.001
  74. }
  75. }
  76. layer {
  77. bottom: "res_2_block0_conv_sep_batchnorm"
  78. top: "res_2_block0_conv_sep_batchnorm"
  79. name: "res_2_block0_conv_sep_batchnorm_scale"
  80. type: "Scale"
  81. scale_param { bias_term: true }
  82. }
  83.  
  84. layer {
  85. bottom: "res_2_block0_conv_sep_batchnorm"
  86. top: "res_2_block0_conv_sep_relu"
  87. name: "res_2_block0_conv_sep_relu"
  88. type: "PReLU"
  89. }
  90.  
  91. layer {
  92. bottom: "res_2_block0_conv_sep_relu"
  93. top: "res_2_block0_conv_dw_conv2d"
  94. name: "res_2_block0_conv_dw_conv2d"
  95. type: "Convolution"
  96. convolution_param {
  97. num_output: 64
  98. kernel_size: 3
  99. pad: 1
  100. group: 64
  101. engine: CAFFE
  102. stride: 1
  103. bias_term: false
  104. }
  105. }
  106.  
  107. layer {
  108. bottom: "res_2_block0_conv_dw_conv2d"
  109. top: "res_2_block0_conv_dw_batchnorm"
  110. name: "res_2_block0_conv_dw_batchnorm"
  111. type: "BatchNorm"
  112. batch_norm_param {
  113. use_global_stats: true
  114. moving_average_fraction: 0.9
  115. eps: 0.001
  116. }
  117. }
  118. layer {
  119. bottom: "res_2_block0_conv_dw_batchnorm"
  120. top: "res_2_block0_conv_dw_batchnorm"
  121. name: "res_2_block0_conv_dw_batchnorm_scale"
  122. type: "Scale"
  123. scale_param { bias_term: true }
  124. }
  125.  
  126. layer {
  127. bottom: "res_2_block0_conv_dw_batchnorm"
  128. top: "res_2_block0_conv_dw_relu"
  129. name: "res_2_block0_conv_dw_relu"
  130. type: "PReLU"
  131. }
  132.  
  133. layer {
  134. bottom: "res_2_block0_conv_dw_relu"
  135. top: "res_2_block0_conv_proj_conv2d"
  136. name: "res_2_block0_conv_proj_conv2d"
  137. type: "Convolution"
  138. convolution_param {
  139. num_output: 64
  140. kernel_size: 1
  141. pad: 0
  142. stride: 1
  143. bias_term: false
  144. }
  145. }
  146.  
  147. layer {
  148. bottom: "res_2_block0_conv_proj_conv2d"
  149. top: "res_2_block0_conv_proj_batchnorm"
  150. name: "res_2_block0_conv_proj_batchnorm"
  151. type: "BatchNorm"
  152. batch_norm_param {
  153. use_global_stats: true
  154. moving_average_fraction: 0.9
  155. eps: 0.001
  156. }
  157. }
  158. layer {
  159. bottom: "res_2_block0_conv_proj_batchnorm"
  160. top: "res_2_block0_conv_proj_batchnorm"
  161. name: "res_2_block0_conv_proj_batchnorm_scale"
  162. type: "Scale"
  163. scale_param { bias_term: true }
  164. }
  165.  
  166. layer {
  167. name: "_plus0"
  168. type: "Eltwise"
  169. bottom: "res_2_block0_conv_proj_batchnorm"
  170. bottom: "conv_1_relu"
  171. top: "_plus0"
  172. eltwise_param { operation: SUM }
  173. }
  174.  
  175. layer {
  176. bottom: "_plus0"
  177. top: "res_2_block1_conv_sep_conv2d"
  178. name: "res_2_block1_conv_sep_conv2d"
  179. type: "Convolution"
  180. convolution_param {
  181. num_output: 64
  182. kernel_size: 1
  183. pad: 0
  184. stride: 1
  185. bias_term: false
  186. }
  187. }
  188.  
  189. layer {
  190. bottom: "res_2_block1_conv_sep_conv2d"
  191. top: "res_2_block1_conv_sep_batchnorm"
  192. name: "res_2_block1_conv_sep_batchnorm"
  193. type: "BatchNorm"
  194. batch_norm_param {
  195. use_global_stats: true
  196. moving_average_fraction: 0.9
  197. eps: 0.001
  198. }
  199. }
  200. layer {
  201. bottom: "res_2_block1_conv_sep_batchnorm"
  202. top: "res_2_block1_conv_sep_batchnorm"
  203. name: "res_2_block1_conv_sep_batchnorm_scale"
  204. type: "Scale"
  205. scale_param { bias_term: true }
  206. }
  207.  
  208. layer {
  209. bottom: "res_2_block1_conv_sep_batchnorm"
  210. top: "res_2_block1_conv_sep_relu"
  211. name: "res_2_block1_conv_sep_relu"
  212. type: "PReLU"
  213. }
  214.  
  215. layer {
  216. bottom: "res_2_block1_conv_sep_relu"
  217. top: "res_2_block1_conv_dw_conv2d"
  218. name: "res_2_block1_conv_dw_conv2d"
  219. type: "Convolution"
  220. convolution_param {
  221. num_output: 64
  222. kernel_size: 3
  223. pad: 1
  224. group: 64
  225. engine: CAFFE
  226. stride: 1
  227. bias_term: false
  228. }
  229. }
  230.  
  231. layer {
  232. bottom: "res_2_block1_conv_dw_conv2d"
  233. top: "res_2_block1_conv_dw_batchnorm"
  234. name: "res_2_block1_conv_dw_batchnorm"
  235. type: "BatchNorm"
  236. batch_norm_param {
  237. use_global_stats: true
  238. moving_average_fraction: 0.9
  239. eps: 0.001
  240. }
  241. }
  242. layer {
  243. bottom: "res_2_block1_conv_dw_batchnorm"
  244. top: "res_2_block1_conv_dw_batchnorm"
  245. name: "res_2_block1_conv_dw_batchnorm_scale"
  246. type: "Scale"
  247. scale_param { bias_term: true }
  248. }
  249.  
  250. layer {
  251. bottom: "res_2_block1_conv_dw_batchnorm"
  252. top: "res_2_block1_conv_dw_relu"
  253. name: "res_2_block1_conv_dw_relu"
  254. type: "PReLU"
  255. }
  256.  
  257. layer {
  258. bottom: "res_2_block1_conv_dw_relu"
  259. top: "res_2_block1_conv_proj_conv2d"
  260. name: "res_2_block1_conv_proj_conv2d"
  261. type: "Convolution"
  262. convolution_param {
  263. num_output: 64
  264. kernel_size: 1
  265. pad: 0
  266. stride: 1
  267. bias_term: false
  268. }
  269. }
  270.  
  271. layer {
  272. bottom: "res_2_block1_conv_proj_conv2d"
  273. top: "res_2_block1_conv_proj_batchnorm"
  274. name: "res_2_block1_conv_proj_batchnorm"
  275. type: "BatchNorm"
  276. batch_norm_param {
  277. use_global_stats: true
  278. moving_average_fraction: 0.9
  279. eps: 0.001
  280. }
  281. }
  282. layer {
  283. bottom: "res_2_block1_conv_proj_batchnorm"
  284. top: "res_2_block1_conv_proj_batchnorm"
  285. name: "res_2_block1_conv_proj_batchnorm_scale"
  286. type: "Scale"
  287. scale_param { bias_term: true }
  288. }
  289.  
  290. layer {
  291. name: "_plus1"
  292. type: "Eltwise"
  293. bottom: "res_2_block1_conv_proj_batchnorm"
  294. bottom: "_plus0"
  295. top: "_plus1"
  296. eltwise_param { operation: SUM }
  297. }
  298.  
  299. layer {
  300. bottom: "_plus1"
  301. top: "dconv_23_conv_sep_conv2d"
  302. name: "dconv_23_conv_sep_conv2d"
  303. type: "Convolution"
  304. convolution_param {
  305. num_output: 128
  306. kernel_size: 1
  307. pad: 0
  308. stride: 1
  309. bias_term: false
  310. }
  311. }
  312.  
  313. layer {
  314. bottom: "dconv_23_conv_sep_conv2d"
  315. top: "dconv_23_conv_sep_batchnorm"
  316. name: "dconv_23_conv_sep_batchnorm"
  317. type: "BatchNorm"
  318. batch_norm_param {
  319. use_global_stats: true
  320. moving_average_fraction: 0.9
  321. eps: 0.001
  322. }
  323. }
  324. layer {
  325. bottom: "dconv_23_conv_sep_batchnorm"
  326. top: "dconv_23_conv_sep_batchnorm"
  327. name: "dconv_23_conv_sep_batchnorm_scale"
  328. type: "Scale"
  329. scale_param { bias_term: true }
  330. }
  331.  
  332. layer {
  333. bottom: "dconv_23_conv_sep_batchnorm"
  334. top: "dconv_23_conv_sep_relu"
  335. name: "dconv_23_conv_sep_relu"
  336. type: "PReLU"
  337. }
  338.  
  339. layer {
  340. bottom: "dconv_23_conv_sep_relu"
  341. top: "dconv_23_conv_dw_conv2d"
  342. name: "dconv_23_conv_dw_conv2d"
  343. type: "Convolution"
  344. convolution_param {
  345. num_output: 128
  346. kernel_size: 3
  347. pad: 1
  348. group: 128
  349. engine: CAFFE
  350. stride: 2
  351. bias_term: false
  352. }
  353. }
  354.  
  355. layer {
  356. bottom: "dconv_23_conv_dw_conv2d"
  357. top: "dconv_23_conv_dw_batchnorm"
  358. name: "dconv_23_conv_dw_batchnorm"
  359. type: "BatchNorm"
  360. batch_norm_param {
  361. use_global_stats: true
  362. moving_average_fraction: 0.9
  363. eps: 0.001
  364. }
  365. }
  366. layer {
  367. bottom: "dconv_23_conv_dw_batchnorm"
  368. top: "dconv_23_conv_dw_batchnorm"
  369. name: "dconv_23_conv_dw_batchnorm_scale"
  370. type: "Scale"
  371. scale_param { bias_term: true }
  372. }
  373.  
  374. layer {
  375. bottom: "dconv_23_conv_dw_batchnorm"
  376. top: "dconv_23_conv_dw_relu"
  377. name: "dconv_23_conv_dw_relu"
  378. type: "PReLU"
  379. }
  380.  
  381. layer {
  382. bottom: "dconv_23_conv_dw_relu"
  383. top: "dconv_23_conv_proj_conv2d"
  384. name: "dconv_23_conv_proj_conv2d"
  385. type: "Convolution"
  386. convolution_param {
  387. num_output: 128
  388. kernel_size: 1
  389. pad: 0
  390. stride: 1
  391. bias_term: false
  392. }
  393. }
  394.  
  395. layer {
  396. bottom: "dconv_23_conv_proj_conv2d"
  397. top: "dconv_23_conv_proj_batchnorm"
  398. name: "dconv_23_conv_proj_batchnorm"
  399. type: "BatchNorm"
  400. batch_norm_param {
  401. use_global_stats: true
  402. moving_average_fraction: 0.9
  403. eps: 0.001
  404. }
  405. }
  406. layer {
  407. bottom: "dconv_23_conv_proj_batchnorm"
  408. top: "dconv_23_conv_proj_batchnorm"
  409. name: "dconv_23_conv_proj_batchnorm_scale"
  410. type: "Scale"
  411. scale_param { bias_term: true }
  412. }
  413.  
  414. layer {
  415. bottom: "dconv_23_conv_proj_batchnorm"
  416. top: "res_3_block0_conv_sep_conv2d"
  417. name: "res_3_block0_conv_sep_conv2d"
  418. type: "Convolution"
  419. convolution_param {
  420. num_output: 128
  421. kernel_size: 1
  422. pad: 0
  423. stride: 1
  424. bias_term: false
  425. }
  426. }
  427.  
  428. layer {
  429. bottom: "res_3_block0_conv_sep_conv2d"
  430. top: "res_3_block0_conv_sep_batchnorm"
  431. name: "res_3_block0_conv_sep_batchnorm"
  432. type: "BatchNorm"
  433. batch_norm_param {
  434. use_global_stats: true
  435. moving_average_fraction: 0.9
  436. eps: 0.001
  437. }
  438. }
  439. layer {
  440. bottom: "res_3_block0_conv_sep_batchnorm"
  441. top: "res_3_block0_conv_sep_batchnorm"
  442. name: "res_3_block0_conv_sep_batchnorm_scale"
  443. type: "Scale"
  444. scale_param { bias_term: true }
  445. }
  446.  
  447. layer {
  448. bottom: "res_3_block0_conv_sep_batchnorm"
  449. top: "res_3_block0_conv_sep_relu"
  450. name: "res_3_block0_conv_sep_relu"
  451. type: "PReLU"
  452. }
  453.  
  454. layer {
  455. bottom: "res_3_block0_conv_sep_relu"
  456. top: "res_3_block0_conv_dw_conv2d"
  457. name: "res_3_block0_conv_dw_conv2d"
  458. type: "Convolution"
  459. convolution_param {
  460. num_output: 128
  461. kernel_size: 3
  462. pad: 1
  463. group: 128
  464. engine: CAFFE
  465. stride: 1
  466. bias_term: false
  467. }
  468. }
  469.  
  470. layer {
  471. bottom: "res_3_block0_conv_dw_conv2d"
  472. top: "res_3_block0_conv_dw_batchnorm"
  473. name: "res_3_block0_conv_dw_batchnorm"
  474. type: "BatchNorm"
  475. batch_norm_param {
  476. use_global_stats: true
  477. moving_average_fraction: 0.9
  478. eps: 0.001
  479. }
  480. }
  481. layer {
  482. bottom: "res_3_block0_conv_dw_batchnorm"
  483. top: "res_3_block0_conv_dw_batchnorm"
  484. name: "res_3_block0_conv_dw_batchnorm_scale"
  485. type: "Scale"
  486. scale_param { bias_term: true }
  487. }
  488.  
  489. layer {
  490. bottom: "res_3_block0_conv_dw_batchnorm"
  491. top: "res_3_block0_conv_dw_relu"
  492. name: "res_3_block0_conv_dw_relu"
  493. type: "PReLU"
  494. }
  495.  
  496. layer {
  497. bottom: "res_3_block0_conv_dw_relu"
  498. top: "res_3_block0_conv_proj_conv2d"
  499. name: "res_3_block0_conv_proj_conv2d"
  500. type: "Convolution"
  501. convolution_param {
  502. num_output: 128
  503. kernel_size: 1
  504. pad: 0
  505. stride: 1
  506. bias_term: false
  507. }
  508. }
  509.  
  510. layer {
  511. bottom: "res_3_block0_conv_proj_conv2d"
  512. top: "res_3_block0_conv_proj_batchnorm"
  513. name: "res_3_block0_conv_proj_batchnorm"
  514. type: "BatchNorm"
  515. batch_norm_param {
  516. use_global_stats: true
  517. moving_average_fraction: 0.9
  518. eps: 0.001
  519. }
  520. }
  521. layer {
  522. bottom: "res_3_block0_conv_proj_batchnorm"
  523. top: "res_3_block0_conv_proj_batchnorm"
  524. name: "res_3_block0_conv_proj_batchnorm_scale"
  525. type: "Scale"
  526. scale_param { bias_term: true }
  527. }
  528.  
  529. layer {
  530. name: "_plus2"
  531. type: "Eltwise"
  532. bottom: "res_3_block0_conv_proj_batchnorm"
  533. bottom: "dconv_23_conv_proj_batchnorm"
  534. top: "_plus2"
  535. eltwise_param { operation: SUM }
  536. }
  537.  
  538. layer {
  539. bottom: "_plus2"
  540. top: "res_3_block1_conv_sep_conv2d"
  541. name: "res_3_block1_conv_sep_conv2d"
  542. type: "Convolution"
  543. convolution_param {
  544. num_output: 128
  545. kernel_size: 1
  546. pad: 0
  547. stride: 1
  548. bias_term: false
  549. }
  550. }
  551.  
  552. layer {
  553. bottom: "res_3_block1_conv_sep_conv2d"
  554. top: "res_3_block1_conv_sep_batchnorm"
  555. name: "res_3_block1_conv_sep_batchnorm"
  556. type: "BatchNorm"
  557. batch_norm_param {
  558. use_global_stats: true
  559. moving_average_fraction: 0.9
  560. eps: 0.001
  561. }
  562. }
  563. layer {
  564. bottom: "res_3_block1_conv_sep_batchnorm"
  565. top: "res_3_block1_conv_sep_batchnorm"
  566. name: "res_3_block1_conv_sep_batchnorm_scale"
  567. type: "Scale"
  568. scale_param { bias_term: true }
  569. }
  570.  
  571. layer {
  572. bottom: "res_3_block1_conv_sep_batchnorm"
  573. top: "res_3_block1_conv_sep_relu"
  574. name: "res_3_block1_conv_sep_relu"
  575. type: "PReLU"
  576. }
  577.  
  578. layer {
  579. bottom: "res_3_block1_conv_sep_relu"
  580. top: "res_3_block1_conv_dw_conv2d"
  581. name: "res_3_block1_conv_dw_conv2d"
  582. type: "Convolution"
  583. convolution_param {
  584. num_output: 128
  585. kernel_size: 3
  586. pad: 1
  587. group: 128
  588. engine: CAFFE
  589. stride: 1
  590. bias_term: false
  591. }
  592. }
  593.  
  594. layer {
  595. bottom: "res_3_block1_conv_dw_conv2d"
  596. top: "res_3_block1_conv_dw_batchnorm"
  597. name: "res_3_block1_conv_dw_batchnorm"
  598. type: "BatchNorm"
  599. batch_norm_param {
  600. use_global_stats: true
  601. moving_average_fraction: 0.9
  602. eps: 0.001
  603. }
  604. }
  605. layer {
  606. bottom: "res_3_block1_conv_dw_batchnorm"
  607. top: "res_3_block1_conv_dw_batchnorm"
  608. name: "res_3_block1_conv_dw_batchnorm_scale"
  609. type: "Scale"
  610. scale_param { bias_term: true }
  611. }
  612.  
  613. layer {
  614. bottom: "res_3_block1_conv_dw_batchnorm"
  615. top: "res_3_block1_conv_dw_relu"
  616. name: "res_3_block1_conv_dw_relu"
  617. type: "PReLU"
  618. }
  619.  
  620. layer {
  621. bottom: "res_3_block1_conv_dw_relu"
  622. top: "res_3_block1_conv_proj_conv2d"
  623. name: "res_3_block1_conv_proj_conv2d"
  624. type: "Convolution"
  625. convolution_param {
  626. num_output: 128
  627. kernel_size: 1
  628. pad: 0
  629. stride: 1
  630. bias_term: false
  631. }
  632. }
  633.  
  634. layer {
  635. bottom: "res_3_block1_conv_proj_conv2d"
  636. top: "res_3_block1_conv_proj_batchnorm"
  637. name: "res_3_block1_conv_proj_batchnorm"
  638. type: "BatchNorm"
  639. batch_norm_param {
  640. use_global_stats: true
  641. moving_average_fraction: 0.9
  642. eps: 0.001
  643. }
  644. }
  645. layer {
  646. bottom: "res_3_block1_conv_proj_batchnorm"
  647. top: "res_3_block1_conv_proj_batchnorm"
  648. name: "res_3_block1_conv_proj_batchnorm_scale"
  649. type: "Scale"
  650. scale_param { bias_term: true }
  651. }
  652.  
  653. layer {
  654. name: "_plus3"
  655. type: "Eltwise"
  656. bottom: "res_3_block1_conv_proj_batchnorm"
  657. bottom: "_plus2"
  658. top: "_plus3"
  659. eltwise_param { operation: SUM }
  660. }
  661.  
  662. layer {
  663. bottom: "_plus3"
  664. top: "res_3_block2_conv_sep_conv2d"
  665. name: "res_3_block2_conv_sep_conv2d"
  666. type: "Convolution"
  667. convolution_param {
  668. num_output: 128
  669. kernel_size: 1
  670. pad: 0
  671. stride: 1
  672. bias_term: false
  673. }
  674. }
  675.  
  676. layer {
  677. bottom: "res_3_block2_conv_sep_conv2d"
  678. top: "res_3_block2_conv_sep_batchnorm"
  679. name: "res_3_block2_conv_sep_batchnorm"
  680. type: "BatchNorm"
  681. batch_norm_param {
  682. use_global_stats: true
  683. moving_average_fraction: 0.9
  684. eps: 0.001
  685. }
  686. }
  687. layer {
  688. bottom: "res_3_block2_conv_sep_batchnorm"
  689. top: "res_3_block2_conv_sep_batchnorm"
  690. name: "res_3_block2_conv_sep_batchnorm_scale"
  691. type: "Scale"
  692. scale_param { bias_term: true }
  693. }
  694.  
  695. layer {
  696. bottom: "res_3_block2_conv_sep_batchnorm"
  697. top: "res_3_block2_conv_sep_relu"
  698. name: "res_3_block2_conv_sep_relu"
  699. type: "PReLU"
  700. }
  701.  
  702. layer {
  703. bottom: "res_3_block2_conv_sep_relu"
  704. top: "res_3_block2_conv_dw_conv2d"
  705. name: "res_3_block2_conv_dw_conv2d"
  706. type: "Convolution"
  707. convolution_param {
  708. num_output: 128
  709. kernel_size: 3
  710. pad: 1
  711. group: 128
  712. engine: CAFFE
  713. stride: 1
  714. bias_term: false
  715. }
  716. }
  717.  
  718. layer {
  719. bottom: "res_3_block2_conv_dw_conv2d"
  720. top: "res_3_block2_conv_dw_batchnorm"
  721. name: "res_3_block2_conv_dw_batchnorm"
  722. type: "BatchNorm"
  723. batch_norm_param {
  724. use_global_stats: true
  725. moving_average_fraction: 0.9
  726. eps: 0.001
  727. }
  728. }
  729. layer {
  730. bottom: "res_3_block2_conv_dw_batchnorm"
  731. top: "res_3_block2_conv_dw_batchnorm"
  732. name: "res_3_block2_conv_dw_batchnorm_scale"
  733. type: "Scale"
  734. scale_param { bias_term: true }
  735. }
  736.  
  737. layer {
  738. bottom: "res_3_block2_conv_dw_batchnorm"
  739. top: "res_3_block2_conv_dw_relu"
  740. name: "res_3_block2_conv_dw_relu"
  741. type: "PReLU"
  742. }
  743.  
  744. layer {
  745. bottom: "res_3_block2_conv_dw_relu"
  746. top: "res_3_block2_conv_proj_conv2d"
  747. name: "res_3_block2_conv_proj_conv2d"
  748. type: "Convolution"
  749. convolution_param {
  750. num_output: 128
  751. kernel_size: 1
  752. pad: 0
  753. stride: 1
  754. bias_term: false
  755. }
  756. }
  757.  
  758. layer {
  759. bottom: "res_3_block2_conv_proj_conv2d"
  760. top: "res_3_block2_conv_proj_batchnorm"
  761. name: "res_3_block2_conv_proj_batchnorm"
  762. type: "BatchNorm"
  763. batch_norm_param {
  764. use_global_stats: true
  765. moving_average_fraction: 0.9
  766. eps: 0.001
  767. }
  768. }
  769. layer {
  770. bottom: "res_3_block2_conv_proj_batchnorm"
  771. top: "res_3_block2_conv_proj_batchnorm"
  772. name: "res_3_block2_conv_proj_batchnorm_scale"
  773. type: "Scale"
  774. scale_param { bias_term: true }
  775. }
  776.  
  777. layer {
  778. name: "_plus4"
  779. type: "Eltwise"
  780. bottom: "res_3_block2_conv_proj_batchnorm"
  781. bottom: "_plus3"
  782. top: "_plus4"
  783. eltwise_param { operation: SUM }
  784. }
  785.  
  786. layer {
  787. bottom: "_plus4"
  788. top: "res_3_block3_conv_sep_conv2d"
  789. name: "res_3_block3_conv_sep_conv2d"
  790. type: "Convolution"
  791. convolution_param {
  792. num_output: 128
  793. kernel_size: 1
  794. pad: 0
  795. stride: 1
  796. bias_term: false
  797. }
  798. }
  799.  
  800. layer {
  801. bottom: "res_3_block3_conv_sep_conv2d"
  802. top: "res_3_block3_conv_sep_batchnorm"
  803. name: "res_3_block3_conv_sep_batchnorm"
  804. type: "BatchNorm"
  805. batch_norm_param {
  806. use_global_stats: true
  807. moving_average_fraction: 0.9
  808. eps: 0.001
  809. }
  810. }
  811. layer {
  812. bottom: "res_3_block3_conv_sep_batchnorm"
  813. top: "res_3_block3_conv_sep_batchnorm"
  814. name: "res_3_block3_conv_sep_batchnorm_scale"
  815. type: "Scale"
  816. scale_param { bias_term: true }
  817. }
  818.  
  819. layer {
  820. bottom: "res_3_block3_conv_sep_batchnorm"
  821. top: "res_3_block3_conv_sep_relu"
  822. name: "res_3_block3_conv_sep_relu"
  823. type: "PReLU"
  824. }
  825.  
  826. layer {
  827. bottom: "res_3_block3_conv_sep_relu"
  828. top: "res_3_block3_conv_dw_conv2d"
  829. name: "res_3_block3_conv_dw_conv2d"
  830. type: "Convolution"
  831. convolution_param {
  832. num_output: 128
  833. kernel_size: 3
  834. pad: 1
  835. group: 128
  836. engine: CAFFE
  837. stride: 1
  838. bias_term: false
  839. }
  840. }
  841.  
  842. layer {
  843. bottom: "res_3_block3_conv_dw_conv2d"
  844. top: "res_3_block3_conv_dw_batchnorm"
  845. name: "res_3_block3_conv_dw_batchnorm"
  846. type: "BatchNorm"
  847. batch_norm_param {
  848. use_global_stats: true
  849. moving_average_fraction: 0.9
  850. eps: 0.001
  851. }
  852. }
  853. layer {
  854. bottom: "res_3_block3_conv_dw_batchnorm"
  855. top: "res_3_block3_conv_dw_batchnorm"
  856. name: "res_3_block3_conv_dw_batchnorm_scale"
  857. type: "Scale"
  858. scale_param { bias_term: true }
  859. }
  860.  
  861. layer {
  862. bottom: "res_3_block3_conv_dw_batchnorm"
  863. top: "res_3_block3_conv_dw_relu"
  864. name: "res_3_block3_conv_dw_relu"
  865. type: "PReLU"
  866. }
  867.  
  868. layer {
  869. bottom: "res_3_block3_conv_dw_relu"
  870. top: "res_3_block3_conv_proj_conv2d"
  871. name: "res_3_block3_conv_proj_conv2d"
  872. type: "Convolution"
  873. convolution_param {
  874. num_output: 128
  875. kernel_size: 1
  876. pad: 0
  877. stride: 1
  878. bias_term: false
  879. }
  880. }
  881.  
  882. layer {
  883. bottom: "res_3_block3_conv_proj_conv2d"
  884. top: "res_3_block3_conv_proj_batchnorm"
  885. name: "res_3_block3_conv_proj_batchnorm"
  886. type: "BatchNorm"
  887. batch_norm_param {
  888. use_global_stats: true
  889. moving_average_fraction: 0.9
  890. eps: 0.001
  891. }
  892. }
  893. layer {
  894. bottom: "res_3_block3_conv_proj_batchnorm"
  895. top: "res_3_block3_conv_proj_batchnorm"
  896. name: "res_3_block3_conv_proj_batchnorm_scale"
  897. type: "Scale"
  898. scale_param { bias_term: true }
  899. }
  900.  
  901. layer {
  902. name: "_plus5"
  903. type: "Eltwise"
  904. bottom: "res_3_block3_conv_proj_batchnorm"
  905. bottom: "_plus4"
  906. top: "_plus5"
  907. eltwise_param { operation: SUM }
  908. }
  909.  
  910. layer {
  911. bottom: "_plus5"
  912. top: "res_3_block4_conv_sep_conv2d"
  913. name: "res_3_block4_conv_sep_conv2d"
  914. type: "Convolution"
  915. convolution_param {
  916. num_output: 128
  917. kernel_size: 1
  918. pad: 0
  919. stride: 1
  920. bias_term: false
  921. }
  922. }
  923.  
  924. layer {
  925. bottom: "res_3_block4_conv_sep_conv2d"
  926. top: "res_3_block4_conv_sep_batchnorm"
  927. name: "res_3_block4_conv_sep_batchnorm"
  928. type: "BatchNorm"
  929. batch_norm_param {
  930. use_global_stats: true
  931. moving_average_fraction: 0.9
  932. eps: 0.001
  933. }
  934. }
  935. layer {
  936. bottom: "res_3_block4_conv_sep_batchnorm"
  937. top: "res_3_block4_conv_sep_batchnorm"
  938. name: "res_3_block4_conv_sep_batchnorm_scale"
  939. type: "Scale"
  940. scale_param { bias_term: true }
  941. }
  942.  
  943. layer {
  944. bottom: "res_3_block4_conv_sep_batchnorm"
  945. top: "res_3_block4_conv_sep_relu"
  946. name: "res_3_block4_conv_sep_relu"
  947. type: "PReLU"
  948. }
  949.  
  950. layer {
  951. bottom: "res_3_block4_conv_sep_relu"
  952. top: "res_3_block4_conv_dw_conv2d"
  953. name: "res_3_block4_conv_dw_conv2d"
  954. type: "Convolution"
  955. convolution_param {
  956. num_output: 128
  957. kernel_size: 3
  958. pad: 1
  959. group: 128
  960. engine: CAFFE
  961. stride: 1
  962. bias_term: false
  963. }
  964. }
  965.  
  966. layer {
  967. bottom: "res_3_block4_conv_dw_conv2d"
  968. top: "res_3_block4_conv_dw_batchnorm"
  969. name: "res_3_block4_conv_dw_batchnorm"
  970. type: "BatchNorm"
  971. batch_norm_param {
  972. use_global_stats: true
  973. moving_average_fraction: 0.9
  974. eps: 0.001
  975. }
  976. }
  977. layer {
  978. bottom: "res_3_block4_conv_dw_batchnorm"
  979. top: "res_3_block4_conv_dw_batchnorm"
  980. name: "res_3_block4_conv_dw_batchnorm_scale"
  981. type: "Scale"
  982. scale_param { bias_term: true }
  983. }
  984.  
  985. layer {
  986. bottom: "res_3_block4_conv_dw_batchnorm"
  987. top: "res_3_block4_conv_dw_relu"
  988. name: "res_3_block4_conv_dw_relu"
  989. type: "PReLU"
  990. }
  991.  
  992. layer {
  993. bottom: "res_3_block4_conv_dw_relu"
  994. top: "res_3_block4_conv_proj_conv2d"
  995. name: "res_3_block4_conv_proj_conv2d"
  996. type: "Convolution"
  997. convolution_param {
  998. num_output: 128
  999. kernel_size: 1
  1000. pad: 0
  1001. stride: 1
  1002. bias_term: false
  1003. }
  1004. }
  1005.  
  1006. layer {
  1007. bottom: "res_3_block4_conv_proj_conv2d"
  1008. top: "res_3_block4_conv_proj_batchnorm"
  1009. name: "res_3_block4_conv_proj_batchnorm"
  1010. type: "BatchNorm"
  1011. batch_norm_param {
  1012. use_global_stats: true
  1013. moving_average_fraction: 0.9
  1014. eps: 0.001
  1015. }
  1016. }
  1017. layer {
  1018. bottom: "res_3_block4_conv_proj_batchnorm"
  1019. top: "res_3_block4_conv_proj_batchnorm"
  1020. name: "res_3_block4_conv_proj_batchnorm_scale"
  1021. type: "Scale"
  1022. scale_param { bias_term: true }
  1023. }
  1024.  
  1025. layer {
  1026. name: "_plus6"
  1027. type: "Eltwise"
  1028. bottom: "res_3_block4_conv_proj_batchnorm"
  1029. bottom: "_plus5"
  1030. top: "_plus6"
  1031. eltwise_param { operation: SUM }
  1032. }
  1033.  
  1034. layer {
  1035. bottom: "_plus6"
  1036. top: "res_3_block5_conv_sep_conv2d"
  1037. name: "res_3_block5_conv_sep_conv2d"
  1038. type: "Convolution"
  1039. convolution_param {
  1040. num_output: 128
  1041. kernel_size: 1
  1042. pad: 0
  1043. stride: 1
  1044. bias_term: false
  1045. }
  1046. }
  1047.  
  1048. layer {
  1049. bottom: "res_3_block5_conv_sep_conv2d"
  1050. top: "res_3_block5_conv_sep_batchnorm"
  1051. name: "res_3_block5_conv_sep_batchnorm"
  1052. type: "BatchNorm"
  1053. batch_norm_param {
  1054. use_global_stats: true
  1055. moving_average_fraction: 0.9
  1056. eps: 0.001
  1057. }
  1058. }
  1059. layer {
  1060. bottom: "res_3_block5_conv_sep_batchnorm"
  1061. top: "res_3_block5_conv_sep_batchnorm"
  1062. name: "res_3_block5_conv_sep_batchnorm_scale"
  1063. type: "Scale"
  1064. scale_param { bias_term: true }
  1065. }
  1066.  
  1067. layer {
  1068. bottom: "res_3_block5_conv_sep_batchnorm"
  1069. top: "res_3_block5_conv_sep_relu"
  1070. name: "res_3_block5_conv_sep_relu"
  1071. type: "PReLU"
  1072. }
  1073.  
  1074. layer {
  1075. bottom: "res_3_block5_conv_sep_relu"
  1076. top: "res_3_block5_conv_dw_conv2d"
  1077. name: "res_3_block5_conv_dw_conv2d"
  1078. type: "Convolution"
  1079. convolution_param {
  1080. num_output: 128
  1081. kernel_size: 3
  1082. pad: 1
  1083. group: 128
  1084. engine: CAFFE
  1085. stride: 1
  1086. bias_term: false
  1087. }
  1088. }
  1089.  
  1090. layer {
  1091. bottom: "res_3_block5_conv_dw_conv2d"
  1092. top: "res_3_block5_conv_dw_batchnorm"
  1093. name: "res_3_block5_conv_dw_batchnorm"
  1094. type: "BatchNorm"
  1095. batch_norm_param {
  1096. use_global_stats: true
  1097. moving_average_fraction: 0.9
  1098. eps: 0.001
  1099. }
  1100. }
  1101. layer {
  1102. bottom: "res_3_block5_conv_dw_batchnorm"
  1103. top: "res_3_block5_conv_dw_batchnorm"
  1104. name: "res_3_block5_conv_dw_batchnorm_scale"
  1105. type: "Scale"
  1106. scale_param { bias_term: true }
  1107. }
  1108.  
  1109. layer {
  1110. bottom: "res_3_block5_conv_dw_batchnorm"
  1111. top: "res_3_block5_conv_dw_relu"
  1112. name: "res_3_block5_conv_dw_relu"
  1113. type: "PReLU"
  1114. }
  1115.  
  1116. layer {
  1117. bottom: "res_3_block5_conv_dw_relu"
  1118. top: "res_3_block5_conv_proj_conv2d"
  1119. name: "res_3_block5_conv_proj_conv2d"
  1120. type: "Convolution"
  1121. convolution_param {
  1122. num_output: 128
  1123. kernel_size: 1
  1124. pad: 0
  1125. stride: 1
  1126. bias_term: false
  1127. }
  1128. }
  1129.  
  1130. layer {
  1131. bottom: "res_3_block5_conv_proj_conv2d"
  1132. top: "res_3_block5_conv_proj_batchnorm"
  1133. name: "res_3_block5_conv_proj_batchnorm"
  1134. type: "BatchNorm"
  1135. batch_norm_param {
  1136. use_global_stats: true
  1137. moving_average_fraction: 0.9
  1138. eps: 0.001
  1139. }
  1140. }
  1141. layer {
  1142. bottom: "res_3_block5_conv_proj_batchnorm"
  1143. top: "res_3_block5_conv_proj_batchnorm"
  1144. name: "res_3_block5_conv_proj_batchnorm_scale"
  1145. type: "Scale"
  1146. scale_param { bias_term: true }
  1147. }
  1148.  
  1149. layer {
  1150. name: "_plus7"
  1151. type: "Eltwise"
  1152. bottom: "res_3_block5_conv_proj_batchnorm"
  1153. bottom: "_plus6"
  1154. top: "_plus7"
  1155. eltwise_param { operation: SUM }
  1156. }
  1157.  
  1158. layer {
  1159. bottom: "_plus7"
  1160. top: "dconv_34_conv_sep_conv2d"
  1161. name: "dconv_34_conv_sep_conv2d"
  1162. type: "Convolution"
  1163. convolution_param {
  1164. num_output: 256
  1165. kernel_size: 1
  1166. pad: 0
  1167. stride: 1
  1168. bias_term: false
  1169. }
  1170. }
  1171.  
  1172. layer {
  1173. bottom: "dconv_34_conv_sep_conv2d"
  1174. top: "dconv_34_conv_sep_batchnorm"
  1175. name: "dconv_34_conv_sep_batchnorm"
  1176. type: "BatchNorm"
  1177. batch_norm_param {
  1178. use_global_stats: true
  1179. moving_average_fraction: 0.9
  1180. eps: 0.001
  1181. }
  1182. }
  1183. layer {
  1184. bottom: "dconv_34_conv_sep_batchnorm"
  1185. top: "dconv_34_conv_sep_batchnorm"
  1186. name: "dconv_34_conv_sep_batchnorm_scale"
  1187. type: "Scale"
  1188. scale_param { bias_term: true }
  1189. }
  1190.  
  1191. layer {
  1192. bottom: "dconv_34_conv_sep_batchnorm"
  1193. top: "dconv_34_conv_sep_relu"
  1194. name: "dconv_34_conv_sep_relu"
  1195. type: "PReLU"
  1196. }
  1197.  
  1198. layer {
  1199. bottom: "dconv_34_conv_sep_relu"
  1200. top: "dconv_34_conv_dw_conv2d"
  1201. name: "dconv_34_conv_dw_conv2d"
  1202. type: "Convolution"
  1203. convolution_param {
  1204. num_output: 256
  1205. kernel_size: 3
  1206. pad: 1
  1207. group: 256
  1208. engine: CAFFE
  1209. stride: 2
  1210. bias_term: false
  1211. }
  1212. }
  1213.  
  1214. layer {
  1215. bottom: "dconv_34_conv_dw_conv2d"
  1216. top: "dconv_34_conv_dw_batchnorm"
  1217. name: "dconv_34_conv_dw_batchnorm"
  1218. type: "BatchNorm"
  1219. batch_norm_param {
  1220. use_global_stats: true
  1221. moving_average_fraction: 0.9
  1222. eps: 0.001
  1223. }
  1224. }
  1225. layer {
  1226. bottom: "dconv_34_conv_dw_batchnorm"
  1227. top: "dconv_34_conv_dw_batchnorm"
  1228. name: "dconv_34_conv_dw_batchnorm_scale"
  1229. type: "Scale"
  1230. scale_param { bias_term: true }
  1231. }
  1232.  
  1233. layer {
  1234. bottom: "dconv_34_conv_dw_batchnorm"
  1235. top: "dconv_34_conv_dw_relu"
  1236. name: "dconv_34_conv_dw_relu"
  1237. type: "PReLU"
  1238. }
  1239.  
  1240. layer {
  1241. bottom: "dconv_34_conv_dw_relu"
  1242. top: "dconv_34_conv_proj_conv2d"
  1243. name: "dconv_34_conv_proj_conv2d"
  1244. type: "Convolution"
  1245. convolution_param {
  1246. num_output: 256
  1247. kernel_size: 1
  1248. pad: 0
  1249. stride: 1
  1250. bias_term: false
  1251. }
  1252. }
  1253.  
  1254. layer {
  1255. bottom: "dconv_34_conv_proj_conv2d"
  1256. top: "dconv_34_conv_proj_batchnorm"
  1257. name: "dconv_34_conv_proj_batchnorm"
  1258. type: "BatchNorm"
  1259. batch_norm_param {
  1260. use_global_stats: true
  1261. moving_average_fraction: 0.9
  1262. eps: 0.001
  1263. }
  1264. }
  1265. layer {
  1266. bottom: "dconv_34_conv_proj_batchnorm"
  1267. top: "dconv_34_conv_proj_batchnorm"
  1268. name: "dconv_34_conv_proj_batchnorm_scale"
  1269. type: "Scale"
  1270. scale_param { bias_term: true }
  1271. }
  1272.  
  1273. layer {
  1274. bottom: "dconv_34_conv_proj_batchnorm"
  1275. top: "res_4_block0_conv_sep_conv2d"
  1276. name: "res_4_block0_conv_sep_conv2d"
  1277. type: "Convolution"
  1278. convolution_param {
  1279. num_output: 256
  1280. kernel_size: 1
  1281. pad: 0
  1282. stride: 1
  1283. bias_term: false
  1284. }
  1285. }
  1286.  
  1287. layer {
  1288. bottom: "res_4_block0_conv_sep_conv2d"
  1289. top: "res_4_block0_conv_sep_batchnorm"
  1290. name: "res_4_block0_conv_sep_batchnorm"
  1291. type: "BatchNorm"
  1292. batch_norm_param {
  1293. use_global_stats: true
  1294. moving_average_fraction: 0.9
  1295. eps: 0.001
  1296. }
  1297. }
  1298. layer {
  1299. bottom: "res_4_block0_conv_sep_batchnorm"
  1300. top: "res_4_block0_conv_sep_batchnorm"
  1301. name: "res_4_block0_conv_sep_batchnorm_scale"
  1302. type: "Scale"
  1303. scale_param { bias_term: true }
  1304. }
  1305.  
  1306. layer {
  1307. bottom: "res_4_block0_conv_sep_batchnorm"
  1308. top: "res_4_block0_conv_sep_relu"
  1309. name: "res_4_block0_conv_sep_relu"
  1310. type: "PReLU"
  1311. }
  1312.  
  1313. layer {
  1314. bottom: "res_4_block0_conv_sep_relu"
  1315. top: "res_4_block0_conv_dw_conv2d"
  1316. name: "res_4_block0_conv_dw_conv2d"
  1317. type: "Convolution"
  1318. convolution_param {
  1319. num_output: 256
  1320. kernel_size: 3
  1321. pad: 1
  1322. group: 256
  1323. engine: CAFFE
  1324. stride: 1
  1325. bias_term: false
  1326. }
  1327. }
  1328.  
  1329. layer {
  1330. bottom: "res_4_block0_conv_dw_conv2d"
  1331. top: "res_4_block0_conv_dw_batchnorm"
  1332. name: "res_4_block0_conv_dw_batchnorm"
  1333. type: "BatchNorm"
  1334. batch_norm_param {
  1335. use_global_stats: true
  1336. moving_average_fraction: 0.9
  1337. eps: 0.001
  1338. }
  1339. }
  1340. layer {
  1341. bottom: "res_4_block0_conv_dw_batchnorm"
  1342. top: "res_4_block0_conv_dw_batchnorm"
  1343. name: "res_4_block0_conv_dw_batchnorm_scale"
  1344. type: "Scale"
  1345. scale_param { bias_term: true }
  1346. }
  1347.  
  1348. layer {
  1349. bottom: "res_4_block0_conv_dw_batchnorm"
  1350. top: "res_4_block0_conv_dw_relu"
  1351. name: "res_4_block0_conv_dw_relu"
  1352. type: "PReLU"
  1353. }
  1354.  
  1355. layer {
  1356. bottom: "res_4_block0_conv_dw_relu"
  1357. top: "res_4_block0_conv_proj_conv2d"
  1358. name: "res_4_block0_conv_proj_conv2d"
  1359. type: "Convolution"
  1360. convolution_param {
  1361. num_output: 256
  1362. kernel_size: 1
  1363. pad: 0
  1364. stride: 1
  1365. bias_term: false
  1366. }
  1367. }
  1368.  
  1369. layer {
  1370. bottom: "res_4_block0_conv_proj_conv2d"
  1371. top: "res_4_block0_conv_proj_batchnorm"
  1372. name: "res_4_block0_conv_proj_batchnorm"
  1373. type: "BatchNorm"
  1374. batch_norm_param {
  1375. use_global_stats: true
  1376. moving_average_fraction: 0.9
  1377. eps: 0.001
  1378. }
  1379. }
  1380. layer {
  1381. bottom: "res_4_block0_conv_proj_batchnorm"
  1382. top: "res_4_block0_conv_proj_batchnorm"
  1383. name: "res_4_block0_conv_proj_batchnorm_scale"
  1384. type: "Scale"
  1385. scale_param { bias_term: true }
  1386. }
  1387.  
  1388. layer {
  1389. name: "_plus8"
  1390. type: "Eltwise"
  1391. bottom: "res_4_block0_conv_proj_batchnorm"
  1392. bottom: "dconv_34_conv_proj_batchnorm"
  1393. top: "_plus8"
  1394. eltwise_param { operation: SUM }
  1395. }
  1396.  
  1397. layer {
  1398. bottom: "_plus8"
  1399. top: "res_4_block1_conv_sep_conv2d"
  1400. name: "res_4_block1_conv_sep_conv2d"
  1401. type: "Convolution"
  1402. convolution_param {
  1403. num_output: 256
  1404. kernel_size: 1
  1405. pad: 0
  1406. stride: 1
  1407. bias_term: false
  1408. }
  1409. }
  1410.  
  1411. layer {
  1412. bottom: "res_4_block1_conv_sep_conv2d"
  1413. top: "res_4_block1_conv_sep_batchnorm"
  1414. name: "res_4_block1_conv_sep_batchnorm"
  1415. type: "BatchNorm"
  1416. batch_norm_param {
  1417. use_global_stats: true
  1418. moving_average_fraction: 0.9
  1419. eps: 0.001
  1420. }
  1421. }
  1422. layer {
  1423. bottom: "res_4_block1_conv_sep_batchnorm"
  1424. top: "res_4_block1_conv_sep_batchnorm"
  1425. name: "res_4_block1_conv_sep_batchnorm_scale"
  1426. type: "Scale"
  1427. scale_param { bias_term: true }
  1428. }
  1429.  
  1430. layer {
  1431. bottom: "res_4_block1_conv_sep_batchnorm"
  1432. top: "res_4_block1_conv_sep_relu"
  1433. name: "res_4_block1_conv_sep_relu"
  1434. type: "PReLU"
  1435. }
  1436.  
  1437. layer {
  1438. bottom: "res_4_block1_conv_sep_relu"
  1439. top: "res_4_block1_conv_dw_conv2d"
  1440. name: "res_4_block1_conv_dw_conv2d"
  1441. type: "Convolution"
  1442. convolution_param {
  1443. num_output: 256
  1444. kernel_size: 3
  1445. pad: 1
  1446. group: 256
  1447. engine: CAFFE
  1448. stride: 1
  1449. bias_term: false
  1450. }
  1451. }
  1452.  
  1453. layer {
  1454. bottom: "res_4_block1_conv_dw_conv2d"
  1455. top: "res_4_block1_conv_dw_batchnorm"
  1456. name: "res_4_block1_conv_dw_batchnorm"
  1457. type: "BatchNorm"
  1458. batch_norm_param {
  1459. use_global_stats: true
  1460. moving_average_fraction: 0.9
  1461. eps: 0.001
  1462. }
  1463. }
  1464. layer {
  1465. bottom: "res_4_block1_conv_dw_batchnorm"
  1466. top: "res_4_block1_conv_dw_batchnorm"
  1467. name: "res_4_block1_conv_dw_batchnorm_scale"
  1468. type: "Scale"
  1469. scale_param { bias_term: true }
  1470. }
  1471.  
  1472. layer {
  1473. bottom: "res_4_block1_conv_dw_batchnorm"
  1474. top: "res_4_block1_conv_dw_relu"
  1475. name: "res_4_block1_conv_dw_relu"
  1476. type: "PReLU"
  1477. }
  1478.  
  1479. layer {
  1480. bottom: "res_4_block1_conv_dw_relu"
  1481. top: "res_4_block1_conv_proj_conv2d"
  1482. name: "res_4_block1_conv_proj_conv2d"
  1483. type: "Convolution"
  1484. convolution_param {
  1485. num_output: 256
  1486. kernel_size: 1
  1487. pad: 0
  1488. stride: 1
  1489. bias_term: false
  1490. }
  1491. }
  1492.  
  1493. layer {
  1494. bottom: "res_4_block1_conv_proj_conv2d"
  1495. top: "res_4_block1_conv_proj_batchnorm"
  1496. name: "res_4_block1_conv_proj_batchnorm"
  1497. type: "BatchNorm"
  1498. batch_norm_param {
  1499. use_global_stats: true
  1500. moving_average_fraction: 0.9
  1501. eps: 0.001
  1502. }
  1503. }
  1504. layer {
  1505. bottom: "res_4_block1_conv_proj_batchnorm"
  1506. top: "res_4_block1_conv_proj_batchnorm"
  1507. name: "res_4_block1_conv_proj_batchnorm_scale"
  1508. type: "Scale"
  1509. scale_param { bias_term: true }
  1510. }
  1511.  
  1512. layer {
  1513. name: "_plus9"
  1514. type: "Eltwise"
  1515. bottom: "res_4_block1_conv_proj_batchnorm"
  1516. bottom: "_plus8"
  1517. top: "_plus9"
  1518. eltwise_param { operation: SUM }
  1519. }
  1520.  
  1521. layer {
  1522. bottom: "_plus9"
  1523. top: "res_4_block2_conv_sep_conv2d"
  1524. name: "res_4_block2_conv_sep_conv2d"
  1525. type: "Convolution"
  1526. convolution_param {
  1527. num_output: 256
  1528. kernel_size: 1
  1529. pad: 0
  1530. stride: 1
  1531. bias_term: false
  1532. }
  1533. }
  1534.  
  1535. layer {
  1536. bottom: "res_4_block2_conv_sep_conv2d"
  1537. top: "res_4_block2_conv_sep_batchnorm"
  1538. name: "res_4_block2_conv_sep_batchnorm"
  1539. type: "BatchNorm"
  1540. batch_norm_param {
  1541. use_global_stats: true
  1542. moving_average_fraction: 0.9
  1543. eps: 0.001
  1544. }
  1545. }
  1546. layer {
  1547. bottom: "res_4_block2_conv_sep_batchnorm"
  1548. top: "res_4_block2_conv_sep_batchnorm"
  1549. name: "res_4_block2_conv_sep_batchnorm_scale"
  1550. type: "Scale"
  1551. scale_param { bias_term: true }
  1552. }
  1553.  
  1554. layer {
  1555. bottom: "res_4_block2_conv_sep_batchnorm"
  1556. top: "res_4_block2_conv_sep_relu"
  1557. name: "res_4_block2_conv_sep_relu"
  1558. type: "PReLU"
  1559. }
  1560.  
  1561. layer {
  1562. bottom: "res_4_block2_conv_sep_relu"
  1563. top: "res_4_block2_conv_dw_conv2d"
  1564. name: "res_4_block2_conv_dw_conv2d"
  1565. type: "Convolution"
  1566. convolution_param {
  1567. num_output: 256
  1568. kernel_size: 3
  1569. pad: 1
  1570. group: 256
  1571. engine: CAFFE
  1572. stride: 1
  1573. bias_term: false
  1574. }
  1575. }
  1576.  
  1577. layer {
  1578. bottom: "res_4_block2_conv_dw_conv2d"
  1579. top: "res_4_block2_conv_dw_batchnorm"
  1580. name: "res_4_block2_conv_dw_batchnorm"
  1581. type: "BatchNorm"
  1582. batch_norm_param {
  1583. use_global_stats: true
  1584. moving_average_fraction: 0.9
  1585. eps: 0.001
  1586. }
  1587. }
  1588. layer {
  1589. bottom: "res_4_block2_conv_dw_batchnorm"
  1590. top: "res_4_block2_conv_dw_batchnorm"
  1591. name: "res_4_block2_conv_dw_batchnorm_scale"
  1592. type: "Scale"
  1593. scale_param { bias_term: true }
  1594. }
  1595.  
  1596. layer {
  1597. bottom: "res_4_block2_conv_dw_batchnorm"
  1598. top: "res_4_block2_conv_dw_relu"
  1599. name: "res_4_block2_conv_dw_relu"
  1600. type: "PReLU"
  1601. }
  1602.  
  1603. layer {
  1604. bottom: "res_4_block2_conv_dw_relu"
  1605. top: "res_4_block2_conv_proj_conv2d"
  1606. name: "res_4_block2_conv_proj_conv2d"
  1607. type: "Convolution"
  1608. convolution_param {
  1609. num_output: 256
  1610. kernel_size: 1
  1611. pad: 0
  1612. stride: 1
  1613. bias_term: false
  1614. }
  1615. }
  1616.  
  1617. layer {
  1618. bottom: "res_4_block2_conv_proj_conv2d"
  1619. top: "res_4_block2_conv_proj_batchnorm"
  1620. name: "res_4_block2_conv_proj_batchnorm"
  1621. type: "BatchNorm"
  1622. batch_norm_param {
  1623. use_global_stats: true
  1624. moving_average_fraction: 0.9
  1625. eps: 0.001
  1626. }
  1627. }
  1628. layer {
  1629. bottom: "res_4_block2_conv_proj_batchnorm"
  1630. top: "res_4_block2_conv_proj_batchnorm"
  1631. name: "res_4_block2_conv_proj_batchnorm_scale"
  1632. type: "Scale"
  1633. scale_param { bias_term: true }
  1634. }
  1635.  
  1636. layer {
  1637. name: "_plus10"
  1638. type: "Eltwise"
  1639. bottom: "res_4_block2_conv_proj_batchnorm"
  1640. bottom: "_plus9"
  1641. top: "_plus10"
  1642. eltwise_param { operation: SUM }
  1643. }
  1644.  
  1645. layer {
  1646. bottom: "_plus10"
  1647. top: "res_4_block3_conv_sep_conv2d"
  1648. name: "res_4_block3_conv_sep_conv2d"
  1649. type: "Convolution"
  1650. convolution_param {
  1651. num_output: 256
  1652. kernel_size: 1
  1653. pad: 0
  1654. stride: 1
  1655. bias_term: false
  1656. }
  1657. }
  1658.  
  1659. layer {
  1660. bottom: "res_4_block3_conv_sep_conv2d"
  1661. top: "res_4_block3_conv_sep_batchnorm"
  1662. name: "res_4_block3_conv_sep_batchnorm"
  1663. type: "BatchNorm"
  1664. batch_norm_param {
  1665. use_global_stats: true
  1666. moving_average_fraction: 0.9
  1667. eps: 0.001
  1668. }
  1669. }
  1670. layer {
  1671. bottom: "res_4_block3_conv_sep_batchnorm"
  1672. top: "res_4_block3_conv_sep_batchnorm"
  1673. name: "res_4_block3_conv_sep_batchnorm_scale"
  1674. type: "Scale"
  1675. scale_param { bias_term: true }
  1676. }
  1677.  
  1678. layer {
  1679. bottom: "res_4_block3_conv_sep_batchnorm"
  1680. top: "res_4_block3_conv_sep_relu"
  1681. name: "res_4_block3_conv_sep_relu"
  1682. type: "PReLU"
  1683. }
  1684.  
  1685. layer {
  1686. bottom: "res_4_block3_conv_sep_relu"
  1687. top: "res_4_block3_conv_dw_conv2d"
  1688. name: "res_4_block3_conv_dw_conv2d"
  1689. type: "Convolution"
  1690. convolution_param {
  1691. num_output: 256
  1692. kernel_size: 3
  1693. pad: 1
  1694. group: 256
  1695. engine: CAFFE
  1696. stride: 1
  1697. bias_term: false
  1698. }
  1699. }
  1700.  
  1701. layer {
  1702. bottom: "res_4_block3_conv_dw_conv2d"
  1703. top: "res_4_block3_conv_dw_batchnorm"
  1704. name: "res_4_block3_conv_dw_batchnorm"
  1705. type: "BatchNorm"
  1706. batch_norm_param {
  1707. use_global_stats: true
  1708. moving_average_fraction: 0.9
  1709. eps: 0.001
  1710. }
  1711. }
  1712. layer {
  1713. bottom: "res_4_block3_conv_dw_batchnorm"
  1714. top: "res_4_block3_conv_dw_batchnorm"
  1715. name: "res_4_block3_conv_dw_batchnorm_scale"
  1716. type: "Scale"
  1717. scale_param { bias_term: true }
  1718. }
  1719.  
  1720. layer {
  1721. bottom: "res_4_block3_conv_dw_batchnorm"
  1722. top: "res_4_block3_conv_dw_relu"
  1723. name: "res_4_block3_conv_dw_relu"
  1724. type: "PReLU"
  1725. }
  1726.  
  1727. layer {
  1728. bottom: "res_4_block3_conv_dw_relu"
  1729. top: "res_4_block3_conv_proj_conv2d"
  1730. name: "res_4_block3_conv_proj_conv2d"
  1731. type: "Convolution"
  1732. convolution_param {
  1733. num_output: 256
  1734. kernel_size: 1
  1735. pad: 0
  1736. stride: 1
  1737. bias_term: false
  1738. }
  1739. }
  1740.  
  1741. layer {
  1742. bottom: "res_4_block3_conv_proj_conv2d"
  1743. top: "res_4_block3_conv_proj_batchnorm"
  1744. name: "res_4_block3_conv_proj_batchnorm"
  1745. type: "BatchNorm"
  1746. batch_norm_param {
  1747. use_global_stats: true
  1748. moving_average_fraction: 0.9
  1749. eps: 0.001
  1750. }
  1751. }
  1752. layer {
  1753. bottom: "res_4_block3_conv_proj_batchnorm"
  1754. top: "res_4_block3_conv_proj_batchnorm"
  1755. name: "res_4_block3_conv_proj_batchnorm_scale"
  1756. type: "Scale"
  1757. scale_param { bias_term: true }
  1758. }
  1759.  
  1760. layer {
  1761. name: "_plus11"
  1762. type: "Eltwise"
  1763. bottom: "res_4_block3_conv_proj_batchnorm"
  1764. bottom: "_plus10"
  1765. top: "_plus11"
  1766. eltwise_param { operation: SUM }
  1767. }
  1768.  
  1769. layer {
  1770. bottom: "_plus11"
  1771. top: "res_4_block4_conv_sep_conv2d"
  1772. name: "res_4_block4_conv_sep_conv2d"
  1773. type: "Convolution"
  1774. convolution_param {
  1775. num_output: 256
  1776. kernel_size: 1
  1777. pad: 0
  1778. stride: 1
  1779. bias_term: false
  1780. }
  1781. }
  1782.  
  1783. layer {
  1784. bottom: "res_4_block4_conv_sep_conv2d"
  1785. top: "res_4_block4_conv_sep_batchnorm"
  1786. name: "res_4_block4_conv_sep_batchnorm"
  1787. type: "BatchNorm"
  1788. batch_norm_param {
  1789. use_global_stats: true
  1790. moving_average_fraction: 0.9
  1791. eps: 0.001
  1792. }
  1793. }
  1794. layer {
  1795. bottom: "res_4_block4_conv_sep_batchnorm"
  1796. top: "res_4_block4_conv_sep_batchnorm"
  1797. name: "res_4_block4_conv_sep_batchnorm_scale"
  1798. type: "Scale"
  1799. scale_param { bias_term: true }
  1800. }
  1801.  
  1802. layer {
  1803. bottom: "res_4_block4_conv_sep_batchnorm"
  1804. top: "res_4_block4_conv_sep_relu"
  1805. name: "res_4_block4_conv_sep_relu"
  1806. type: "PReLU"
  1807. }
  1808.  
  1809. layer {
  1810. bottom: "res_4_block4_conv_sep_relu"
  1811. top: "res_4_block4_conv_dw_conv2d"
  1812. name: "res_4_block4_conv_dw_conv2d"
  1813. type: "Convolution"
  1814. convolution_param {
  1815. num_output: 256
  1816. kernel_size: 3
  1817. pad: 1
  1818. group: 256
  1819. engine: CAFFE
  1820. stride: 1
  1821. bias_term: false
  1822. }
  1823. }
  1824.  
  1825. layer {
  1826. bottom: "res_4_block4_conv_dw_conv2d"
  1827. top: "res_4_block4_conv_dw_batchnorm"
  1828. name: "res_4_block4_conv_dw_batchnorm"
  1829. type: "BatchNorm"
  1830. batch_norm_param {
  1831. use_global_stats: true
  1832. moving_average_fraction: 0.9
  1833. eps: 0.001
  1834. }
  1835. }
  1836. layer {
  1837. bottom: "res_4_block4_conv_dw_batchnorm"
  1838. top: "res_4_block4_conv_dw_batchnorm"
  1839. name: "res_4_block4_conv_dw_batchnorm_scale"
  1840. type: "Scale"
  1841. scale_param { bias_term: true }
  1842. }
  1843.  
  1844. layer {
  1845. bottom: "res_4_block4_conv_dw_batchnorm"
  1846. top: "res_4_block4_conv_dw_relu"
  1847. name: "res_4_block4_conv_dw_relu"
  1848. type: "PReLU"
  1849. }
  1850.  
  1851. layer {
  1852. bottom: "res_4_block4_conv_dw_relu"
  1853. top: "res_4_block4_conv_proj_conv2d"
  1854. name: "res_4_block4_conv_proj_conv2d"
  1855. type: "Convolution"
  1856. convolution_param {
  1857. num_output: 256
  1858. kernel_size: 1
  1859. pad: 0
  1860. stride: 1
  1861. bias_term: false
  1862. }
  1863. }
  1864.  
  1865. layer {
  1866. bottom: "res_4_block4_conv_proj_conv2d"
  1867. top: "res_4_block4_conv_proj_batchnorm"
  1868. name: "res_4_block4_conv_proj_batchnorm"
  1869. type: "BatchNorm"
  1870. batch_norm_param {
  1871. use_global_stats: true
  1872. moving_average_fraction: 0.9
  1873. eps: 0.001
  1874. }
  1875. }
  1876. layer {
  1877. bottom: "res_4_block4_conv_proj_batchnorm"
  1878. top: "res_4_block4_conv_proj_batchnorm"
  1879. name: "res_4_block4_conv_proj_batchnorm_scale"
  1880. type: "Scale"
  1881. scale_param { bias_term: true }
  1882. }
  1883.  
  1884. layer {
  1885. name: "_plus12"
  1886. type: "Eltwise"
  1887. bottom: "res_4_block4_conv_proj_batchnorm"
  1888. bottom: "_plus11"
  1889. top: "_plus12"
  1890. eltwise_param { operation: SUM }
  1891. }
  1892.  
  1893. layer {
  1894. bottom: "_plus12"
  1895. top: "res_4_block5_conv_sep_conv2d"
  1896. name: "res_4_block5_conv_sep_conv2d"
  1897. type: "Convolution"
  1898. convolution_param {
  1899. num_output: 256
  1900. kernel_size: 1
  1901. pad: 0
  1902. stride: 1
  1903. bias_term: false
  1904. }
  1905. }
  1906.  
  1907. layer {
  1908. bottom: "res_4_block5_conv_sep_conv2d"
  1909. top: "res_4_block5_conv_sep_batchnorm"
  1910. name: "res_4_block5_conv_sep_batchnorm"
  1911. type: "BatchNorm"
  1912. batch_norm_param {
  1913. use_global_stats: true
  1914. moving_average_fraction: 0.9
  1915. eps: 0.001
  1916. }
  1917. }
  1918. layer {
  1919. bottom: "res_4_block5_conv_sep_batchnorm"
  1920. top: "res_4_block5_conv_sep_batchnorm"
  1921. name: "res_4_block5_conv_sep_batchnorm_scale"
  1922. type: "Scale"
  1923. scale_param { bias_term: true }
  1924. }
  1925.  
  1926. layer {
  1927. bottom: "res_4_block5_conv_sep_batchnorm"
  1928. top: "res_4_block5_conv_sep_relu"
  1929. name: "res_4_block5_conv_sep_relu"
  1930. type: "PReLU"
  1931. }
  1932.  
  1933. layer {
  1934. bottom: "res_4_block5_conv_sep_relu"
  1935. top: "res_4_block5_conv_dw_conv2d"
  1936. name: "res_4_block5_conv_dw_conv2d"
  1937. type: "Convolution"
  1938. convolution_param {
  1939. num_output: 256
  1940. kernel_size: 3
  1941. pad: 1
  1942. group: 256
  1943. engine: CAFFE
  1944. stride: 1
  1945. bias_term: false
  1946. }
  1947. }
  1948.  
  1949. layer {
  1950. bottom: "res_4_block5_conv_dw_conv2d"
  1951. top: "res_4_block5_conv_dw_batchnorm"
  1952. name: "res_4_block5_conv_dw_batchnorm"
  1953. type: "BatchNorm"
  1954. batch_norm_param {
  1955. use_global_stats: true
  1956. moving_average_fraction: 0.9
  1957. eps: 0.001
  1958. }
  1959. }
  1960. layer {
  1961. bottom: "res_4_block5_conv_dw_batchnorm"
  1962. top: "res_4_block5_conv_dw_batchnorm"
  1963. name: "res_4_block5_conv_dw_batchnorm_scale"
  1964. type: "Scale"
  1965. scale_param { bias_term: true }
  1966. }
  1967.  
  1968. layer {
  1969. bottom: "res_4_block5_conv_dw_batchnorm"
  1970. top: "res_4_block5_conv_dw_relu"
  1971. name: "res_4_block5_conv_dw_relu"
  1972. type: "PReLU"
  1973. }
  1974.  
  1975. layer {
  1976. bottom: "res_4_block5_conv_dw_relu"
  1977. top: "res_4_block5_conv_proj_conv2d"
  1978. name: "res_4_block5_conv_proj_conv2d"
  1979. type: "Convolution"
  1980. convolution_param {
  1981. num_output: 256
  1982. kernel_size: 1
  1983. pad: 0
  1984. stride: 1
  1985. bias_term: false
  1986. }
  1987. }
  1988.  
  1989. layer {
  1990. bottom: "res_4_block5_conv_proj_conv2d"
  1991. top: "res_4_block5_conv_proj_batchnorm"
  1992. name: "res_4_block5_conv_proj_batchnorm"
  1993. type: "BatchNorm"
  1994. batch_norm_param {
  1995. use_global_stats: true
  1996. moving_average_fraction: 0.9
  1997. eps: 0.001
  1998. }
  1999. }
  2000. layer {
  2001. bottom: "res_4_block5_conv_proj_batchnorm"
  2002. top: "res_4_block5_conv_proj_batchnorm"
  2003. name: "res_4_block5_conv_proj_batchnorm_scale"
  2004. type: "Scale"
  2005. scale_param { bias_term: true }
  2006. }
  2007.  
  2008. layer {
  2009. name: "_plus13"
  2010. type: "Eltwise"
  2011. bottom: "res_4_block5_conv_proj_batchnorm"
  2012. bottom: "_plus12"
  2013. top: "_plus13"
  2014. eltwise_param { operation: SUM }
  2015. }
  2016.  
  2017. layer {
  2018. bottom: "_plus13"
  2019. top: "res_4_block6_conv_sep_conv2d"
  2020. name: "res_4_block6_conv_sep_conv2d"
  2021. type: "Convolution"
  2022. convolution_param {
  2023. num_output: 256
  2024. kernel_size: 1
  2025. pad: 0
  2026. stride: 1
  2027. bias_term: false
  2028. }
  2029. }
  2030.  
  2031. layer {
  2032. bottom: "res_4_block6_conv_sep_conv2d"
  2033. top: "res_4_block6_conv_sep_batchnorm"
  2034. name: "res_4_block6_conv_sep_batchnorm"
  2035. type: "BatchNorm"
  2036. batch_norm_param {
  2037. use_global_stats: true
  2038. moving_average_fraction: 0.9
  2039. eps: 0.001
  2040. }
  2041. }
  2042. layer {
  2043. bottom: "res_4_block6_conv_sep_batchnorm"
  2044. top: "res_4_block6_conv_sep_batchnorm"
  2045. name: "res_4_block6_conv_sep_batchnorm_scale"
  2046. type: "Scale"
  2047. scale_param { bias_term: true }
  2048. }
  2049.  
  2050. layer {
  2051. bottom: "res_4_block6_conv_sep_batchnorm"
  2052. top: "res_4_block6_conv_sep_relu"
  2053. name: "res_4_block6_conv_sep_relu"
  2054. type: "PReLU"
  2055. }
  2056.  
  2057. layer {
  2058. bottom: "res_4_block6_conv_sep_relu"
  2059. top: "res_4_block6_conv_dw_conv2d"
  2060. name: "res_4_block6_conv_dw_conv2d"
  2061. type: "Convolution"
  2062. convolution_param {
  2063. num_output: 256
  2064. kernel_size: 3
  2065. pad: 1
  2066. group: 256
  2067. engine: CAFFE
  2068. stride: 1
  2069. bias_term: false
  2070. }
  2071. }
  2072.  
  2073. layer {
  2074. bottom: "res_4_block6_conv_dw_conv2d"
  2075. top: "res_4_block6_conv_dw_batchnorm"
  2076. name: "res_4_block6_conv_dw_batchnorm"
  2077. type: "BatchNorm"
  2078. batch_norm_param {
  2079. use_global_stats: true
  2080. moving_average_fraction: 0.9
  2081. eps: 0.001
  2082. }
  2083. }
  2084. layer {
  2085. bottom: "res_4_block6_conv_dw_batchnorm"
  2086. top: "res_4_block6_conv_dw_batchnorm"
  2087. name: "res_4_block6_conv_dw_batchnorm_scale"
  2088. type: "Scale"
  2089. scale_param { bias_term: true }
  2090. }
  2091.  
  2092. layer {
  2093. bottom: "res_4_block6_conv_dw_batchnorm"
  2094. top: "res_4_block6_conv_dw_relu"
  2095. name: "res_4_block6_conv_dw_relu"
  2096. type: "PReLU"
  2097. }
  2098.  
  2099. layer {
  2100. bottom: "res_4_block6_conv_dw_relu"
  2101. top: "res_4_block6_conv_proj_conv2d"
  2102. name: "res_4_block6_conv_proj_conv2d"
  2103. type: "Convolution"
  2104. convolution_param {
  2105. num_output: 256
  2106. kernel_size: 1
  2107. pad: 0
  2108. stride: 1
  2109. bias_term: false
  2110. }
  2111. }
  2112.  
  2113. layer {
  2114. bottom: "res_4_block6_conv_proj_conv2d"
  2115. top: "res_4_block6_conv_proj_batchnorm"
  2116. name: "res_4_block6_conv_proj_batchnorm"
  2117. type: "BatchNorm"
  2118. batch_norm_param {
  2119. use_global_stats: true
  2120. moving_average_fraction: 0.9
  2121. eps: 0.001
  2122. }
  2123. }
  2124. layer {
  2125. bottom: "res_4_block6_conv_proj_batchnorm"
  2126. top: "res_4_block6_conv_proj_batchnorm"
  2127. name: "res_4_block6_conv_proj_batchnorm_scale"
  2128. type: "Scale"
  2129. scale_param { bias_term: true }
  2130. }
  2131.  
  2132. layer {
  2133. name: "_plus14"
  2134. type: "Eltwise"
  2135. bottom: "res_4_block6_conv_proj_batchnorm"
  2136. bottom: "_plus13"
  2137. top: "_plus14"
  2138. eltwise_param { operation: SUM }
  2139. }
  2140.  
  2141. layer {
  2142. bottom: "_plus14"
  2143. top: "res_4_block7_conv_sep_conv2d"
  2144. name: "res_4_block7_conv_sep_conv2d"
  2145. type: "Convolution"
  2146. convolution_param {
  2147. num_output: 256
  2148. kernel_size: 1
  2149. pad: 0
  2150. stride: 1
  2151. bias_term: false
  2152. }
  2153. }
  2154.  
  2155. layer {
  2156. bottom: "res_4_block7_conv_sep_conv2d"
  2157. top: "res_4_block7_conv_sep_batchnorm"
  2158. name: "res_4_block7_conv_sep_batchnorm"
  2159. type: "BatchNorm"
  2160. batch_norm_param {
  2161. use_global_stats: true
  2162. moving_average_fraction: 0.9
  2163. eps: 0.001
  2164. }
  2165. }
  2166. layer {
  2167. bottom: "res_4_block7_conv_sep_batchnorm"
  2168. top: "res_4_block7_conv_sep_batchnorm"
  2169. name: "res_4_block7_conv_sep_batchnorm_scale"
  2170. type: "Scale"
  2171. scale_param { bias_term: true }
  2172. }
  2173.  
  2174. layer {
  2175. bottom: "res_4_block7_conv_sep_batchnorm"
  2176. top: "res_4_block7_conv_sep_relu"
  2177. name: "res_4_block7_conv_sep_relu"
  2178. type: "PReLU"
  2179. }
  2180.  
  2181. layer {
  2182. bottom: "res_4_block7_conv_sep_relu"
  2183. top: "res_4_block7_conv_dw_conv2d"
  2184. name: "res_4_block7_conv_dw_conv2d"
  2185. type: "Convolution"
  2186. convolution_param {
  2187. num_output: 256
  2188. kernel_size: 3
  2189. pad: 1
  2190. group: 256
  2191. engine: CAFFE
  2192. stride: 1
  2193. bias_term: false
  2194. }
  2195. }
  2196.  
  2197. layer {
  2198. bottom: "res_4_block7_conv_dw_conv2d"
  2199. top: "res_4_block7_conv_dw_batchnorm"
  2200. name: "res_4_block7_conv_dw_batchnorm"
  2201. type: "BatchNorm"
  2202. batch_norm_param {
  2203. use_global_stats: true
  2204. moving_average_fraction: 0.9
  2205. eps: 0.001
  2206. }
  2207. }
  2208. layer {
  2209. bottom: "res_4_block7_conv_dw_batchnorm"
  2210. top: "res_4_block7_conv_dw_batchnorm"
  2211. name: "res_4_block7_conv_dw_batchnorm_scale"
  2212. type: "Scale"
  2213. scale_param { bias_term: true }
  2214. }
  2215.  
  2216. layer {
  2217. bottom: "res_4_block7_conv_dw_batchnorm"
  2218. top: "res_4_block7_conv_dw_relu"
  2219. name: "res_4_block7_conv_dw_relu"
  2220. type: "PReLU"
  2221. }
  2222.  
  2223. layer {
  2224. bottom: "res_4_block7_conv_dw_relu"
  2225. top: "res_4_block7_conv_proj_conv2d"
  2226. name: "res_4_block7_conv_proj_conv2d"
  2227. type: "Convolution"
  2228. convolution_param {
  2229. num_output: 256
  2230. kernel_size: 1
  2231. pad: 0
  2232. stride: 1
  2233. bias_term: false
  2234. }
  2235. }
  2236.  
  2237. layer {
  2238. bottom: "res_4_block7_conv_proj_conv2d"
  2239. top: "res_4_block7_conv_proj_batchnorm"
  2240. name: "res_4_block7_conv_proj_batchnorm"
  2241. type: "BatchNorm"
  2242. batch_norm_param {
  2243. use_global_stats: true
  2244. moving_average_fraction: 0.9
  2245. eps: 0.001
  2246. }
  2247. }
  2248. layer {
  2249. bottom: "res_4_block7_conv_proj_batchnorm"
  2250. top: "res_4_block7_conv_proj_batchnorm"
  2251. name: "res_4_block7_conv_proj_batchnorm_scale"
  2252. type: "Scale"
  2253. scale_param { bias_term: true }
  2254. }
  2255.  
  2256. layer {
  2257. name: "_plus15"
  2258. type: "Eltwise"
  2259. bottom: "res_4_block7_conv_proj_batchnorm"
  2260. bottom: "_plus14"
  2261. top: "_plus15"
  2262. eltwise_param { operation: SUM }
  2263. }
  2264.  
  2265. layer {
  2266. bottom: "_plus15"
  2267. top: "res_4_block8_conv_sep_conv2d"
  2268. name: "res_4_block8_conv_sep_conv2d"
  2269. type: "Convolution"
  2270. convolution_param {
  2271. num_output: 256
  2272. kernel_size: 1
  2273. pad: 0
  2274. stride: 1
  2275. bias_term: false
  2276. }
  2277. }
  2278.  
  2279. layer {
  2280. bottom: "res_4_block8_conv_sep_conv2d"
  2281. top: "res_4_block8_conv_sep_batchnorm"
  2282. name: "res_4_block8_conv_sep_batchnorm"
  2283. type: "BatchNorm"
  2284. batch_norm_param {
  2285. use_global_stats: true
  2286. moving_average_fraction: 0.9
  2287. eps: 0.001
  2288. }
  2289. }
  2290. layer {
  2291. bottom: "res_4_block8_conv_sep_batchnorm"
  2292. top: "res_4_block8_conv_sep_batchnorm"
  2293. name: "res_4_block8_conv_sep_batchnorm_scale"
  2294. type: "Scale"
  2295. scale_param { bias_term: true }
  2296. }
  2297.  
  2298. layer {
  2299. bottom: "res_4_block8_conv_sep_batchnorm"
  2300. top: "res_4_block8_conv_sep_relu"
  2301. name: "res_4_block8_conv_sep_relu"
  2302. type: "PReLU"
  2303. }
  2304.  
  2305. layer {
  2306. bottom: "res_4_block8_conv_sep_relu"
  2307. top: "res_4_block8_conv_dw_conv2d"
  2308. name: "res_4_block8_conv_dw_conv2d"
  2309. type: "Convolution"
  2310. convolution_param {
  2311. num_output: 256
  2312. kernel_size: 3
  2313. pad: 1
  2314. group: 256
  2315. engine: CAFFE
  2316. stride: 1
  2317. bias_term: false
  2318. }
  2319. }
  2320.  
  2321. layer {
  2322. bottom: "res_4_block8_conv_dw_conv2d"
  2323. top: "res_4_block8_conv_dw_batchnorm"
  2324. name: "res_4_block8_conv_dw_batchnorm"
  2325. type: "BatchNorm"
  2326. batch_norm_param {
  2327. use_global_stats: true
  2328. moving_average_fraction: 0.9
  2329. eps: 0.001
  2330. }
  2331. }
  2332. layer {
  2333. bottom: "res_4_block8_conv_dw_batchnorm"
  2334. top: "res_4_block8_conv_dw_batchnorm"
  2335. name: "res_4_block8_conv_dw_batchnorm_scale"
  2336. type: "Scale"
  2337. scale_param { bias_term: true }
  2338. }
  2339.  
  2340. layer {
  2341. bottom: "res_4_block8_conv_dw_batchnorm"
  2342. top: "res_4_block8_conv_dw_relu"
  2343. name: "res_4_block8_conv_dw_relu"
  2344. type: "PReLU"
  2345. }
  2346.  
  2347. layer {
  2348. bottom: "res_4_block8_conv_dw_relu"
  2349. top: "res_4_block8_conv_proj_conv2d"
  2350. name: "res_4_block8_conv_proj_conv2d"
  2351. type: "Convolution"
  2352. convolution_param {
  2353. num_output: 256
  2354. kernel_size: 1
  2355. pad: 0
  2356. stride: 1
  2357. bias_term: false
  2358. }
  2359. }
  2360.  
  2361. layer {
  2362. bottom: "res_4_block8_conv_proj_conv2d"
  2363. top: "res_4_block8_conv_proj_batchnorm"
  2364. name: "res_4_block8_conv_proj_batchnorm"
  2365. type: "BatchNorm"
  2366. batch_norm_param {
  2367. use_global_stats: true
  2368. moving_average_fraction: 0.9
  2369. eps: 0.001
  2370. }
  2371. }
  2372. layer {
  2373. bottom: "res_4_block8_conv_proj_batchnorm"
  2374. top: "res_4_block8_conv_proj_batchnorm"
  2375. name: "res_4_block8_conv_proj_batchnorm_scale"
  2376. type: "Scale"
  2377. scale_param { bias_term: true }
  2378. }
  2379.  
  2380. layer {
  2381. name: "_plus16"
  2382. type: "Eltwise"
  2383. bottom: "res_4_block8_conv_proj_batchnorm"
  2384. bottom: "_plus15"
  2385. top: "_plus16"
  2386. eltwise_param { operation: SUM }
  2387. }
  2388.  
  2389. layer {
  2390. bottom: "_plus16"
  2391. top: "res_4_block9_conv_sep_conv2d"
  2392. name: "res_4_block9_conv_sep_conv2d"
  2393. type: "Convolution"
  2394. convolution_param {
  2395. num_output: 256
  2396. kernel_size: 1
  2397. pad: 0
  2398. stride: 1
  2399. bias_term: false
  2400. }
  2401. }
  2402.  
  2403. layer {
  2404. bottom: "res_4_block9_conv_sep_conv2d"
  2405. top: "res_4_block9_conv_sep_batchnorm"
  2406. name: "res_4_block9_conv_sep_batchnorm"
  2407. type: "BatchNorm"
  2408. batch_norm_param {
  2409. use_global_stats: true
  2410. moving_average_fraction: 0.9
  2411. eps: 0.001
  2412. }
  2413. }
  2414. layer {
  2415. bottom: "res_4_block9_conv_sep_batchnorm"
  2416. top: "res_4_block9_conv_sep_batchnorm"
  2417. name: "res_4_block9_conv_sep_batchnorm_scale"
  2418. type: "Scale"
  2419. scale_param { bias_term: true }
  2420. }
  2421.  
  2422. layer {
  2423. bottom: "res_4_block9_conv_sep_batchnorm"
  2424. top: "res_4_block9_conv_sep_relu"
  2425. name: "res_4_block9_conv_sep_relu"
  2426. type: "PReLU"
  2427. }
  2428.  
  2429. layer {
  2430. bottom: "res_4_block9_conv_sep_relu"
  2431. top: "res_4_block9_conv_dw_conv2d"
  2432. name: "res_4_block9_conv_dw_conv2d"
  2433. type: "Convolution"
  2434. convolution_param {
  2435. num_output: 256
  2436. kernel_size: 3
  2437. pad: 1
  2438. group: 256
  2439. engine:CAFFE
  2440. stride: 1
  2441. bias_term: false
  2442. }
  2443. }
  2444.  
  2445. layer {
  2446. bottom: "res_4_block9_conv_dw_conv2d"
  2447. top: "res_4_block9_conv_dw_batchnorm"
  2448. name: "res_4_block9_conv_dw_batchnorm"
  2449. type: "BatchNorm"
  2450. batch_norm_param {
  2451. use_global_stats: true
  2452. moving_average_fraction: 0.9
  2453. eps: 0.001
  2454. }
  2455. }
  2456. layer {
  2457. bottom: "res_4_block9_conv_dw_batchnorm"
  2458. top: "res_4_block9_conv_dw_batchnorm"
  2459. name: "res_4_block9_conv_dw_batchnorm_scale"
  2460. type: "Scale"
  2461. scale_param { bias_term: true }
  2462. }
  2463.  
  2464. layer {
  2465. bottom: "res_4_block9_conv_dw_batchnorm"
  2466. top: "res_4_block9_conv_dw_relu"
  2467. name: "res_4_block9_conv_dw_relu"
  2468. type: "PReLU"
  2469. }
  2470.  
  2471. layer {
  2472. bottom: "res_4_block9_conv_dw_relu"
  2473. top: "res_4_block9_conv_proj_conv2d"
  2474. name: "res_4_block9_conv_proj_conv2d"
  2475. type: "Convolution"
  2476. convolution_param {
  2477. num_output: 256
  2478. kernel_size: 1
  2479. pad: 0
  2480. stride: 1
  2481. bias_term: false
  2482. }
  2483. }
  2484.  
  2485. layer {
  2486. bottom: "res_4_block9_conv_proj_conv2d"
  2487. top: "res_4_block9_conv_proj_batchnorm"
  2488. name: "res_4_block9_conv_proj_batchnorm"
  2489. type: "BatchNorm"
  2490. batch_norm_param {
  2491. use_global_stats: true
  2492. moving_average_fraction: 0.9
  2493. eps: 0.001
  2494. }
  2495. }
  2496. layer {
  2497. bottom: "res_4_block9_conv_proj_batchnorm"
  2498. top: "res_4_block9_conv_proj_batchnorm"
  2499. name: "res_4_block9_conv_proj_batchnorm_scale"
  2500. type: "Scale"
  2501. scale_param { bias_term: true }
  2502. }
  2503.  
  2504. layer {
  2505. name: "_plus17"
  2506. type: "Eltwise"
  2507. bottom: "res_4_block9_conv_proj_batchnorm"
  2508. bottom: "_plus16"
  2509. top: "_plus17"
  2510. eltwise_param { operation: SUM }
  2511. }
  2512.  
  2513. layer {
  2514. bottom: "_plus17"
  2515. top: "dconv_45_conv_sep_conv2d"
  2516. name: "dconv_45_conv_sep_conv2d"
  2517. type: "Convolution"
  2518. convolution_param {
  2519. num_output: 512
  2520. kernel_size: 1
  2521. pad: 0
  2522. stride: 1
  2523. bias_term: false
  2524. }
  2525. }
  2526.  
  2527. layer {
  2528. bottom: "dconv_45_conv_sep_conv2d"
  2529. top: "dconv_45_conv_sep_batchnorm"
  2530. name: "dconv_45_conv_sep_batchnorm"
  2531. type: "BatchNorm"
  2532. batch_norm_param {
  2533. use_global_stats: true
  2534. moving_average_fraction: 0.9
  2535. eps: 0.001
  2536. }
  2537. }
  2538. layer {
  2539. bottom: "dconv_45_conv_sep_batchnorm"
  2540. top: "dconv_45_conv_sep_batchnorm"
  2541. name: "dconv_45_conv_sep_batchnorm_scale"
  2542. type: "Scale"
  2543. scale_param { bias_term: true }
  2544. }
  2545.  
  2546. layer {
  2547. bottom: "dconv_45_conv_sep_batchnorm"
  2548. top: "dconv_45_conv_sep_relu"
  2549. name: "dconv_45_conv_sep_relu"
  2550. type: "PReLU"
  2551. }
  2552.  
  2553. layer {
  2554. bottom: "dconv_45_conv_sep_relu"
  2555. top: "dconv_45_conv_dw_conv2d"
  2556. name: "dconv_45_conv_dw_conv2d"
  2557. type: "Convolution"
  2558. convolution_param {
  2559. num_output: 512
  2560. kernel_size: 3
  2561. pad: 1
  2562. group: 512
  2563. engine:CAFFE
  2564. stride: 2
  2565. bias_term: false
  2566. }
  2567. }
  2568.  
  2569. layer {
  2570. bottom: "dconv_45_conv_dw_conv2d"
  2571. top: "dconv_45_conv_dw_batchnorm"
  2572. name: "dconv_45_conv_dw_batchnorm"
  2573. type: "BatchNorm"
  2574. batch_norm_param {
  2575. use_global_stats: true
  2576. moving_average_fraction: 0.9
  2577. eps: 0.001
  2578. }
  2579. }
  2580. layer {
  2581. bottom: "dconv_45_conv_dw_batchnorm"
  2582. top: "dconv_45_conv_dw_batchnorm"
  2583. name: "dconv_45_conv_dw_batchnorm_scale"
  2584. type: "Scale"
  2585. scale_param { bias_term: true }
  2586. }
  2587.  
  2588. layer {
  2589. bottom: "dconv_45_conv_dw_batchnorm"
  2590. top: "dconv_45_conv_dw_relu"
  2591. name: "dconv_45_conv_dw_relu"
  2592. type: "PReLU"
  2593. }
  2594.  
  2595. layer {
  2596. bottom: "dconv_45_conv_dw_relu"
  2597. top: "dconv_45_conv_proj_conv2d"
  2598. name: "dconv_45_conv_proj_conv2d"
  2599. type: "Convolution"
  2600. convolution_param {
  2601. num_output: 512
  2602. kernel_size: 1
  2603. pad: 0
  2604. stride: 1
  2605. bias_term: false
  2606. }
  2607. }
  2608.  
  2609. layer {
  2610. bottom: "dconv_45_conv_proj_conv2d"
  2611. top: "dconv_45_conv_proj_batchnorm"
  2612. name: "dconv_45_conv_proj_batchnorm"
  2613. type: "BatchNorm"
  2614. batch_norm_param {
  2615. use_global_stats: true
  2616. moving_average_fraction: 0.9
  2617. eps: 0.001
  2618. }
  2619. }
  2620. layer {
  2621. bottom: "dconv_45_conv_proj_batchnorm"
  2622. top: "dconv_45_conv_proj_batchnorm"
  2623. name: "dconv_45_conv_proj_batchnorm_scale"
  2624. type: "Scale"
  2625. scale_param { bias_term: true }
  2626. }
  2627.  
  2628. layer {
  2629. bottom: "dconv_45_conv_proj_batchnorm"
  2630. top: "res_5_block0_conv_sep_conv2d"
  2631. name: "res_5_block0_conv_sep_conv2d"
  2632. type: "Convolution"
  2633. convolution_param {
  2634. num_output: 512
  2635. kernel_size: 1
  2636. pad: 0
  2637. stride: 1
  2638. bias_term: false
  2639. }
  2640. }
  2641.  
  2642. layer {
  2643. bottom: "res_5_block0_conv_sep_conv2d"
  2644. top: "res_5_block0_conv_sep_batchnorm"
  2645. name: "res_5_block0_conv_sep_batchnorm"
  2646. type: "BatchNorm"
  2647. batch_norm_param {
  2648. use_global_stats: true
  2649. moving_average_fraction: 0.9
  2650. eps: 0.001
  2651. }
  2652. }
  2653. layer {
  2654. bottom: "res_5_block0_conv_sep_batchnorm"
  2655. top: "res_5_block0_conv_sep_batchnorm"
  2656. name: "res_5_block0_conv_sep_batchnorm_scale"
  2657. type: "Scale"
  2658. scale_param { bias_term: true }
  2659. }
  2660.  
  2661. layer {
  2662. bottom: "res_5_block0_conv_sep_batchnorm"
  2663. top: "res_5_block0_conv_sep_relu"
  2664. name: "res_5_block0_conv_sep_relu"
  2665. type: "PReLU"
  2666. }
  2667.  
  2668. layer {
  2669. bottom: "res_5_block0_conv_sep_relu"
  2670. top: "res_5_block0_conv_dw_conv2d"
  2671. name: "res_5_block0_conv_dw_conv2d"
  2672. type: "Convolution"
  2673. convolution_param {
  2674. num_output: 512
  2675. kernel_size: 3
  2676. pad: 1
  2677. group: 512
  2678. engine:CAFFE
  2679. stride: 1
  2680. bias_term: false
  2681. }
  2682. }
  2683.  
  2684. layer {
  2685. bottom: "res_5_block0_conv_dw_conv2d"
  2686. top: "res_5_block0_conv_dw_batchnorm"
  2687. name: "res_5_block0_conv_dw_batchnorm"
  2688. type: "BatchNorm"
  2689. batch_norm_param {
  2690. use_global_stats: true
  2691. moving_average_fraction: 0.9
  2692. eps: 0.001
  2693. }
  2694. }
  2695. layer {
  2696. bottom: "res_5_block0_conv_dw_batchnorm"
  2697. top: "res_5_block0_conv_dw_batchnorm"
  2698. name: "res_5_block0_conv_dw_batchnorm_scale"
  2699. type: "Scale"
  2700. scale_param { bias_term: true }
  2701. }
  2702.  
  2703. layer {
  2704. bottom: "res_5_block0_conv_dw_batchnorm"
  2705. top: "res_5_block0_conv_dw_relu"
  2706. name: "res_5_block0_conv_dw_relu"
  2707. type: "PReLU"
  2708. }
  2709.  
  2710. layer {
  2711. bottom: "res_5_block0_conv_dw_relu"
  2712. top: "res_5_block0_conv_proj_conv2d"
  2713. name: "res_5_block0_conv_proj_conv2d"
  2714. type: "Convolution"
  2715. convolution_param {
  2716. num_output: 512
  2717. kernel_size: 1
  2718. pad: 0
  2719. stride: 1
  2720. bias_term: false
  2721. }
  2722. }
  2723.  
  2724. layer {
  2725. bottom: "res_5_block0_conv_proj_conv2d"
  2726. top: "res_5_block0_conv_proj_batchnorm"
  2727. name: "res_5_block0_conv_proj_batchnorm"
  2728. type: "BatchNorm"
  2729. batch_norm_param {
  2730. use_global_stats: true
  2731. moving_average_fraction: 0.9
  2732. eps: 0.001
  2733. }
  2734. }
  2735. layer {
  2736. bottom: "res_5_block0_conv_proj_batchnorm"
  2737. top: "res_5_block0_conv_proj_batchnorm"
  2738. name: "res_5_block0_conv_proj_batchnorm_scale"
  2739. type: "Scale"
  2740. scale_param { bias_term: true }
  2741. }
  2742.  
  2743. layer {
  2744. name: "_plus18"
  2745. type: "Eltwise"
  2746. bottom: "res_5_block0_conv_proj_batchnorm"
  2747. bottom: "dconv_45_conv_proj_batchnorm"
  2748. top: "_plus18"
  2749. eltwise_param { operation: SUM }
  2750. }
  2751.  
  2752. layer {
  2753. bottom: "_plus18"
  2754. top: "res_5_block1_conv_sep_conv2d"
  2755. name: "res_5_block1_conv_sep_conv2d"
  2756. type: "Convolution"
  2757. convolution_param {
  2758. num_output: 512
  2759. kernel_size: 1
  2760. pad: 0
  2761. stride: 1
  2762. bias_term: false
  2763. }
  2764. }
  2765.  
  2766. layer {
  2767. bottom: "res_5_block1_conv_sep_conv2d"
  2768. top: "res_5_block1_conv_sep_batchnorm"
  2769. name: "res_5_block1_conv_sep_batchnorm"
  2770. type: "BatchNorm"
  2771. batch_norm_param {
  2772. use_global_stats: true
  2773. moving_average_fraction: 0.9
  2774. eps: 0.001
  2775. }
  2776. }
  2777. layer {
  2778. bottom: "res_5_block1_conv_sep_batchnorm"
  2779. top: "res_5_block1_conv_sep_batchnorm"
  2780. name: "res_5_block1_conv_sep_batchnorm_scale"
  2781. type: "Scale"
  2782. scale_param { bias_term: true }
  2783. }
  2784.  
  2785. layer {
  2786. bottom: "res_5_block1_conv_sep_batchnorm"
  2787. top: "res_5_block1_conv_sep_relu"
  2788. name: "res_5_block1_conv_sep_relu"
  2789. type: "PReLU"
  2790. }
  2791.  
  2792. layer {
  2793. bottom: "res_5_block1_conv_sep_relu"
  2794. top: "res_5_block1_conv_dw_conv2d"
  2795. name: "res_5_block1_conv_dw_conv2d"
  2796. type: "Convolution"
  2797. convolution_param {
  2798. num_output: 512
  2799. kernel_size: 3
  2800. pad: 1
  2801. group: 512
  2802. engine:CAFFE
  2803. stride: 1
  2804. bias_term: false
  2805. }
  2806. }
  2807.  
  2808. layer {
  2809. bottom: "res_5_block1_conv_dw_conv2d"
  2810. top: "res_5_block1_conv_dw_batchnorm"
  2811. name: "res_5_block1_conv_dw_batchnorm"
  2812. type: "BatchNorm"
  2813. batch_norm_param {
  2814. use_global_stats: true
  2815. moving_average_fraction: 0.9
  2816. eps: 0.001
  2817. }
  2818. }
  2819. layer {
  2820. bottom: "res_5_block1_conv_dw_batchnorm"
  2821. top: "res_5_block1_conv_dw_batchnorm"
  2822. name: "res_5_block1_conv_dw_batchnorm_scale"
  2823. type: "Scale"
  2824. scale_param { bias_term: true }
  2825. }
  2826.  
  2827. layer {
  2828. bottom: "res_5_block1_conv_dw_batchnorm"
  2829. top: "res_5_block1_conv_dw_relu"
  2830. name: "res_5_block1_conv_dw_relu"
  2831. type: "PReLU"
  2832. }
  2833.  
  2834. layer {
  2835. bottom: "res_5_block1_conv_dw_relu"
  2836. top: "res_5_block1_conv_proj_conv2d"
  2837. name: "res_5_block1_conv_proj_conv2d"
  2838. type: "Convolution"
  2839. convolution_param {
  2840. num_output: 512
  2841. kernel_size: 1
  2842. pad: 0
  2843. stride: 1
  2844. bias_term: false
  2845. }
  2846. }
  2847.  
  2848. layer {
  2849. bottom: "res_5_block1_conv_proj_conv2d"
  2850. top: "res_5_block1_conv_proj_batchnorm"
  2851. name: "res_5_block1_conv_proj_batchnorm"
  2852. type: "BatchNorm"
  2853. batch_norm_param {
  2854. use_global_stats: true
  2855. moving_average_fraction: 0.9
  2856. eps: 0.001
  2857. }
  2858. }
  2859. layer {
  2860. bottom: "res_5_block1_conv_proj_batchnorm"
  2861. top: "res_5_block1_conv_proj_batchnorm"
  2862. name: "res_5_block1_conv_proj_batchnorm_scale"
  2863. type: "Scale"
  2864. scale_param { bias_term: true }
  2865. }
  2866.  
  2867. layer {
  2868. name: "_plus19"
  2869. type: "Eltwise"
  2870. bottom: "res_5_block1_conv_proj_batchnorm"
  2871. bottom: "_plus18"
  2872. top: "_plus19"
  2873. eltwise_param { operation: SUM }
  2874. }
  2875.  
  2876. layer {
  2877. bottom: "_plus19"
  2878. top: "conv_6sep_conv2d"
  2879. name: "conv_6sep_conv2d"
  2880. type: "Convolution"
  2881. convolution_param {
  2882. num_output: 512
  2883. kernel_size: 1
  2884. pad: 0
  2885. stride: 1
  2886. bias_term: false
  2887. }
  2888. }
  2889.  
  2890. layer {
  2891. bottom: "conv_6sep_conv2d"
  2892. top: "conv_6sep_batchnorm"
  2893. name: "conv_6sep_batchnorm"
  2894. type: "BatchNorm"
  2895. batch_norm_param {
  2896. use_global_stats: true
  2897. moving_average_fraction: 0.9
  2898. eps: 0.001
  2899. }
  2900. }
  2901. layer {
  2902. bottom: "conv_6sep_batchnorm"
  2903. top: "conv_6sep_batchnorm"
  2904. name: "conv_6sep_batchnorm_scale"
  2905. type: "Scale"
  2906. scale_param { bias_term: true }
  2907. }
  2908.  
  2909. layer {
  2910. bottom: "conv_6sep_batchnorm"
  2911. top: "conv_6sep_relu"
  2912. name: "conv_6sep_relu"
  2913. type: "PReLU"
  2914. }
  2915.  
  2916. layer {
  2917. bottom: "conv_6sep_relu"
  2918. top: "conv_6dw7_7_conv2d"
  2919. name: "conv_6dw7_7_conv2d"
  2920. type: "Convolution"
  2921. convolution_param {
  2922. num_output: 512
  2923. kernel_size: 7
  2924. pad: 0
  2925. group: 512
  2926. engine:CAFFE
  2927. stride: 1
  2928. bias_term: false
  2929. }
  2930. }
  2931.  
  2932. layer {
  2933. bottom: "conv_6dw7_7_conv2d"
  2934. top: "conv_6dw7_7_batchnorm"
  2935. name: "conv_6dw7_7_batchnorm"
  2936. type: "BatchNorm"
  2937. batch_norm_param {
  2938. use_global_stats: true
  2939. moving_average_fraction: 0.9
  2940. eps: 0.001
  2941. }
  2942. }
  2943. layer {
  2944. bottom: "conv_6dw7_7_batchnorm"
  2945. top: "conv_6dw7_7_batchnorm"
  2946. name: "conv_6dw7_7_batchnorm_scale"
  2947. type: "Scale"
  2948. scale_param { bias_term: true }
  2949. }
  2950.  
  2951. layer {
  2952. bottom: "conv_6dw7_7_batchnorm"
  2953. top: "pre_fc1"
  2954. name: "pre_fc1"
  2955. type: "InnerProduct"
  2956. inner_product_param {
  2957. num_output: 128
  2958. }
  2959. }
  2960.  
  2961. layer {
  2962. bottom: "pre_fc1"
  2963. top: "fc1"
  2964. name: "fc1"
  2965. type: "BatchNorm"
  2966. batch_norm_param {
  2967. use_global_stats: true
  2968. moving_average_fraction: 0.9
  2969. eps: 2e-05
  2970. }
  2971. }
  2972. layer {
  2973. bottom: "fc1"
  2974. top: "fc1"
  2975. name: "fc1_scale"
  2976. type: "Scale"
  2977. scale_param { bias_term: true }
  2978. }
# End of network definition. (Trailing Pastebin page boilerplate removed.)