Advertisement
Guest User

Untitled

a guest
Feb 22nd, 2019
71
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 45.19 KB | None | 0 0
  1. name: "mxnet-model"
  2. layer {
  3. name: "data"
  4. type: "Input"
  5. top: "data"
  6. input_param {
  7. shape: { dim: 1 dim: 3 dim: 112 dim: 112 }
  8. }
  9. }
  10.  
  11. layer {
  12. bottom: "data"
  13. top: "conv0"
  14. name: "conv0"
  15. type: "Convolution"
  16. convolution_param {
  17. num_output: 64
  18. kernel_size: 3
  19. pad: 1
  20. stride: 1
  21. bias_term: true
  22. }
  23. }
  24.  
  25. layer {
  26. bottom: "conv0"
  27. top: "bn0"
  28. name: "bn0"
  29. type: "BatchNorm"
  30. batch_norm_param {
  31. use_global_stats: true
  32. moving_average_fraction: 0.9
  33. eps: 2e-05
  34. }
  35. }
  36. layer {
  37. bottom: "bn0"
  38. top: "bn0"
  39. name: "bn0_scale"
  40. type: "Scale"
  41. scale_param { bias_term: true }
  42. }
  43.  
  44. layer {
  45. bottom: "bn0"
  46. top: "relu0"
  47. name: "relu0"
  48. type: "PReLU"
  49. }
  50.  
  51. layer {
  52. bottom: "relu0"
  53. top: "stage1_unit1_bn1"
  54. name: "stage1_unit1_bn1"
  55. type: "BatchNorm"
  56. batch_norm_param {
  57. use_global_stats: true
  58. moving_average_fraction: 0.9
  59. eps: 2e-05
  60. }
  61. }
  62. layer {
  63. bottom: "stage1_unit1_bn1"
  64. top: "stage1_unit1_bn1"
  65. name: "stage1_unit1_bn1_scale"
  66. type: "Scale"
  67. scale_param { bias_term: true }
  68. }
  69.  
  70. layer {
  71. bottom: "stage1_unit1_bn1"
  72. top: "stage1_unit1_conv1"
  73. name: "stage1_unit1_conv1"
  74. type: "Convolution"
  75. convolution_param {
  76. num_output: 64
  77. kernel_size: 3
  78. pad: 1
  79. stride: 1
  80. bias_term: true
  81. }
  82. }
  83.  
  84. layer {
  85. bottom: "stage1_unit1_conv1"
  86. top: "stage1_unit1_bn2"
  87. name: "stage1_unit1_bn2"
  88. type: "BatchNorm"
  89. batch_norm_param {
  90. use_global_stats: true
  91. moving_average_fraction: 0.9
  92. eps: 2e-05
  93. }
  94. }
  95. layer {
  96. bottom: "stage1_unit1_bn2"
  97. top: "stage1_unit1_bn2"
  98. name: "stage1_unit1_bn2_scale"
  99. type: "Scale"
  100. scale_param { bias_term: true }
  101. }
  102.  
  103. layer {
  104. bottom: "stage1_unit1_bn2"
  105. top: "stage1_unit1_relu1"
  106. name: "stage1_unit1_relu1"
  107. type: "PReLU"
  108. }
  109.  
  110. layer {
  111. bottom: "stage1_unit1_relu1"
  112. top: "stage1_unit1_conv2"
  113. name: "stage1_unit1_conv2"
  114. type: "Convolution"
  115. convolution_param {
  116. num_output: 64
  117. kernel_size: 3
  118. pad: 1
  119. stride: 2
  120. bias_term: true
  121. }
  122. }
  123.  
  124. layer {
  125. bottom: "stage1_unit1_conv2"
  126. top: "stage1_unit1_bn3"
  127. name: "stage1_unit1_bn3"
  128. type: "BatchNorm"
  129. batch_norm_param {
  130. use_global_stats: true
  131. moving_average_fraction: 0.9
  132. eps: 2e-05
  133. }
  134. }
  135. layer {
  136. bottom: "stage1_unit1_bn3"
  137. top: "stage1_unit1_bn3"
  138. name: "stage1_unit1_bn3_scale"
  139. type: "Scale"
  140. scale_param { bias_term: true }
  141. }
  142.  
  143. layer {
  144. bottom: "relu0"
  145. top: "stage1_unit1_conv1sc"
  146. name: "stage1_unit1_conv1sc"
  147. type: "Convolution"
  148. convolution_param {
  149. num_output: 64
  150. kernel_size: 1
  151. stride: 2
  152. bias_term: true
  153. }
  154. }
  155.  
  156. layer {
  157. bottom: "stage1_unit1_conv1sc"
  158. top: "stage1_unit1_sc"
  159. name: "stage1_unit1_sc"
  160. type: "BatchNorm"
  161. batch_norm_param {
  162. use_global_stats: true
  163. moving_average_fraction: 0.9
  164. eps: 2e-05
  165. }
  166. }
  167. layer {
  168. bottom: "stage1_unit1_sc"
  169. top: "stage1_unit1_sc"
  170. name: "stage1_unit1_sc_scale"
  171. type: "Scale"
  172. scale_param { bias_term: true }
  173. }
  174.  
  175. layer {
  176. name: "_plus0"
  177. type: "Eltwise"
  178. bottom: "stage1_unit1_bn3"
  179. bottom: "stage1_unit1_sc"
  180. top: "_plus0"
  181. eltwise_param { operation: SUM }
  182. }
  183.  
  184. layer {
  185. bottom: "_plus0"
  186. top: "stage1_unit2_bn1"
  187. name: "stage1_unit2_bn1"
  188. type: "BatchNorm"
  189. batch_norm_param {
  190. use_global_stats: true
  191. moving_average_fraction: 0.9
  192. eps: 2e-05
  193. }
  194. }
  195. layer {
  196. bottom: "stage1_unit2_bn1"
  197. top: "stage1_unit2_bn1"
  198. name: "stage1_unit2_bn1_scale"
  199. type: "Scale"
  200. scale_param { bias_term: true }
  201. }
  202.  
  203. layer {
  204. bottom: "stage1_unit2_bn1"
  205. top: "stage1_unit2_conv1"
  206. name: "stage1_unit2_conv1"
  207. type: "Convolution"
  208. convolution_param {
  209. num_output: 64
  210. kernel_size: 3
  211. pad: 1
  212. stride: 1
  213. bias_term: true
  214. }
  215. }
  216.  
  217. layer {
  218. bottom: "stage1_unit2_conv1"
  219. top: "stage1_unit2_bn2"
  220. name: "stage1_unit2_bn2"
  221. type: "BatchNorm"
  222. batch_norm_param {
  223. use_global_stats: true
  224. moving_average_fraction: 0.9
  225. eps: 2e-05
  226. }
  227. }
  228. layer {
  229. bottom: "stage1_unit2_bn2"
  230. top: "stage1_unit2_bn2"
  231. name: "stage1_unit2_bn2_scale"
  232. type: "Scale"
  233. scale_param { bias_term: true }
  234. }
  235.  
  236. layer {
  237. bottom: "stage1_unit2_bn2"
  238. top: "stage1_unit2_relu1"
  239. name: "stage1_unit2_relu1"
  240. type: "PReLU"
  241. }
  242.  
  243. layer {
  244. bottom: "stage1_unit2_relu1"
  245. top: "stage1_unit2_conv2"
  246. name: "stage1_unit2_conv2"
  247. type: "Convolution"
  248. convolution_param {
  249. num_output: 64
  250. kernel_size: 3
  251. pad: 1
  252. stride: 1
  253. bias_term: true
  254. }
  255. }
  256.  
  257. layer {
  258. bottom: "stage1_unit2_conv2"
  259. top: "stage1_unit2_bn3"
  260. name: "stage1_unit2_bn3"
  261. type: "BatchNorm"
  262. batch_norm_param {
  263. use_global_stats: true
  264. moving_average_fraction: 0.9
  265. eps: 2e-05
  266. }
  267. }
  268. layer {
  269. bottom: "stage1_unit2_bn3"
  270. top: "stage1_unit2_bn3"
  271. name: "stage1_unit2_bn3_scale"
  272. type: "Scale"
  273. scale_param { bias_term: true }
  274. }
  275.  
  276. layer {
  277. name: "_plus1"
  278. type: "Eltwise"
  279. bottom: "stage1_unit2_bn3"
  280. bottom: "_plus0"
  281. top: "_plus1"
  282. eltwise_param { operation: SUM }
  283. }
  284.  
  285. layer {
  286. bottom: "_plus1"
  287. top: "stage1_unit3_bn1"
  288. name: "stage1_unit3_bn1"
  289. type: "BatchNorm"
  290. batch_norm_param {
  291. use_global_stats: true
  292. moving_average_fraction: 0.9
  293. eps: 2e-05
  294. }
  295. }
  296. layer {
  297. bottom: "stage1_unit3_bn1"
  298. top: "stage1_unit3_bn1"
  299. name: "stage1_unit3_bn1_scale"
  300. type: "Scale"
  301. scale_param { bias_term: true }
  302. }
  303.  
  304. layer {
  305. bottom: "stage1_unit3_bn1"
  306. top: "stage1_unit3_conv1"
  307. name: "stage1_unit3_conv1"
  308. type: "Convolution"
  309. convolution_param {
  310. num_output: 64
  311. kernel_size: 3
  312. pad: 1
  313. stride: 1
  314. bias_term: true
  315. }
  316. }
  317.  
  318. layer {
  319. bottom: "stage1_unit3_conv1"
  320. top: "stage1_unit3_bn2"
  321. name: "stage1_unit3_bn2"
  322. type: "BatchNorm"
  323. batch_norm_param {
  324. use_global_stats: true
  325. moving_average_fraction: 0.9
  326. eps: 2e-05
  327. }
  328. }
  329. layer {
  330. bottom: "stage1_unit3_bn2"
  331. top: "stage1_unit3_bn2"
  332. name: "stage1_unit3_bn2_scale"
  333. type: "Scale"
  334. scale_param { bias_term: true }
  335. }
  336.  
  337. layer {
  338. bottom: "stage1_unit3_bn2"
  339. top: "stage1_unit3_relu1"
  340. name: "stage1_unit3_relu1"
  341. type: "PReLU"
  342. }
  343.  
  344. layer {
  345. bottom: "stage1_unit3_relu1"
  346. top: "stage1_unit3_conv2"
  347. name: "stage1_unit3_conv2"
  348. type: "Convolution"
  349. convolution_param {
  350. num_output: 64
  351. kernel_size: 3
  352. pad: 1
  353. stride: 1
  354. bias_term: true
  355. }
  356. }
  357.  
  358. layer {
  359. bottom: "stage1_unit3_conv2"
  360. top: "stage1_unit3_bn3"
  361. name: "stage1_unit3_bn3"
  362. type: "BatchNorm"
  363. batch_norm_param {
  364. use_global_stats: true
  365. moving_average_fraction: 0.9
  366. eps: 2e-05
  367. }
  368. }
  369. layer {
  370. bottom: "stage1_unit3_bn3"
  371. top: "stage1_unit3_bn3"
  372. name: "stage1_unit3_bn3_scale"
  373. type: "Scale"
  374. scale_param { bias_term: true }
  375. }
  376.  
  377. layer {
  378. name: "_plus2"
  379. type: "Eltwise"
  380. bottom: "stage1_unit3_bn3"
  381. bottom: "_plus1"
  382. top: "_plus2"
  383. eltwise_param { operation: SUM }
  384. }
  385.  
  386. layer {
  387. bottom: "_plus2"
  388. top: "stage2_unit1_bn1"
  389. name: "stage2_unit1_bn1"
  390. type: "BatchNorm"
  391. batch_norm_param {
  392. use_global_stats: true
  393. moving_average_fraction: 0.9
  394. eps: 2e-05
  395. }
  396. }
  397. layer {
  398. bottom: "stage2_unit1_bn1"
  399. top: "stage2_unit1_bn1"
  400. name: "stage2_unit1_bn1_scale"
  401. type: "Scale"
  402. scale_param { bias_term: true }
  403. }
  404.  
  405. layer {
  406. bottom: "stage2_unit1_bn1"
  407. top: "stage2_unit1_conv1"
  408. name: "stage2_unit1_conv1"
  409. type: "Convolution"
  410. convolution_param {
  411. num_output: 128
  412. kernel_size: 3
  413. pad: 1
  414. stride: 1
  415. bias_term: true
  416. }
  417. }
  418.  
  419. layer {
  420. bottom: "stage2_unit1_conv1"
  421. top: "stage2_unit1_bn2"
  422. name: "stage2_unit1_bn2"
  423. type: "BatchNorm"
  424. batch_norm_param {
  425. use_global_stats: true
  426. moving_average_fraction: 0.9
  427. eps: 2e-05
  428. }
  429. }
  430. layer {
  431. bottom: "stage2_unit1_bn2"
  432. top: "stage2_unit1_bn2"
  433. name: "stage2_unit1_bn2_scale"
  434. type: "Scale"
  435. scale_param { bias_term: true }
  436. }
  437.  
  438. layer {
  439. bottom: "stage2_unit1_bn2"
  440. top: "stage2_unit1_relu1"
  441. name: "stage2_unit1_relu1"
  442. type: "PReLU"
  443. }
  444.  
  445. layer {
  446. bottom: "stage2_unit1_relu1"
  447. top: "stage2_unit1_conv2"
  448. name: "stage2_unit1_conv2"
  449. type: "Convolution"
  450. convolution_param {
  451. num_output: 128
  452. kernel_size: 3
  453. pad: 1
  454. stride: 2
  455. bias_term: true
  456. }
  457. }
  458.  
  459. layer {
  460. bottom: "stage2_unit1_conv2"
  461. top: "stage2_unit1_bn3"
  462. name: "stage2_unit1_bn3"
  463. type: "BatchNorm"
  464. batch_norm_param {
  465. use_global_stats: true
  466. moving_average_fraction: 0.9
  467. eps: 2e-05
  468. }
  469. }
  470. layer {
  471. bottom: "stage2_unit1_bn3"
  472. top: "stage2_unit1_bn3"
  473. name: "stage2_unit1_bn3_scale"
  474. type: "Scale"
  475. scale_param { bias_term: true }
  476. }
  477.  
  478. layer {
  479. bottom: "_plus2"
  480. top: "stage2_unit1_conv1sc"
  481. name: "stage2_unit1_conv1sc"
  482. type: "Convolution"
  483. convolution_param {
  484. num_output: 128
  485. kernel_size: 1
  486. stride: 2
  487. bias_term: true
  488. }
  489. }
  490.  
  491. layer {
  492. bottom: "stage2_unit1_conv1sc"
  493. top: "stage2_unit1_sc"
  494. name: "stage2_unit1_sc"
  495. type: "BatchNorm"
  496. batch_norm_param {
  497. use_global_stats: true
  498. moving_average_fraction: 0.9
  499. eps: 2e-05
  500. }
  501. }
  502. layer {
  503. bottom: "stage2_unit1_sc"
  504. top: "stage2_unit1_sc"
  505. name: "stage2_unit1_sc_scale"
  506. type: "Scale"
  507. scale_param { bias_term: true }
  508. }
  509.  
  510. layer {
  511. name: "_plus3"
  512. type: "Eltwise"
  513. bottom: "stage2_unit1_bn3"
  514. bottom: "stage2_unit1_sc"
  515. top: "_plus3"
  516. eltwise_param { operation: SUM }
  517. }
  518.  
  519. layer {
  520. bottom: "_plus3"
  521. top: "stage2_unit2_bn1"
  522. name: "stage2_unit2_bn1"
  523. type: "BatchNorm"
  524. batch_norm_param {
  525. use_global_stats: true
  526. moving_average_fraction: 0.9
  527. eps: 2e-05
  528. }
  529. }
  530. layer {
  531. bottom: "stage2_unit2_bn1"
  532. top: "stage2_unit2_bn1"
  533. name: "stage2_unit2_bn1_scale"
  534. type: "Scale"
  535. scale_param { bias_term: true }
  536. }
  537.  
  538. layer {
  539. bottom: "stage2_unit2_bn1"
  540. top: "stage2_unit2_conv1"
  541. name: "stage2_unit2_conv1"
  542. type: "Convolution"
  543. convolution_param {
  544. num_output: 128
  545. kernel_size: 3
  546. pad: 1
  547. stride: 1
  548. bias_term: true
  549. }
  550. }
  551.  
  552. layer {
  553. bottom: "stage2_unit2_conv1"
  554. top: "stage2_unit2_bn2"
  555. name: "stage2_unit2_bn2"
  556. type: "BatchNorm"
  557. batch_norm_param {
  558. use_global_stats: true
  559. moving_average_fraction: 0.9
  560. eps: 2e-05
  561. }
  562. }
  563. layer {
  564. bottom: "stage2_unit2_bn2"
  565. top: "stage2_unit2_bn2"
  566. name: "stage2_unit2_bn2_scale"
  567. type: "Scale"
  568. scale_param { bias_term: true }
  569. }
  570.  
  571. layer {
  572. bottom: "stage2_unit2_bn2"
  573. top: "stage2_unit2_relu1"
  574. name: "stage2_unit2_relu1"
  575. type: "PReLU"
  576. }
  577.  
  578. layer {
  579. bottom: "stage2_unit2_relu1"
  580. top: "stage2_unit2_conv2"
  581. name: "stage2_unit2_conv2"
  582. type: "Convolution"
  583. convolution_param {
  584. num_output: 128
  585. kernel_size: 3
  586. pad: 1
  587. stride: 1
  588. bias_term: true
  589. }
  590. }
  591.  
  592. layer {
  593. bottom: "stage2_unit2_conv2"
  594. top: "stage2_unit2_bn3"
  595. name: "stage2_unit2_bn3"
  596. type: "BatchNorm"
  597. batch_norm_param {
  598. use_global_stats: true
  599. moving_average_fraction: 0.9
  600. eps: 2e-05
  601. }
  602. }
  603. layer {
  604. bottom: "stage2_unit2_bn3"
  605. top: "stage2_unit2_bn3"
  606. name: "stage2_unit2_bn3_scale"
  607. type: "Scale"
  608. scale_param { bias_term: true }
  609. }
  610.  
  611. layer {
  612. name: "_plus4"
  613. type: "Eltwise"
  614. bottom: "stage2_unit2_bn3"
  615. bottom: "_plus3"
  616. top: "_plus4"
  617. eltwise_param { operation: SUM }
  618. }
  619.  
  620. layer {
  621. bottom: "_plus4"
  622. top: "stage2_unit3_bn1"
  623. name: "stage2_unit3_bn1"
  624. type: "BatchNorm"
  625. batch_norm_param {
  626. use_global_stats: true
  627. moving_average_fraction: 0.9
  628. eps: 2e-05
  629. }
  630. }
  631. layer {
  632. bottom: "stage2_unit3_bn1"
  633. top: "stage2_unit3_bn1"
  634. name: "stage2_unit3_bn1_scale"
  635. type: "Scale"
  636. scale_param { bias_term: true }
  637. }
  638.  
  639. layer {
  640. bottom: "stage2_unit3_bn1"
  641. top: "stage2_unit3_conv1"
  642. name: "stage2_unit3_conv1"
  643. type: "Convolution"
  644. convolution_param {
  645. num_output: 128
  646. kernel_size: 3
  647. pad: 1
  648. stride: 1
  649. bias_term: true
  650. }
  651. }
  652.  
  653. layer {
  654. bottom: "stage2_unit3_conv1"
  655. top: "stage2_unit3_bn2"
  656. name: "stage2_unit3_bn2"
  657. type: "BatchNorm"
  658. batch_norm_param {
  659. use_global_stats: true
  660. moving_average_fraction: 0.9
  661. eps: 2e-05
  662. }
  663. }
  664. layer {
  665. bottom: "stage2_unit3_bn2"
  666. top: "stage2_unit3_bn2"
  667. name: "stage2_unit3_bn2_scale"
  668. type: "Scale"
  669. scale_param { bias_term: true }
  670. }
  671.  
  672. layer {
  673. bottom: "stage2_unit3_bn2"
  674. top: "stage2_unit3_relu1"
  675. name: "stage2_unit3_relu1"
  676. type: "PReLU"
  677. }
  678.  
  679. layer {
  680. bottom: "stage2_unit3_relu1"
  681. top: "stage2_unit3_conv2"
  682. name: "stage2_unit3_conv2"
  683. type: "Convolution"
  684. convolution_param {
  685. num_output: 128
  686. kernel_size: 3
  687. pad: 1
  688. stride: 1
  689. bias_term: true
  690. }
  691. }
  692.  
  693. layer {
  694. bottom: "stage2_unit3_conv2"
  695. top: "stage2_unit3_bn3"
  696. name: "stage2_unit3_bn3"
  697. type: "BatchNorm"
  698. batch_norm_param {
  699. use_global_stats: true
  700. moving_average_fraction: 0.9
  701. eps: 2e-05
  702. }
  703. }
  704. layer {
  705. bottom: "stage2_unit3_bn3"
  706. top: "stage2_unit3_bn3"
  707. name: "stage2_unit3_bn3_scale"
  708. type: "Scale"
  709. scale_param { bias_term: true }
  710. }
  711.  
  712. layer {
  713. name: "_plus5"
  714. type: "Eltwise"
  715. bottom: "stage2_unit3_bn3"
  716. bottom: "_plus4"
  717. top: "_plus5"
  718. eltwise_param { operation: SUM }
  719. }
  720.  
  721. layer {
  722. bottom: "_plus5"
  723. top: "stage2_unit4_bn1"
  724. name: "stage2_unit4_bn1"
  725. type: "BatchNorm"
  726. batch_norm_param {
  727. use_global_stats: true
  728. moving_average_fraction: 0.9
  729. eps: 2e-05
  730. }
  731. }
  732. layer {
  733. bottom: "stage2_unit4_bn1"
  734. top: "stage2_unit4_bn1"
  735. name: "stage2_unit4_bn1_scale"
  736. type: "Scale"
  737. scale_param { bias_term: true }
  738. }
  739.  
  740. layer {
  741. bottom: "stage2_unit4_bn1"
  742. top: "stage2_unit4_conv1"
  743. name: "stage2_unit4_conv1"
  744. type: "Convolution"
  745. convolution_param {
  746. num_output: 128
  747. kernel_size: 3
  748. pad: 1
  749. stride: 1
  750. bias_term: true
  751. }
  752. }
  753.  
  754. layer {
  755. bottom: "stage2_unit4_conv1"
  756. top: "stage2_unit4_bn2"
  757. name: "stage2_unit4_bn2"
  758. type: "BatchNorm"
  759. batch_norm_param {
  760. use_global_stats: true
  761. moving_average_fraction: 0.9
  762. eps: 2e-05
  763. }
  764. }
  765. layer {
  766. bottom: "stage2_unit4_bn2"
  767. top: "stage2_unit4_bn2"
  768. name: "stage2_unit4_bn2_scale"
  769. type: "Scale"
  770. scale_param { bias_term: true }
  771. }
  772.  
  773. layer {
  774. bottom: "stage2_unit4_bn2"
  775. top: "stage2_unit4_relu1"
  776. name: "stage2_unit4_relu1"
  777. type: "PReLU"
  778. }
  779.  
  780. layer {
  781. bottom: "stage2_unit4_relu1"
  782. top: "stage2_unit4_conv2"
  783. name: "stage2_unit4_conv2"
  784. type: "Convolution"
  785. convolution_param {
  786. num_output: 128
  787. kernel_size: 3
  788. pad: 1
  789. stride: 1
  790. bias_term: true
  791. }
  792. }
  793.  
  794. layer {
  795. bottom: "stage2_unit4_conv2"
  796. top: "stage2_unit4_bn3"
  797. name: "stage2_unit4_bn3"
  798. type: "BatchNorm"
  799. batch_norm_param {
  800. use_global_stats: true
  801. moving_average_fraction: 0.9
  802. eps: 2e-05
  803. }
  804. }
  805. layer {
  806. bottom: "stage2_unit4_bn3"
  807. top: "stage2_unit4_bn3"
  808. name: "stage2_unit4_bn3_scale"
  809. type: "Scale"
  810. scale_param { bias_term: true }
  811. }
  812.  
  813. layer {
  814. name: "_plus6"
  815. type: "Eltwise"
  816. bottom: "stage2_unit4_bn3"
  817. bottom: "_plus5"
  818. top: "_plus6"
  819. eltwise_param { operation: SUM }
  820. }
  821.  
  822. layer {
  823. bottom: "_plus6"
  824. top: "stage3_unit1_bn1"
  825. name: "stage3_unit1_bn1"
  826. type: "BatchNorm"
  827. batch_norm_param {
  828. use_global_stats: true
  829. moving_average_fraction: 0.9
  830. eps: 2e-05
  831. }
  832. }
  833. layer {
  834. bottom: "stage3_unit1_bn1"
  835. top: "stage3_unit1_bn1"
  836. name: "stage3_unit1_bn1_scale"
  837. type: "Scale"
  838. scale_param { bias_term: true }
  839. }
  840.  
  841. layer {
  842. bottom: "stage3_unit1_bn1"
  843. top: "stage3_unit1_conv1"
  844. name: "stage3_unit1_conv1"
  845. type: "Convolution"
  846. convolution_param {
  847. num_output: 256
  848. kernel_size: 3
  849. pad: 1
  850. stride: 1
  851. bias_term: true
  852. }
  853. }
  854.  
  855. layer {
  856. bottom: "stage3_unit1_conv1"
  857. top: "stage3_unit1_bn2"
  858. name: "stage3_unit1_bn2"
  859. type: "BatchNorm"
  860. batch_norm_param {
  861. use_global_stats: true
  862. moving_average_fraction: 0.9
  863. eps: 2e-05
  864. }
  865. }
  866. layer {
  867. bottom: "stage3_unit1_bn2"
  868. top: "stage3_unit1_bn2"
  869. name: "stage3_unit1_bn2_scale"
  870. type: "Scale"
  871. scale_param { bias_term: true }
  872. }
  873.  
  874. layer {
  875. bottom: "stage3_unit1_bn2"
  876. top: "stage3_unit1_relu1"
  877. name: "stage3_unit1_relu1"
  878. type: "PReLU"
  879. }
  880.  
  881. layer {
  882. bottom: "stage3_unit1_relu1"
  883. top: "stage3_unit1_conv2"
  884. name: "stage3_unit1_conv2"
  885. type: "Convolution"
  886. convolution_param {
  887. num_output: 256
  888. kernel_size: 3
  889. pad: 1
  890. stride: 2
  891. bias_term: true
  892. }
  893. }
  894.  
  895. layer {
  896. bottom: "stage3_unit1_conv2"
  897. top: "stage3_unit1_bn3"
  898. name: "stage3_unit1_bn3"
  899. type: "BatchNorm"
  900. batch_norm_param {
  901. use_global_stats: true
  902. moving_average_fraction: 0.9
  903. eps: 2e-05
  904. }
  905. }
  906. layer {
  907. bottom: "stage3_unit1_bn3"
  908. top: "stage3_unit1_bn3"
  909. name: "stage3_unit1_bn3_scale"
  910. type: "Scale"
  911. scale_param { bias_term: true }
  912. }
  913.  
  914. layer {
  915. bottom: "_plus6"
  916. top: "stage3_unit1_conv1sc"
  917. name: "stage3_unit1_conv1sc"
  918. type: "Convolution"
  919. convolution_param {
  920. num_output: 256
  921. kernel_size: 1
  922. stride: 2
  923. bias_term: true
  924. }
  925. }
  926.  
  927. layer {
  928. bottom: "stage3_unit1_conv1sc"
  929. top: "stage3_unit1_sc"
  930. name: "stage3_unit1_sc"
  931. type: "BatchNorm"
  932. batch_norm_param {
  933. use_global_stats: true
  934. moving_average_fraction: 0.9
  935. eps: 2e-05
  936. }
  937. }
  938. layer {
  939. bottom: "stage3_unit1_sc"
  940. top: "stage3_unit1_sc"
  941. name: "stage3_unit1_sc_scale"
  942. type: "Scale"
  943. scale_param { bias_term: true }
  944. }
  945.  
  946. layer {
  947. name: "_plus7"
  948. type: "Eltwise"
  949. bottom: "stage3_unit1_bn3"
  950. bottom: "stage3_unit1_sc"
  951. top: "_plus7"
  952. eltwise_param { operation: SUM }
  953. }
  954.  
  955. layer {
  956. bottom: "_plus7"
  957. top: "stage3_unit2_bn1"
  958. name: "stage3_unit2_bn1"
  959. type: "BatchNorm"
  960. batch_norm_param {
  961. use_global_stats: true
  962. moving_average_fraction: 0.9
  963. eps: 2e-05
  964. }
  965. }
  966. layer {
  967. bottom: "stage3_unit2_bn1"
  968. top: "stage3_unit2_bn1"
  969. name: "stage3_unit2_bn1_scale"
  970. type: "Scale"
  971. scale_param { bias_term: true }
  972. }
  973.  
  974. layer {
  975. bottom: "stage3_unit2_bn1"
  976. top: "stage3_unit2_conv1"
  977. name: "stage3_unit2_conv1"
  978. type: "Convolution"
  979. convolution_param {
  980. num_output: 256
  981. kernel_size: 3
  982. pad: 1
  983. stride: 1
  984. bias_term: true
  985. }
  986. }
  987.  
  988. layer {
  989. bottom: "stage3_unit2_conv1"
  990. top: "stage3_unit2_bn2"
  991. name: "stage3_unit2_bn2"
  992. type: "BatchNorm"
  993. batch_norm_param {
  994. use_global_stats: true
  995. moving_average_fraction: 0.9
  996. eps: 2e-05
  997. }
  998. }
  999. layer {
  1000. bottom: "stage3_unit2_bn2"
  1001. top: "stage3_unit2_bn2"
  1002. name: "stage3_unit2_bn2_scale"
  1003. type: "Scale"
  1004. scale_param { bias_term: true }
  1005. }
  1006.  
  1007. layer {
  1008. bottom: "stage3_unit2_bn2"
  1009. top: "stage3_unit2_relu1"
  1010. name: "stage3_unit2_relu1"
  1011. type: "PReLU"
  1012. }
  1013.  
  1014. layer {
  1015. bottom: "stage3_unit2_relu1"
  1016. top: "stage3_unit2_conv2"
  1017. name: "stage3_unit2_conv2"
  1018. type: "Convolution"
  1019. convolution_param {
  1020. num_output: 256
  1021. kernel_size: 3
  1022. pad: 1
  1023. stride: 1
  1024. bias_term: true
  1025. }
  1026. }
  1027.  
  1028. layer {
  1029. bottom: "stage3_unit2_conv2"
  1030. top: "stage3_unit2_bn3"
  1031. name: "stage3_unit2_bn3"
  1032. type: "BatchNorm"
  1033. batch_norm_param {
  1034. use_global_stats: true
  1035. moving_average_fraction: 0.9
  1036. eps: 2e-05
  1037. }
  1038. }
  1039. layer {
  1040. bottom: "stage3_unit2_bn3"
  1041. top: "stage3_unit2_bn3"
  1042. name: "stage3_unit2_bn3_scale"
  1043. type: "Scale"
  1044. scale_param { bias_term: true }
  1045. }
  1046.  
  1047. layer {
  1048. name: "_plus8"
  1049. type: "Eltwise"
  1050. bottom: "stage3_unit2_bn3"
  1051. bottom: "_plus7"
  1052. top: "_plus8"
  1053. eltwise_param { operation: SUM }
  1054. }
  1055.  
  1056. layer {
  1057. bottom: "_plus8"
  1058. top: "stage3_unit3_bn1"
  1059. name: "stage3_unit3_bn1"
  1060. type: "BatchNorm"
  1061. batch_norm_param {
  1062. use_global_stats: true
  1063. moving_average_fraction: 0.9
  1064. eps: 2e-05
  1065. }
  1066. }
  1067. layer {
  1068. bottom: "stage3_unit3_bn1"
  1069. top: "stage3_unit3_bn1"
  1070. name: "stage3_unit3_bn1_scale"
  1071. type: "Scale"
  1072. scale_param { bias_term: true }
  1073. }
  1074.  
  1075. layer {
  1076. bottom: "stage3_unit3_bn1"
  1077. top: "stage3_unit3_conv1"
  1078. name: "stage3_unit3_conv1"
  1079. type: "Convolution"
  1080. convolution_param {
  1081. num_output: 256
  1082. kernel_size: 3
  1083. pad: 1
  1084. stride: 1
  1085. bias_term: true
  1086. }
  1087. }
  1088.  
  1089. layer {
  1090. bottom: "stage3_unit3_conv1"
  1091. top: "stage3_unit3_bn2"
  1092. name: "stage3_unit3_bn2"
  1093. type: "BatchNorm"
  1094. batch_norm_param {
  1095. use_global_stats: true
  1096. moving_average_fraction: 0.9
  1097. eps: 2e-05
  1098. }
  1099. }
  1100. layer {
  1101. bottom: "stage3_unit3_bn2"
  1102. top: "stage3_unit3_bn2"
  1103. name: "stage3_unit3_bn2_scale"
  1104. type: "Scale"
  1105. scale_param { bias_term: true }
  1106. }
  1107.  
  1108. layer {
  1109. bottom: "stage3_unit3_bn2"
  1110. top: "stage3_unit3_relu1"
  1111. name: "stage3_unit3_relu1"
  1112. type: "PReLU"
  1113. }
  1114.  
  1115. layer {
  1116. bottom: "stage3_unit3_relu1"
  1117. top: "stage3_unit3_conv2"
  1118. name: "stage3_unit3_conv2"
  1119. type: "Convolution"
  1120. convolution_param {
  1121. num_output: 256
  1122. kernel_size: 3
  1123. pad: 1
  1124. stride: 1
  1125. bias_term: true
  1126. }
  1127. }
  1128.  
  1129. layer {
  1130. bottom: "stage3_unit3_conv2"
  1131. top: "stage3_unit3_bn3"
  1132. name: "stage3_unit3_bn3"
  1133. type: "BatchNorm"
  1134. batch_norm_param {
  1135. use_global_stats: true
  1136. moving_average_fraction: 0.9
  1137. eps: 2e-05
  1138. }
  1139. }
  1140. layer {
  1141. bottom: "stage3_unit3_bn3"
  1142. top: "stage3_unit3_bn3"
  1143. name: "stage3_unit3_bn3_scale"
  1144. type: "Scale"
  1145. scale_param { bias_term: true }
  1146. }
  1147.  
  1148. layer {
  1149. name: "_plus9"
  1150. type: "Eltwise"
  1151. bottom: "stage3_unit3_bn3"
  1152. bottom: "_plus8"
  1153. top: "_plus9"
  1154. eltwise_param { operation: SUM }
  1155. }
  1156.  
  1157. layer {
  1158. bottom: "_plus9"
  1159. top: "stage3_unit4_bn1"
  1160. name: "stage3_unit4_bn1"
  1161. type: "BatchNorm"
  1162. batch_norm_param {
  1163. use_global_stats: true
  1164. moving_average_fraction: 0.9
  1165. eps: 2e-05
  1166. }
  1167. }
  1168. layer {
  1169. bottom: "stage3_unit4_bn1"
  1170. top: "stage3_unit4_bn1"
  1171. name: "stage3_unit4_bn1_scale"
  1172. type: "Scale"
  1173. scale_param { bias_term: true }
  1174. }
  1175.  
  1176. layer {
  1177. bottom: "stage3_unit4_bn1"
  1178. top: "stage3_unit4_conv1"
  1179. name: "stage3_unit4_conv1"
  1180. type: "Convolution"
  1181. convolution_param {
  1182. num_output: 256
  1183. kernel_size: 3
  1184. pad: 1
  1185. stride: 1
  1186. bias_term: true
  1187. }
  1188. }
  1189.  
  1190. layer {
  1191. bottom: "stage3_unit4_conv1"
  1192. top: "stage3_unit4_bn2"
  1193. name: "stage3_unit4_bn2"
  1194. type: "BatchNorm"
  1195. batch_norm_param {
  1196. use_global_stats: true
  1197. moving_average_fraction: 0.9
  1198. eps: 2e-05
  1199. }
  1200. }
  1201. layer {
  1202. bottom: "stage3_unit4_bn2"
  1203. top: "stage3_unit4_bn2"
  1204. name: "stage3_unit4_bn2_scale"
  1205. type: "Scale"
  1206. scale_param { bias_term: true }
  1207. }
  1208.  
  1209. layer {
  1210. bottom: "stage3_unit4_bn2"
  1211. top: "stage3_unit4_relu1"
  1212. name: "stage3_unit4_relu1"
  1213. type: "PReLU"
  1214. }
  1215.  
  1216. layer {
  1217. bottom: "stage3_unit4_relu1"
  1218. top: "stage3_unit4_conv2"
  1219. name: "stage3_unit4_conv2"
  1220. type: "Convolution"
  1221. convolution_param {
  1222. num_output: 256
  1223. kernel_size: 3
  1224. pad: 1
  1225. stride: 1
  1226. bias_term: true
  1227. }
  1228. }
  1229.  
  1230. layer {
  1231. bottom: "stage3_unit4_conv2"
  1232. top: "stage3_unit4_bn3"
  1233. name: "stage3_unit4_bn3"
  1234. type: "BatchNorm"
  1235. batch_norm_param {
  1236. use_global_stats: true
  1237. moving_average_fraction: 0.9
  1238. eps: 2e-05
  1239. }
  1240. }
  1241. layer {
  1242. bottom: "stage3_unit4_bn3"
  1243. top: "stage3_unit4_bn3"
  1244. name: "stage3_unit4_bn3_scale"
  1245. type: "Scale"
  1246. scale_param { bias_term: true }
  1247. }
  1248.  
  1249. layer {
  1250. name: "_plus10"
  1251. type: "Eltwise"
  1252. bottom: "stage3_unit4_bn3"
  1253. bottom: "_plus9"
  1254. top: "_plus10"
  1255. eltwise_param { operation: SUM }
  1256. }
  1257.  
  1258. layer {
  1259. bottom: "_plus10"
  1260. top: "stage3_unit5_bn1"
  1261. name: "stage3_unit5_bn1"
  1262. type: "BatchNorm"
  1263. batch_norm_param {
  1264. use_global_stats: true
  1265. moving_average_fraction: 0.9
  1266. eps: 2e-05
  1267. }
  1268. }
  1269. layer {
  1270. bottom: "stage3_unit5_bn1"
  1271. top: "stage3_unit5_bn1"
  1272. name: "stage3_unit5_bn1_scale"
  1273. type: "Scale"
  1274. scale_param { bias_term: true }
  1275. }
  1276.  
  1277. layer {
  1278. bottom: "stage3_unit5_bn1"
  1279. top: "stage3_unit5_conv1"
  1280. name: "stage3_unit5_conv1"
  1281. type: "Convolution"
  1282. convolution_param {
  1283. num_output: 256
  1284. kernel_size: 3
  1285. pad: 1
  1286. stride: 1
  1287. bias_term: true
  1288. }
  1289. }
  1290.  
  1291. layer {
  1292. bottom: "stage3_unit5_conv1"
  1293. top: "stage3_unit5_bn2"
  1294. name: "stage3_unit5_bn2"
  1295. type: "BatchNorm"
  1296. batch_norm_param {
  1297. use_global_stats: true
  1298. moving_average_fraction: 0.9
  1299. eps: 2e-05
  1300. }
  1301. }
  1302. layer {
  1303. bottom: "stage3_unit5_bn2"
  1304. top: "stage3_unit5_bn2"
  1305. name: "stage3_unit5_bn2_scale"
  1306. type: "Scale"
  1307. scale_param { bias_term: true }
  1308. }
  1309.  
  1310. layer {
  1311. bottom: "stage3_unit5_bn2"
  1312. top: "stage3_unit5_relu1"
  1313. name: "stage3_unit5_relu1"
  1314. type: "PReLU"
  1315. }
  1316.  
  1317. layer {
  1318. bottom: "stage3_unit5_relu1"
  1319. top: "stage3_unit5_conv2"
  1320. name: "stage3_unit5_conv2"
  1321. type: "Convolution"
  1322. convolution_param {
  1323. num_output: 256
  1324. kernel_size: 3
  1325. pad: 1
  1326. stride: 1
  1327. bias_term: true
  1328. }
  1329. }
  1330.  
  1331. layer {
  1332. bottom: "stage3_unit5_conv2"
  1333. top: "stage3_unit5_bn3"
  1334. name: "stage3_unit5_bn3"
  1335. type: "BatchNorm"
  1336. batch_norm_param {
  1337. use_global_stats: true
  1338. moving_average_fraction: 0.9
  1339. eps: 2e-05
  1340. }
  1341. }
  1342. layer {
  1343. bottom: "stage3_unit5_bn3"
  1344. top: "stage3_unit5_bn3"
  1345. name: "stage3_unit5_bn3_scale"
  1346. type: "Scale"
  1347. scale_param { bias_term: true }
  1348. }
  1349.  
  1350. layer {
  1351. name: "_plus11"
  1352. type: "Eltwise"
  1353. bottom: "stage3_unit5_bn3"
  1354. bottom: "_plus10"
  1355. top: "_plus11"
  1356. eltwise_param { operation: SUM }
  1357. }
  1358.  
  1359. layer {
  1360. bottom: "_plus11"
  1361. top: "stage3_unit6_bn1"
  1362. name: "stage3_unit6_bn1"
  1363. type: "BatchNorm"
  1364. batch_norm_param {
  1365. use_global_stats: true
  1366. moving_average_fraction: 0.9
  1367. eps: 2e-05
  1368. }
  1369. }
  1370. layer {
  1371. bottom: "stage3_unit6_bn1"
  1372. top: "stage3_unit6_bn1"
  1373. name: "stage3_unit6_bn1_scale"
  1374. type: "Scale"
  1375. scale_param { bias_term: true }
  1376. }
  1377.  
  1378. layer {
  1379. bottom: "stage3_unit6_bn1"
  1380. top: "stage3_unit6_conv1"
  1381. name: "stage3_unit6_conv1"
  1382. type: "Convolution"
  1383. convolution_param {
  1384. num_output: 256
  1385. kernel_size: 3
  1386. pad: 1
  1387. stride: 1
  1388. bias_term: true
  1389. }
  1390. }
  1391.  
  1392. layer {
  1393. bottom: "stage3_unit6_conv1"
  1394. top: "stage3_unit6_bn2"
  1395. name: "stage3_unit6_bn2"
  1396. type: "BatchNorm"
  1397. batch_norm_param {
  1398. use_global_stats: true
  1399. moving_average_fraction: 0.9
  1400. eps: 2e-05
  1401. }
  1402. }
  1403. layer {
  1404. bottom: "stage3_unit6_bn2"
  1405. top: "stage3_unit6_bn2"
  1406. name: "stage3_unit6_bn2_scale"
  1407. type: "Scale"
  1408. scale_param { bias_term: true }
  1409. }
  1410.  
  1411. layer {
  1412. bottom: "stage3_unit6_bn2"
  1413. top: "stage3_unit6_relu1"
  1414. name: "stage3_unit6_relu1"
  1415. type: "PReLU"
  1416. }
  1417.  
  1418. layer {
  1419. bottom: "stage3_unit6_relu1"
  1420. top: "stage3_unit6_conv2"
  1421. name: "stage3_unit6_conv2"
  1422. type: "Convolution"
  1423. convolution_param {
  1424. num_output: 256
  1425. kernel_size: 3
  1426. pad: 1
  1427. stride: 1
  1428. bias_term: true
  1429. }
  1430. }
  1431.  
  1432. layer {
  1433. bottom: "stage3_unit6_conv2"
  1434. top: "stage3_unit6_bn3"
  1435. name: "stage3_unit6_bn3"
  1436. type: "BatchNorm"
  1437. batch_norm_param {
  1438. use_global_stats: true
  1439. moving_average_fraction: 0.9
  1440. eps: 2e-05
  1441. }
  1442. }
  1443. layer {
  1444. bottom: "stage3_unit6_bn3"
  1445. top: "stage3_unit6_bn3"
  1446. name: "stage3_unit6_bn3_scale"
  1447. type: "Scale"
  1448. scale_param { bias_term: true }
  1449. }
  1450.  
  1451. layer {
  1452. name: "_plus12"
  1453. type: "Eltwise"
  1454. bottom: "stage3_unit6_bn3"
  1455. bottom: "_plus11"
  1456. top: "_plus12"
  1457. eltwise_param { operation: SUM }
  1458. }
  1459.  
  1460. layer {
  1461. bottom: "_plus12"
  1462. top: "stage3_unit7_bn1"
  1463. name: "stage3_unit7_bn1"
  1464. type: "BatchNorm"
  1465. batch_norm_param {
  1466. use_global_stats: true
  1467. moving_average_fraction: 0.9
  1468. eps: 2e-05
  1469. }
  1470. }
  1471. layer {
  1472. bottom: "stage3_unit7_bn1"
  1473. top: "stage3_unit7_bn1"
  1474. name: "stage3_unit7_bn1_scale"
  1475. type: "Scale"
  1476. scale_param { bias_term: true }
  1477. }
  1478.  
  1479. layer {
  1480. bottom: "stage3_unit7_bn1"
  1481. top: "stage3_unit7_conv1"
  1482. name: "stage3_unit7_conv1"
  1483. type: "Convolution"
  1484. convolution_param {
  1485. num_output: 256
  1486. kernel_size: 3
  1487. pad: 1
  1488. stride: 1
  1489. bias_term: true
  1490. }
  1491. }
  1492.  
  1493. layer {
  1494. bottom: "stage3_unit7_conv1"
  1495. top: "stage3_unit7_bn2"
  1496. name: "stage3_unit7_bn2"
  1497. type: "BatchNorm"
  1498. batch_norm_param {
  1499. use_global_stats: true
  1500. moving_average_fraction: 0.9
  1501. eps: 2e-05
  1502. }
  1503. }
  1504. layer {
  1505. bottom: "stage3_unit7_bn2"
  1506. top: "stage3_unit7_bn2"
  1507. name: "stage3_unit7_bn2_scale"
  1508. type: "Scale"
  1509. scale_param { bias_term: true }
  1510. }
  1511.  
  1512. layer {
  1513. bottom: "stage3_unit7_bn2"
  1514. top: "stage3_unit7_relu1"
  1515. name: "stage3_unit7_relu1"
  1516. type: "PReLU"
  1517. }
  1518.  
  1519. layer {
  1520. bottom: "stage3_unit7_relu1"
  1521. top: "stage3_unit7_conv2"
  1522. name: "stage3_unit7_conv2"
  1523. type: "Convolution"
  1524. convolution_param {
  1525. num_output: 256
  1526. kernel_size: 3
  1527. pad: 1
  1528. stride: 1
  1529. bias_term: true
  1530. }
  1531. }
  1532.  
  1533. layer {
  1534. bottom: "stage3_unit7_conv2"
  1535. top: "stage3_unit7_bn3"
  1536. name: "stage3_unit7_bn3"
  1537. type: "BatchNorm"
  1538. batch_norm_param {
  1539. use_global_stats: true
  1540. moving_average_fraction: 0.9
  1541. eps: 2e-05
  1542. }
  1543. }
  1544. layer {
  1545. bottom: "stage3_unit7_bn3"
  1546. top: "stage3_unit7_bn3"
  1547. name: "stage3_unit7_bn3_scale"
  1548. type: "Scale"
  1549. scale_param { bias_term: true }
  1550. }
  1551.  
  1552. layer {
  1553. name: "_plus13"
  1554. type: "Eltwise"
  1555. bottom: "stage3_unit7_bn3"
  1556. bottom: "_plus12"
  1557. top: "_plus13"
  1558. eltwise_param { operation: SUM }
  1559. }
  1560.  
  1561. layer {
  1562. bottom: "_plus13"
  1563. top: "stage3_unit8_bn1"
  1564. name: "stage3_unit8_bn1"
  1565. type: "BatchNorm"
  1566. batch_norm_param {
  1567. use_global_stats: true
  1568. moving_average_fraction: 0.9
  1569. eps: 2e-05
  1570. }
  1571. }
  1572. layer {
  1573. bottom: "stage3_unit8_bn1"
  1574. top: "stage3_unit8_bn1"
  1575. name: "stage3_unit8_bn1_scale"
  1576. type: "Scale"
  1577. scale_param { bias_term: true }
  1578. }
  1579.  
  1580. layer {
  1581. bottom: "stage3_unit8_bn1"
  1582. top: "stage3_unit8_conv1"
  1583. name: "stage3_unit8_conv1"
  1584. type: "Convolution"
  1585. convolution_param {
  1586. num_output: 256
  1587. kernel_size: 3
  1588. pad: 1
  1589. stride: 1
  1590. bias_term: true
  1591. }
  1592. }
  1593.  
  1594. layer {
  1595. bottom: "stage3_unit8_conv1"
  1596. top: "stage3_unit8_bn2"
  1597. name: "stage3_unit8_bn2"
  1598. type: "BatchNorm"
  1599. batch_norm_param {
  1600. use_global_stats: true
  1601. moving_average_fraction: 0.9
  1602. eps: 2e-05
  1603. }
  1604. }
  1605. layer {
  1606. bottom: "stage3_unit8_bn2"
  1607. top: "stage3_unit8_bn2"
  1608. name: "stage3_unit8_bn2_scale"
  1609. type: "Scale"
  1610. scale_param { bias_term: true }
  1611. }
  1612.  
  1613. layer {
  1614. bottom: "stage3_unit8_bn2"
  1615. top: "stage3_unit8_relu1"
  1616. name: "stage3_unit8_relu1"
  1617. type: "PReLU"
  1618. }
  1619.  
  1620. layer {
  1621. bottom: "stage3_unit8_relu1"
  1622. top: "stage3_unit8_conv2"
  1623. name: "stage3_unit8_conv2"
  1624. type: "Convolution"
  1625. convolution_param {
  1626. num_output: 256
  1627. kernel_size: 3
  1628. pad: 1
  1629. stride: 1
  1630. bias_term: true
  1631. }
  1632. }
  1633.  
  1634. layer {
  1635. bottom: "stage3_unit8_conv2"
  1636. top: "stage3_unit8_bn3"
  1637. name: "stage3_unit8_bn3"
  1638. type: "BatchNorm"
  1639. batch_norm_param {
  1640. use_global_stats: true
  1641. moving_average_fraction: 0.9
  1642. eps: 2e-05
  1643. }
  1644. }
  1645. layer {
  1646. bottom: "stage3_unit8_bn3"
  1647. top: "stage3_unit8_bn3"
  1648. name: "stage3_unit8_bn3_scale"
  1649. type: "Scale"
  1650. scale_param { bias_term: true }
  1651. }
  1652.  
  1653. layer {
  1654. name: "_plus14"
  1655. type: "Eltwise"
  1656. bottom: "stage3_unit8_bn3"
  1657. bottom: "_plus13"
  1658. top: "_plus14"
  1659. eltwise_param { operation: SUM }
  1660. }
  1661.  
  1662. layer {
  1663. bottom: "_plus14"
  1664. top: "stage3_unit9_bn1"
  1665. name: "stage3_unit9_bn1"
  1666. type: "BatchNorm"
  1667. batch_norm_param {
  1668. use_global_stats: true
  1669. moving_average_fraction: 0.9
  1670. eps: 2e-05
  1671. }
  1672. }
  1673. layer {
  1674. bottom: "stage3_unit9_bn1"
  1675. top: "stage3_unit9_bn1"
  1676. name: "stage3_unit9_bn1_scale"
  1677. type: "Scale"
  1678. scale_param { bias_term: true }
  1679. }
  1680.  
  1681. layer {
  1682. bottom: "stage3_unit9_bn1"
  1683. top: "stage3_unit9_conv1"
  1684. name: "stage3_unit9_conv1"
  1685. type: "Convolution"
  1686. convolution_param {
  1687. num_output: 256
  1688. kernel_size: 3
  1689. pad: 1
  1690. stride: 1
  1691. bias_term: true
  1692. }
  1693. }
  1694.  
  1695. layer {
  1696. bottom: "stage3_unit9_conv1"
  1697. top: "stage3_unit9_bn2"
  1698. name: "stage3_unit9_bn2"
  1699. type: "BatchNorm"
  1700. batch_norm_param {
  1701. use_global_stats: true
  1702. moving_average_fraction: 0.9
  1703. eps: 2e-05
  1704. }
  1705. }
  1706. layer {
  1707. bottom: "stage3_unit9_bn2"
  1708. top: "stage3_unit9_bn2"
  1709. name: "stage3_unit9_bn2_scale"
  1710. type: "Scale"
  1711. scale_param { bias_term: true }
  1712. }
  1713.  
  1714. layer {
  1715. bottom: "stage3_unit9_bn2"
  1716. top: "stage3_unit9_relu1"
  1717. name: "stage3_unit9_relu1"
  1718. type: "PReLU"
  1719. }
  1720.  
  1721. layer {
  1722. bottom: "stage3_unit9_relu1"
  1723. top: "stage3_unit9_conv2"
  1724. name: "stage3_unit9_conv2"
  1725. type: "Convolution"
  1726. convolution_param {
  1727. num_output: 256
  1728. kernel_size: 3
  1729. pad: 1
  1730. stride: 1
  1731. bias_term: true
  1732. }
  1733. }
  1734.  
  1735. layer {
  1736. bottom: "stage3_unit9_conv2"
  1737. top: "stage3_unit9_bn3"
  1738. name: "stage3_unit9_bn3"
  1739. type: "BatchNorm"
  1740. batch_norm_param {
  1741. use_global_stats: true
  1742. moving_average_fraction: 0.9
  1743. eps: 2e-05
  1744. }
  1745. }
  1746. layer {
  1747. bottom: "stage3_unit9_bn3"
  1748. top: "stage3_unit9_bn3"
  1749. name: "stage3_unit9_bn3_scale"
  1750. type: "Scale"
  1751. scale_param { bias_term: true }
  1752. }
  1753.  
  1754. layer {
  1755. name: "_plus15"
  1756. type: "Eltwise"
  1757. bottom: "stage3_unit9_bn3"
  1758. bottom: "_plus14"
  1759. top: "_plus15"
  1760. eltwise_param { operation: SUM }
  1761. }
  1762.  
  1763. layer {
  1764. bottom: "_plus15"
  1765. top: "stage3_unit10_bn1"
  1766. name: "stage3_unit10_bn1"
  1767. type: "BatchNorm"
  1768. batch_norm_param {
  1769. use_global_stats: true
  1770. moving_average_fraction: 0.9
  1771. eps: 2e-05
  1772. }
  1773. }
  1774. layer {
  1775. bottom: "stage3_unit10_bn1"
  1776. top: "stage3_unit10_bn1"
  1777. name: "stage3_unit10_bn1_scale"
  1778. type: "Scale"
  1779. scale_param { bias_term: true }
  1780. }
  1781.  
  1782. layer {
  1783. bottom: "stage3_unit10_bn1"
  1784. top: "stage3_unit10_conv1"
  1785. name: "stage3_unit10_conv1"
  1786. type: "Convolution"
  1787. convolution_param {
  1788. num_output: 256
  1789. kernel_size: 3
  1790. pad: 1
  1791. stride: 1
  1792. bias_term: true
  1793. }
  1794. }
  1795.  
  1796. layer {
  1797. bottom: "stage3_unit10_conv1"
  1798. top: "stage3_unit10_bn2"
  1799. name: "stage3_unit10_bn2"
  1800. type: "BatchNorm"
  1801. batch_norm_param {
  1802. use_global_stats: true
  1803. moving_average_fraction: 0.9
  1804. eps: 2e-05
  1805. }
  1806. }
  1807. layer {
  1808. bottom: "stage3_unit10_bn2"
  1809. top: "stage3_unit10_bn2"
  1810. name: "stage3_unit10_bn2_scale"
  1811. type: "Scale"
  1812. scale_param { bias_term: true }
  1813. }
  1814.  
  1815. layer {
  1816. bottom: "stage3_unit10_bn2"
  1817. top: "stage3_unit10_relu1"
  1818. name: "stage3_unit10_relu1"
  1819. type: "PReLU"
  1820. }
  1821.  
  1822. layer {
  1823. bottom: "stage3_unit10_relu1"
  1824. top: "stage3_unit10_conv2"
  1825. name: "stage3_unit10_conv2"
  1826. type: "Convolution"
  1827. convolution_param {
  1828. num_output: 256
  1829. kernel_size: 3
  1830. pad: 1
  1831. stride: 1
  1832. bias_term: true
  1833. }
  1834. }
  1835.  
  1836. layer {
  1837. bottom: "stage3_unit10_conv2"
  1838. top: "stage3_unit10_bn3"
  1839. name: "stage3_unit10_bn3"
  1840. type: "BatchNorm"
  1841. batch_norm_param {
  1842. use_global_stats: true
  1843. moving_average_fraction: 0.9
  1844. eps: 2e-05
  1845. }
  1846. }
  1847. layer {
  1848. bottom: "stage3_unit10_bn3"
  1849. top: "stage3_unit10_bn3"
  1850. name: "stage3_unit10_bn3_scale"
  1851. type: "Scale"
  1852. scale_param { bias_term: true }
  1853. }
  1854.  
  1855. layer {
  1856. name: "_plus16"
  1857. type: "Eltwise"
  1858. bottom: "stage3_unit10_bn3"
  1859. bottom: "_plus15"
  1860. top: "_plus16"
  1861. eltwise_param { operation: SUM }
  1862. }
  1863.  
  1864. layer {
  1865. bottom: "_plus16"
  1866. top: "stage3_unit11_bn1"
  1867. name: "stage3_unit11_bn1"
  1868. type: "BatchNorm"
  1869. batch_norm_param {
  1870. use_global_stats: true
  1871. moving_average_fraction: 0.9
  1872. eps: 2e-05
  1873. }
  1874. }
  1875. layer {
  1876. bottom: "stage3_unit11_bn1"
  1877. top: "stage3_unit11_bn1"
  1878. name: "stage3_unit11_bn1_scale"
  1879. type: "Scale"
  1880. scale_param { bias_term: true }
  1881. }
  1882.  
  1883. layer {
  1884. bottom: "stage3_unit11_bn1"
  1885. top: "stage3_unit11_conv1"
  1886. name: "stage3_unit11_conv1"
  1887. type: "Convolution"
  1888. convolution_param {
  1889. num_output: 256
  1890. kernel_size: 3
  1891. pad: 1
  1892. stride: 1
  1893. bias_term: true
  1894. }
  1895. }
  1896.  
  1897. layer {
  1898. bottom: "stage3_unit11_conv1"
  1899. top: "stage3_unit11_bn2"
  1900. name: "stage3_unit11_bn2"
  1901. type: "BatchNorm"
  1902. batch_norm_param {
  1903. use_global_stats: true
  1904. moving_average_fraction: 0.9
  1905. eps: 2e-05
  1906. }
  1907. }
  1908. layer {
  1909. bottom: "stage3_unit11_bn2"
  1910. top: "stage3_unit11_bn2"
  1911. name: "stage3_unit11_bn2_scale"
  1912. type: "Scale"
  1913. scale_param { bias_term: true }
  1914. }
  1915.  
  1916. layer {
  1917. bottom: "stage3_unit11_bn2"
  1918. top: "stage3_unit11_relu1"
  1919. name: "stage3_unit11_relu1"
  1920. type: "PReLU"
  1921. }
  1922.  
  1923. layer {
  1924. bottom: "stage3_unit11_relu1"
  1925. top: "stage3_unit11_conv2"
  1926. name: "stage3_unit11_conv2"
  1927. type: "Convolution"
  1928. convolution_param {
  1929. num_output: 256
  1930. kernel_size: 3
  1931. pad: 1
  1932. stride: 1
  1933. bias_term: true
  1934. }
  1935. }
  1936.  
  1937. layer {
  1938. bottom: "stage3_unit11_conv2"
  1939. top: "stage3_unit11_bn3"
  1940. name: "stage3_unit11_bn3"
  1941. type: "BatchNorm"
  1942. batch_norm_param {
  1943. use_global_stats: true
  1944. moving_average_fraction: 0.9
  1945. eps: 2e-05
  1946. }
  1947. }
  1948. layer {
  1949. bottom: "stage3_unit11_bn3"
  1950. top: "stage3_unit11_bn3"
  1951. name: "stage3_unit11_bn3_scale"
  1952. type: "Scale"
  1953. scale_param { bias_term: true }
  1954. }
  1955.  
  1956. layer {
  1957. name: "_plus17"
  1958. type: "Eltwise"
  1959. bottom: "stage3_unit11_bn3"
  1960. bottom: "_plus16"
  1961. top: "_plus17"
  1962. eltwise_param { operation: SUM }
  1963. }
  1964.  
  1965. layer {
  1966. bottom: "_plus17"
  1967. top: "stage3_unit12_bn1"
  1968. name: "stage3_unit12_bn1"
  1969. type: "BatchNorm"
  1970. batch_norm_param {
  1971. use_global_stats: true
  1972. moving_average_fraction: 0.9
  1973. eps: 2e-05
  1974. }
  1975. }
  1976. layer {
  1977. bottom: "stage3_unit12_bn1"
  1978. top: "stage3_unit12_bn1"
  1979. name: "stage3_unit12_bn1_scale"
  1980. type: "Scale"
  1981. scale_param { bias_term: true }
  1982. }
  1983.  
  1984. layer {
  1985. bottom: "stage3_unit12_bn1"
  1986. top: "stage3_unit12_conv1"
  1987. name: "stage3_unit12_conv1"
  1988. type: "Convolution"
  1989. convolution_param {
  1990. num_output: 256
  1991. kernel_size: 3
  1992. pad: 1
  1993. stride: 1
  1994. bias_term: true
  1995. }
  1996. }
  1997.  
  1998. layer {
  1999. bottom: "stage3_unit12_conv1"
  2000. top: "stage3_unit12_bn2"
  2001. name: "stage3_unit12_bn2"
  2002. type: "BatchNorm"
  2003. batch_norm_param {
  2004. use_global_stats: true
  2005. moving_average_fraction: 0.9
  2006. eps: 2e-05
  2007. }
  2008. }
  2009. layer {
  2010. bottom: "stage3_unit12_bn2"
  2011. top: "stage3_unit12_bn2"
  2012. name: "stage3_unit12_bn2_scale"
  2013. type: "Scale"
  2014. scale_param { bias_term: true }
  2015. }
  2016.  
  2017. layer {
  2018. bottom: "stage3_unit12_bn2"
  2019. top: "stage3_unit12_relu1"
  2020. name: "stage3_unit12_relu1"
  2021. type: "PReLU"
  2022. }
  2023.  
  2024. layer {
  2025. bottom: "stage3_unit12_relu1"
  2026. top: "stage3_unit12_conv2"
  2027. name: "stage3_unit12_conv2"
  2028. type: "Convolution"
  2029. convolution_param {
  2030. num_output: 256
  2031. kernel_size: 3
  2032. pad: 1
  2033. stride: 1
  2034. bias_term: true
  2035. }
  2036. }
  2037.  
  2038. layer {
  2039. bottom: "stage3_unit12_conv2"
  2040. top: "stage3_unit12_bn3"
  2041. name: "stage3_unit12_bn3"
  2042. type: "BatchNorm"
  2043. batch_norm_param {
  2044. use_global_stats: true
  2045. moving_average_fraction: 0.9
  2046. eps: 2e-05
  2047. }
  2048. }
  2049. layer {
  2050. bottom: "stage3_unit12_bn3"
  2051. top: "stage3_unit12_bn3"
  2052. name: "stage3_unit12_bn3_scale"
  2053. type: "Scale"
  2054. scale_param { bias_term: true }
  2055. }
  2056.  
  2057. layer {
  2058. name: "_plus18"
  2059. type: "Eltwise"
  2060. bottom: "stage3_unit12_bn3"
  2061. bottom: "_plus17"
  2062. top: "_plus18"
  2063. eltwise_param { operation: SUM }
  2064. }
  2065.  
  2066. layer {
  2067. bottom: "_plus18"
  2068. top: "stage3_unit13_bn1"
  2069. name: "stage3_unit13_bn1"
  2070. type: "BatchNorm"
  2071. batch_norm_param {
  2072. use_global_stats: true
  2073. moving_average_fraction: 0.9
  2074. eps: 2e-05
  2075. }
  2076. }
  2077. layer {
  2078. bottom: "stage3_unit13_bn1"
  2079. top: "stage3_unit13_bn1"
  2080. name: "stage3_unit13_bn1_scale"
  2081. type: "Scale"
  2082. scale_param { bias_term: true }
  2083. }
  2084.  
  2085. layer {
  2086. bottom: "stage3_unit13_bn1"
  2087. top: "stage3_unit13_conv1"
  2088. name: "stage3_unit13_conv1"
  2089. type: "Convolution"
  2090. convolution_param {
  2091. num_output: 256
  2092. kernel_size: 3
  2093. pad: 1
  2094. stride: 1
  2095. bias_term: true
  2096. }
  2097. }
  2098.  
  2099. layer {
  2100. bottom: "stage3_unit13_conv1"
  2101. top: "stage3_unit13_bn2"
  2102. name: "stage3_unit13_bn2"
  2103. type: "BatchNorm"
  2104. batch_norm_param {
  2105. use_global_stats: true
  2106. moving_average_fraction: 0.9
  2107. eps: 2e-05
  2108. }
  2109. }
  2110. layer {
  2111. bottom: "stage3_unit13_bn2"
  2112. top: "stage3_unit13_bn2"
  2113. name: "stage3_unit13_bn2_scale"
  2114. type: "Scale"
  2115. scale_param { bias_term: true }
  2116. }
  2117.  
  2118. layer {
  2119. bottom: "stage3_unit13_bn2"
  2120. top: "stage3_unit13_relu1"
  2121. name: "stage3_unit13_relu1"
  2122. type: "PReLU"
  2123. }
  2124.  
  2125. layer {
  2126. bottom: "stage3_unit13_relu1"
  2127. top: "stage3_unit13_conv2"
  2128. name: "stage3_unit13_conv2"
  2129. type: "Convolution"
  2130. convolution_param {
  2131. num_output: 256
  2132. kernel_size: 3
  2133. pad: 1
  2134. stride: 1
  2135. bias_term: true
  2136. }
  2137. }
  2138.  
  2139. layer {
  2140. bottom: "stage3_unit13_conv2"
  2141. top: "stage3_unit13_bn3"
  2142. name: "stage3_unit13_bn3"
  2143. type: "BatchNorm"
  2144. batch_norm_param {
  2145. use_global_stats: true
  2146. moving_average_fraction: 0.9
  2147. eps: 2e-05
  2148. }
  2149. }
  2150. layer {
  2151. bottom: "stage3_unit13_bn3"
  2152. top: "stage3_unit13_bn3"
  2153. name: "stage3_unit13_bn3_scale"
  2154. type: "Scale"
  2155. scale_param { bias_term: true }
  2156. }
  2157.  
  2158. layer {
  2159. name: "_plus19"
  2160. type: "Eltwise"
  2161. bottom: "stage3_unit13_bn3"
  2162. bottom: "_plus18"
  2163. top: "_plus19"
  2164. eltwise_param { operation: SUM }
  2165. }
  2166.  
  2167. layer {
  2168. bottom: "_plus19"
  2169. top: "stage3_unit14_bn1"
  2170. name: "stage3_unit14_bn1"
  2171. type: "BatchNorm"
  2172. batch_norm_param {
  2173. use_global_stats: true
  2174. moving_average_fraction: 0.9
  2175. eps: 2e-05
  2176. }
  2177. }
  2178. layer {
  2179. bottom: "stage3_unit14_bn1"
  2180. top: "stage3_unit14_bn1"
  2181. name: "stage3_unit14_bn1_scale"
  2182. type: "Scale"
  2183. scale_param { bias_term: true }
  2184. }
  2185.  
  2186. layer {
  2187. bottom: "stage3_unit14_bn1"
  2188. top: "stage3_unit14_conv1"
  2189. name: "stage3_unit14_conv1"
  2190. type: "Convolution"
  2191. convolution_param {
  2192. num_output: 256
  2193. kernel_size: 3
  2194. pad: 1
  2195. stride: 1
  2196. bias_term: true
  2197. }
  2198. }
  2199.  
  2200. layer {
  2201. bottom: "stage3_unit14_conv1"
  2202. top: "stage3_unit14_bn2"
  2203. name: "stage3_unit14_bn2"
  2204. type: "BatchNorm"
  2205. batch_norm_param {
  2206. use_global_stats: true
  2207. moving_average_fraction: 0.9
  2208. eps: 2e-05
  2209. }
  2210. }
  2211. layer {
  2212. bottom: "stage3_unit14_bn2"
  2213. top: "stage3_unit14_bn2"
  2214. name: "stage3_unit14_bn2_scale"
  2215. type: "Scale"
  2216. scale_param { bias_term: true }
  2217. }
  2218.  
  2219. layer {
  2220. bottom: "stage3_unit14_bn2"
  2221. top: "stage3_unit14_relu1"
  2222. name: "stage3_unit14_relu1"
  2223. type: "PReLU"
  2224. }
  2225.  
  2226. layer {
  2227. bottom: "stage3_unit14_relu1"
  2228. top: "stage3_unit14_conv2"
  2229. name: "stage3_unit14_conv2"
  2230. type: "Convolution"
  2231. convolution_param {
  2232. num_output: 256
  2233. kernel_size: 3
  2234. pad: 1
  2235. stride: 1
  2236. bias_term: true
  2237. }
  2238. }
  2239.  
  2240. layer {
  2241. bottom: "stage3_unit14_conv2"
  2242. top: "stage3_unit14_bn3"
  2243. name: "stage3_unit14_bn3"
  2244. type: "BatchNorm"
  2245. batch_norm_param {
  2246. use_global_stats: true
  2247. moving_average_fraction: 0.9
  2248. eps: 2e-05
  2249. }
  2250. }
  2251. layer {
  2252. bottom: "stage3_unit14_bn3"
  2253. top: "stage3_unit14_bn3"
  2254. name: "stage3_unit14_bn3_scale"
  2255. type: "Scale"
  2256. scale_param { bias_term: true }
  2257. }
  2258.  
  2259. layer {
  2260. name: "_plus20"
  2261. type: "Eltwise"
  2262. bottom: "stage3_unit14_bn3"
  2263. bottom: "_plus19"
  2264. top: "_plus20"
  2265. eltwise_param { operation: SUM }
  2266. }
  2267.  
  2268. layer {
  2269. bottom: "_plus20"
  2270. top: "stage4_unit1_bn1"
  2271. name: "stage4_unit1_bn1"
  2272. type: "BatchNorm"
  2273. batch_norm_param {
  2274. use_global_stats: true
  2275. moving_average_fraction: 0.9
  2276. eps: 2e-05
  2277. }
  2278. }
  2279. layer {
  2280. bottom: "stage4_unit1_bn1"
  2281. top: "stage4_unit1_bn1"
  2282. name: "stage4_unit1_bn1_scale"
  2283. type: "Scale"
  2284. scale_param { bias_term: true }
  2285. }
  2286.  
  2287. layer {
  2288. bottom: "stage4_unit1_bn1"
  2289. top: "stage4_unit1_conv1"
  2290. name: "stage4_unit1_conv1"
  2291. type: "Convolution"
  2292. convolution_param {
  2293. num_output: 512
  2294. kernel_size: 3
  2295. pad: 1
  2296. stride: 1
  2297. bias_term: true
  2298. }
  2299. }
  2300.  
  2301. layer {
  2302. bottom: "stage4_unit1_conv1"
  2303. top: "stage4_unit1_bn2"
  2304. name: "stage4_unit1_bn2"
  2305. type: "BatchNorm"
  2306. batch_norm_param {
  2307. use_global_stats: true
  2308. moving_average_fraction: 0.9
  2309. eps: 2e-05
  2310. }
  2311. }
  2312. layer {
  2313. bottom: "stage4_unit1_bn2"
  2314. top: "stage4_unit1_bn2"
  2315. name: "stage4_unit1_bn2_scale"
  2316. type: "Scale"
  2317. scale_param { bias_term: true }
  2318. }
  2319.  
  2320. layer {
  2321. bottom: "stage4_unit1_bn2"
  2322. top: "stage4_unit1_relu1"
  2323. name: "stage4_unit1_relu1"
  2324. type: "PReLU"
  2325. }
  2326.  
  2327. layer {
  2328. bottom: "stage4_unit1_relu1"
  2329. top: "stage4_unit1_conv2"
  2330. name: "stage4_unit1_conv2"
  2331. type: "Convolution"
  2332. convolution_param {
  2333. num_output: 512
  2334. kernel_size: 3
  2335. pad: 1
  2336. stride: 2
  2337. bias_term: true
  2338. }
  2339. }
  2340.  
  2341. layer {
  2342. bottom: "stage4_unit1_conv2"
  2343. top: "stage4_unit1_bn3"
  2344. name: "stage4_unit1_bn3"
  2345. type: "BatchNorm"
  2346. batch_norm_param {
  2347. use_global_stats: true
  2348. moving_average_fraction: 0.9
  2349. eps: 2e-05
  2350. }
  2351. }
  2352. layer {
  2353. bottom: "stage4_unit1_bn3"
  2354. top: "stage4_unit1_bn3"
  2355. name: "stage4_unit1_bn3_scale"
  2356. type: "Scale"
  2357. scale_param { bias_term: true }
  2358. }
  2359.  
  2360. layer {
  2361. bottom: "_plus20"
  2362. top: "stage4_unit1_conv1sc"
  2363. name: "stage4_unit1_conv1sc"
  2364. type: "Convolution"
  2365. convolution_param {
  2366. num_output: 512
  2367. kernel_size: 1
  2368. stride: 2
  2369. bias_term: true
  2370. }
  2371. }
  2372.  
  2373. layer {
  2374. bottom: "stage4_unit1_conv1sc"
  2375. top: "stage4_unit1_sc"
  2376. name: "stage4_unit1_sc"
  2377. type: "BatchNorm"
  2378. batch_norm_param {
  2379. use_global_stats: true
  2380. moving_average_fraction: 0.9
  2381. eps: 2e-05
  2382. }
  2383. }
  2384. layer {
  2385. bottom: "stage4_unit1_sc"
  2386. top: "stage4_unit1_sc"
  2387. name: "stage4_unit1_sc_scale"
  2388. type: "Scale"
  2389. scale_param { bias_term: true }
  2390. }
  2391.  
  2392. layer {
  2393. name: "_plus21"
  2394. type: "Eltwise"
  2395. bottom: "stage4_unit1_bn3"
  2396. bottom: "stage4_unit1_sc"
  2397. top: "_plus21"
  2398. eltwise_param { operation: SUM }
  2399. }
  2400.  
  2401. layer {
  2402. bottom: "_plus21"
  2403. top: "stage4_unit2_bn1"
  2404. name: "stage4_unit2_bn1"
  2405. type: "BatchNorm"
  2406. batch_norm_param {
  2407. use_global_stats: true
  2408. moving_average_fraction: 0.9
  2409. eps: 2e-05
  2410. }
  2411. }
  2412. layer {
  2413. bottom: "stage4_unit2_bn1"
  2414. top: "stage4_unit2_bn1"
  2415. name: "stage4_unit2_bn1_scale"
  2416. type: "Scale"
  2417. scale_param { bias_term: true }
  2418. }
  2419.  
  2420. layer {
  2421. bottom: "stage4_unit2_bn1"
  2422. top: "stage4_unit2_conv1"
  2423. name: "stage4_unit2_conv1"
  2424. type: "Convolution"
  2425. convolution_param {
  2426. num_output: 512
  2427. kernel_size: 3
  2428. pad: 1
  2429. stride: 1
  2430. bias_term: true
  2431. }
  2432. }
  2433.  
  2434. layer {
  2435. bottom: "stage4_unit2_conv1"
  2436. top: "stage4_unit2_bn2"
  2437. name: "stage4_unit2_bn2"
  2438. type: "BatchNorm"
  2439. batch_norm_param {
  2440. use_global_stats: true
  2441. moving_average_fraction: 0.9
  2442. eps: 2e-05
  2443. }
  2444. }
  2445. layer {
  2446. bottom: "stage4_unit2_bn2"
  2447. top: "stage4_unit2_bn2"
  2448. name: "stage4_unit2_bn2_scale"
  2449. type: "Scale"
  2450. scale_param { bias_term: true }
  2451. }
  2452.  
  2453. layer {
  2454. bottom: "stage4_unit2_bn2"
  2455. top: "stage4_unit2_relu1"
  2456. name: "stage4_unit2_relu1"
  2457. type: "PReLU"
  2458. }
  2459.  
  2460. layer {
  2461. bottom: "stage4_unit2_relu1"
  2462. top: "stage4_unit2_conv2"
  2463. name: "stage4_unit2_conv2"
  2464. type: "Convolution"
  2465. convolution_param {
  2466. num_output: 512
  2467. kernel_size: 3
  2468. pad: 1
  2469. stride: 1
  2470. bias_term: true
  2471. }
  2472. }
  2473.  
  2474. layer {
  2475. bottom: "stage4_unit2_conv2"
  2476. top: "stage4_unit2_bn3"
  2477. name: "stage4_unit2_bn3"
  2478. type: "BatchNorm"
  2479. batch_norm_param {
  2480. use_global_stats: true
  2481. moving_average_fraction: 0.9
  2482. eps: 2e-05
  2483. }
  2484. }
  2485. layer {
  2486. bottom: "stage4_unit2_bn3"
  2487. top: "stage4_unit2_bn3"
  2488. name: "stage4_unit2_bn3_scale"
  2489. type: "Scale"
  2490. scale_param { bias_term: true }
  2491. }
  2492.  
  2493. layer {
  2494. name: "_plus22"
  2495. type: "Eltwise"
  2496. bottom: "stage4_unit2_bn3"
  2497. bottom: "_plus21"
  2498. top: "_plus22"
  2499. eltwise_param { operation: SUM }
  2500. }
  2501.  
  2502. layer {
  2503. bottom: "_plus22"
  2504. top: "stage4_unit3_bn1"
  2505. name: "stage4_unit3_bn1"
  2506. type: "BatchNorm"
  2507. batch_norm_param {
  2508. use_global_stats: true
  2509. moving_average_fraction: 0.9
  2510. eps: 2e-05
  2511. }
  2512. }
  2513. layer {
  2514. bottom: "stage4_unit3_bn1"
  2515. top: "stage4_unit3_bn1"
  2516. name: "stage4_unit3_bn1_scale"
  2517. type: "Scale"
  2518. scale_param { bias_term: true }
  2519. }
  2520.  
  2521. layer {
  2522. bottom: "stage4_unit3_bn1"
  2523. top: "stage4_unit3_conv1"
  2524. name: "stage4_unit3_conv1"
  2525. type: "Convolution"
  2526. convolution_param {
  2527. num_output: 512
  2528. kernel_size: 3
  2529. pad: 1
  2530. stride: 1
  2531. bias_term: true
  2532. }
  2533. }
  2534.  
  2535. layer {
  2536. bottom: "stage4_unit3_conv1"
  2537. top: "stage4_unit3_bn2"
  2538. name: "stage4_unit3_bn2"
  2539. type: "BatchNorm"
  2540. batch_norm_param {
  2541. use_global_stats: true
  2542. moving_average_fraction: 0.9
  2543. eps: 2e-05
  2544. }
  2545. }
  2546. layer {
  2547. bottom: "stage4_unit3_bn2"
  2548. top: "stage4_unit3_bn2"
  2549. name: "stage4_unit3_bn2_scale"
  2550. type: "Scale"
  2551. scale_param { bias_term: true }
  2552. }
  2553.  
  2554. layer {
  2555. bottom: "stage4_unit3_bn2"
  2556. top: "stage4_unit3_relu1"
  2557. name: "stage4_unit3_relu1"
  2558. type: "PReLU"
  2559. }
  2560.  
  2561. layer {
  2562. bottom: "stage4_unit3_relu1"
  2563. top: "stage4_unit3_conv2"
  2564. name: "stage4_unit3_conv2"
  2565. type: "Convolution"
  2566. convolution_param {
  2567. num_output: 512
  2568. kernel_size: 3
  2569. pad: 1
  2570. stride: 1
  2571. bias_term: true
  2572. }
  2573. }
  2574.  
  2575. layer {
  2576. bottom: "stage4_unit3_conv2"
  2577. top: "stage4_unit3_bn3"
  2578. name: "stage4_unit3_bn3"
  2579. type: "BatchNorm"
  2580. batch_norm_param {
  2581. use_global_stats: true
  2582. moving_average_fraction: 0.9
  2583. eps: 2e-05
  2584. }
  2585. }
  2586. layer {
  2587. bottom: "stage4_unit3_bn3"
  2588. top: "stage4_unit3_bn3"
  2589. name: "stage4_unit3_bn3_scale"
  2590. type: "Scale"
  2591. scale_param { bias_term: true }
  2592. }
  2593.  
  2594. layer {
  2595. name: "_plus23"
  2596. type: "Eltwise"
  2597. bottom: "stage4_unit3_bn3"
  2598. bottom: "_plus22"
  2599. top: "_plus23"
  2600. eltwise_param { operation: SUM }
  2601. }
  2602.  
  2603. layer {
  2604. bottom: "_plus23"
  2605. top: "bn1"
  2606. name: "bn1"
  2607. type: "BatchNorm"
  2608. batch_norm_param {
  2609. use_global_stats: true
  2610. moving_average_fraction: 0.9
  2611. eps: 2e-05
  2612. }
  2613. }
  2614. layer {
  2615. bottom: "bn1"
  2616. top: "bn1"
  2617. name: "bn1_scale"
  2618. type: "Scale"
  2619. scale_param { bias_term: true }
  2620. }
  2621. layer {
  2622. bottom: "bn1"
  2623. top: "dropout0"
  2624. name: "dropout0"
  2625. type: "Dropout"
  2626. dropout_param { dropout_ratio : 0.400000 }
  2627. }
  2628. layer {
  2629. bottom: "dropout0"
  2630. top: "pre_fc1"
  2631. name: "pre_fc1"
  2632. type: "InnerProduct"
  2633. inner_product_param {
  2634. num_output: 512
  2635. }
  2636. }
  2637.  
  2638. layer {
  2639. bottom: "pre_fc1"
  2640. top: "fc1"
  2641. name: "fc1"
  2642. type: "BatchNorm"
  2643. batch_norm_param {
  2644. use_global_stats: true
  2645. moving_average_fraction: 0.9
  2646. eps: 2e-05
  2647. }
  2648. }
  2649. layer {
  2650. bottom: "fc1"
  2651. top: "fc1"
  2652. name: "fc1_scale"
  2653. type: "Scale"
  2654. scale_param { bias_term: true }
  2655. }
# end of network definition