Advertisement
Guest User

Untitled

a guest
Jan 20th, 2017
75
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 47.16 KB | None | 0 0
  1. name: "WRN_50_2"
  2. layer {
  3. name: "Data1"
  4. type: "Data"
  5. top: "Data1"
  6. top: "Data2"
  7. include {
  8. phase: TRAIN
  9. }
  10. transform_param {
  11. mirror: true
  12. crop_size: 151
  13. mean_value: 104
  14. mean_value: 117
  15. mean_value: 123
  16. }
  17. data_param {
  18. source: "/PATH/TO/train_imagenet_156_lmdb"
  19. batch_size: 64
  20. backend: LMDB
  21. }
  22. }
  23. layer {
  24. name: "Data1"
  25. type: "Data"
  26. top: "Data1"
  27. top: "Data2"
  28. include {
  29. phase: TEST
  30. }
  31. transform_param {
  32. mirror: true
  33. crop_size: 151
  34. mean_value: 104
  35. mean_value: 117
  36. mean_value: 123
  37. }
  38. data_param {
  39. source: "/PATH/TO/val_imagenet_156_lmdb"
  40. batch_size: 64
  41. backend: LMDB
  42. }
  43. }
  44. layer {
  45. name: "Convolution1"
  46. type: "Convolution"
  47. bottom: "Data1"
  48. top: "Convolution1"
  49. convolution_param {
  50. num_output: 64
  51. pad: 3
  52. kernel_size: 7
  53. stride: 2
  54. bias_term: false
  55. }
  56. }
  57. layer {
  58. name: "BatchNorm1"
  59. type: "BatchNorm"
  60. bottom: "Convolution1"
  61. top: "Convolution1"
  62. param {
  63. lr_mult: 0
  64. decay_mult: 0
  65. }
  66. param {
  67. lr_mult: 0
  68. decay_mult: 0
  69. }
  70. param {
  71. lr_mult: 0
  72. decay_mult: 0
  73. }
  74. batch_norm_param {
  75. use_global_stats: false
  76. }
  77. }
  78. layer {
  79. name: "Scale1"
  80. type: "Scale"
  81. bottom: "Convolution1"
  82. top: "Convolution1"
  83. scale_param {
  84. bias_term: true
  85. }
  86. }
  87. layer {
  88. name: "ReLU1"
  89. type: "ReLU"
  90. bottom: "Convolution1"
  91. top: "Convolution1"
  92. }
  93. layer {
  94. name: "Pooling1"
  95. type: "Pooling"
  96. bottom: "Convolution1"
  97. top: "Convolution1"
  98. pooling_param {
  99. kernel_size: 3
  100. stride: 2
  101. pad: 1
  102. pool: MAX
  103. }
  104. }
  105.  
  106. #------------------------------------------------------------------------------
  107. #------------------------------------------------------------------------------
  108. #------------------------------------------------------------------------------
  109. #------------------------------------------------------------------------------
  110.  
  111. layer {
  112. name: "Convolution2_1_1"
  113. type: "Convolution"
  114. bottom: "Convolution1"
  115. top: "Convolution2_1_1"
  116. convolution_param {
  117. num_output: 128
  118. bias_term: false
  119. kernel_size: 1
  120. }
  121. }
  122. layer {
  123. name: "BatchNorm2_1_1"
  124. type: "BatchNorm"
  125. bottom: "Convolution2_1_1"
  126. top: "Convolution2_1_1"
  127. param {
  128. lr_mult: 0
  129. decay_mult: 0
  130. }
  131. param {
  132. lr_mult: 0
  133. decay_mult: 0
  134. }
  135. param {
  136. lr_mult: 0
  137. decay_mult: 0
  138. }
  139. batch_norm_param {
  140. use_global_stats: false
  141. }
  142. }
  143. layer {
  144. name: "Scale2_1_1"
  145. type: "Scale"
  146. bottom: "Convolution2_1_1"
  147. top: "Convolution2_1_1"
  148. scale_param {
  149. bias_term: true
  150. }
  151. }
  152. layer {
  153. name: "ReLU2_1_1"
  154. type: "ReLU"
  155. bottom: "Convolution2_1_1"
  156. top: "Convolution2_1_1"
  157. }
  158. layer {
  159. name: "Convolution2_1_2"
  160. type: "Convolution"
  161. bottom: "Convolution2_1_1"
  162. top: "Convolution2_1_2"
  163. convolution_param {
  164. num_output: 128
  165. bias_term: false
  166. pad: 1
  167. kernel_size: 3
  168. stride: 1
  169. }
  170. }
  171. layer {
  172. name: "BatchNorm2_1_2"
  173. type: "BatchNorm"
  174. bottom: "Convolution2_1_2"
  175. top: "Convolution2_1_2"
  176. param {
  177. lr_mult: 0
  178. decay_mult: 0
  179. }
  180. param {
  181. lr_mult: 0
  182. decay_mult: 0
  183. }
  184. param {
  185. lr_mult: 0
  186. decay_mult: 0
  187. }
  188. batch_norm_param {
  189. use_global_stats: false
  190. }
  191. }
  192. layer {
  193. name: "Scale2_1_2"
  194. type: "Scale"
  195. bottom: "Convolution2_1_2"
  196. top: "Convolution2_1_2"
  197. scale_param {
  198. bias_term: true
  199. }
  200. }
  201. layer {
  202. name: "ReLU2_1_2"
  203. type: "ReLU"
  204. bottom: "Convolution2_1_2"
  205. top: "Convolution2_1_2"
  206. }
  207. layer {
  208. name: "Convolution2_1_3"
  209. type: "Convolution"
  210. bottom: "Convolution2_1_2"
  211. top: "Convolution2_1_3"
  212. convolution_param {
  213. num_output: 256
  214. bias_term: false
  215. kernel_size: 1
  216. }
  217. }
  218. layer {
  219. name: "BatchNorm2_1_3"
  220. type: "BatchNorm"
  221. bottom: "Convolution2_1_3"
  222. top: "Convolution2_1_3"
  223. param {
  224. lr_mult: 0
  225. decay_mult: 0
  226. }
  227. param {
  228. lr_mult: 0
  229. decay_mult: 0
  230. }
  231. param {
  232. lr_mult: 0
  233. decay_mult: 0
  234. }
  235. batch_norm_param {
  236. use_global_stats: false
  237. }
  238. }
  239. layer {
  240. name: "Scale2_1_3"
  241. type: "Scale"
  242. bottom: "Convolution2_1_3"
  243. top: "Convolution2_1_3"
  244. scale_param {
  245. bias_term: true
  246. }
  247. }
  248.  
  249. layer {
  250. name: "Convolution2_1"
  251. type: "Convolution"
  252. bottom: "Convolution1"
  253. top: "Convolution2_1"
  254. convolution_param {
  255. num_output: 256
  256. bias_term: false
  257. kernel_size: 1
  258. }
  259. }
  260. layer {
  261. name: "BatchNorm2_1"
  262. type: "BatchNorm"
  263. bottom: "Convolution2_1"
  264. top: "Convolution2_1"
  265. param {
  266. lr_mult: 0
  267. decay_mult: 0
  268. }
  269. param {
  270. lr_mult: 0
  271. decay_mult: 0
  272. }
  273. param {
  274. lr_mult: 0
  275. decay_mult: 0
  276. }
  277. batch_norm_param {
  278. use_global_stats: false
  279. }
  280. }
  281. layer {
  282. name: "Scale2_1"
  283. type: "Scale"
  284. bottom: "Convolution2_1"
  285. top: "Convolution2_1"
  286. scale_param {
  287. bias_term: true
  288. }
  289. }
  290.  
  291. layer {
  292. name: "Sum2_1"
  293. type: "Eltwise"
  294. bottom: "Convolution2_1"
  295. bottom: "Convolution2_1_3"
  296. top: "Sum2_1"
  297. eltwise_param {
  298. operation: SUM
  299. }
  300. }
  301. layer {
  302. name: "ReLU2_1"
  303. type: "ReLU"
  304. bottom: "Sum2_1"
  305. top: "Sum2_1"
  306. }
  307.  
  308. #------------------------------------------------------------------------------
  309. #------------------------------------------------------------------------------
  310.  
  311. layer {
  312. name: "Convolution2_2_1"
  313. type: "Convolution"
  314. bottom: "Sum2_1"
  315. top: "Convolution2_2_1"
  316. convolution_param {
  317. num_output: 128
  318. bias_term: false
  319. kernel_size: 1
  320. }
  321. }
  322. layer {
  323. name: "BatchNorm2_2_1"
  324. type: "BatchNorm"
  325. bottom: "Convolution2_2_1"
  326. top: "Convolution2_2_1"
  327. param {
  328. lr_mult: 0
  329. decay_mult: 0
  330. }
  331. param {
  332. lr_mult: 0
  333. decay_mult: 0
  334. }
  335. param {
  336. lr_mult: 0
  337. decay_mult: 0
  338. }
  339. batch_norm_param {
  340. use_global_stats: false
  341. }
  342. }
  343. layer {
  344. name: "Scale2_2_1"
  345. type: "Scale"
  346. bottom: "Convolution2_2_1"
  347. top: "Convolution2_2_1"
  348. scale_param {
  349. bias_term: true
  350. }
  351. }
  352. layer {
  353. name: "ReLU2_2_1"
  354. type: "ReLU"
  355. bottom: "Convolution2_2_1"
  356. top: "Convolution2_2_1"
  357. }
  358. #------------------------------------------------------------------------------
  359. layer {
  360. name: "Convolution2_2_2"
  361. type: "Convolution"
  362. bottom: "Convolution2_2_1"
  363. top: "Convolution2_2_2"
  364. convolution_param {
  365. num_output: 128
  366. bias_term: false
  367. pad: 1
  368. kernel_size: 3
  369. stride: 1
  370. }
  371. }
  372. layer {
  373. name: "BatchNorm2_2_2"
  374. type: "BatchNorm"
  375. bottom: "Convolution2_2_2"
  376. top: "Convolution2_2_2"
  377. param {
  378. lr_mult: 0
  379. decay_mult: 0
  380. }
  381. param {
  382. lr_mult: 0
  383. decay_mult: 0
  384. }
  385. param {
  386. lr_mult: 0
  387. decay_mult: 0
  388. }
  389. batch_norm_param {
  390. use_global_stats: false
  391. }
  392. }
  393. layer {
  394. name: "Scale2_2_2"
  395. type: "Scale"
  396. bottom: "Convolution2_2_2"
  397. top: "Convolution2_2_2"
  398. scale_param {
  399. bias_term: true
  400. }
  401. }
  402. layer {
  403. name: "ReLU2_2_2"
  404. type: "ReLU"
  405. bottom: "Convolution2_2_2"
  406. top: "Convolution2_2_2"
  407. }
  408. #------------------------------------------------------------------------------
  409. layer {
  410. name: "Convolution2_2_3"
  411. type: "Convolution"
  412. bottom: "Convolution2_2_2"
  413. top: "Convolution2_2_3"
  414. convolution_param {
  415. num_output: 256
  416. bias_term: false
  417. kernel_size: 1
  418. }
  419. }
  420. layer {
  421. name: "BatchNorm2_2_3"
  422. type: "BatchNorm"
  423. bottom: "Convolution2_2_3"
  424. top: "Convolution2_2_3"
  425. param {
  426. lr_mult: 0
  427. decay_mult: 0
  428. }
  429. param {
  430. lr_mult: 0
  431. decay_mult: 0
  432. }
  433. param {
  434. lr_mult: 0
  435. decay_mult: 0
  436. }
  437. batch_norm_param {
  438. use_global_stats: false
  439. }
  440. }
  441. layer {
  442. name: "Scale2_2_3"
  443. type: "Scale"
  444. bottom: "Convolution2_2_3"
  445. top: "Convolution2_2_3"
  446. scale_param {
  447. bias_term: true
  448. }
  449. }
  450. layer {
  451. name: "Sum2_2"
  452. type: "Eltwise"
  453. bottom: "Sum2_1"
  454. bottom: "Convolution2_2_3"
  455. top: "Sum2_2"
  456. eltwise_param {
  457. operation: SUM
  458. }
  459. }
  460. layer {
  461. name: "ReLU2_2"
  462. type: "ReLU"
  463. bottom: "Sum2_2"
  464. top: "Sum2_2"
  465. }
  466.  
  467. #------------------------------------------------------------------------------
  468. #------------------------------------------------------------------------------
  469.  
  470. layer {
  471. name: "Convolution2_3_1"
  472. type: "Convolution"
  473. bottom: "Sum2_2"
  474. top: "Convolution2_3_1"
  475. convolution_param {
  476. num_output: 128
  477. bias_term: false
  478. kernel_size: 1
  479. }
  480. }
  481. layer {
  482. name: "BatchNorm2_3_1"
  483. type: "BatchNorm"
  484. bottom: "Convolution2_3_1"
  485. top: "Convolution2_3_1"
  486. param {
  487. lr_mult: 0
  488. decay_mult: 0
  489. }
  490. param {
  491. lr_mult: 0
  492. decay_mult: 0
  493. }
  494. param {
  495. lr_mult: 0
  496. decay_mult: 0
  497. }
  498. batch_norm_param {
  499. use_global_stats: false
  500. }
  501. }
  502. layer {
  503. name: "Scale2_3_1"
  504. type: "Scale"
  505. bottom: "Convolution2_3_1"
  506. top: "Convolution2_3_1"
  507. scale_param {
  508. bias_term: true
  509. }
  510. }
  511. layer {
  512. name: "ReLU2_3_1"
  513. type: "ReLU"
  514. bottom: "Convolution2_3_1"
  515. top: "Convolution2_3_1"
  516. }
  517. #------------------------------------------------------------------------------
  518. layer {
  519. name: "Convolution2_3_2"
  520. type: "Convolution"
  521. bottom: "Convolution2_3_1"
  522. top: "Convolution2_3_2"
  523. convolution_param {
  524. num_output: 128
  525. bias_term: false
  526. pad: 1
  527. kernel_size: 3
  528. stride: 1
  529. }
  530. }
  531. layer {
  532. name: "BatchNorm2_3_2"
  533. type: "BatchNorm"
  534. bottom: "Convolution2_3_2"
  535. top: "Convolution2_3_2"
  536. param {
  537. lr_mult: 0
  538. decay_mult: 0
  539. }
  540. param {
  541. lr_mult: 0
  542. decay_mult: 0
  543. }
  544. param {
  545. lr_mult: 0
  546. decay_mult: 0
  547. }
  548. batch_norm_param {
  549. use_global_stats: false
  550. }
  551. }
  552. layer {
  553. name: "Scale2_3_2"
  554. type: "Scale"
  555. bottom: "Convolution2_3_2"
  556. top: "Convolution2_3_2"
  557. scale_param {
  558. bias_term: true
  559. }
  560. }
  561. layer {
  562. name: "ReLU2_3_2"
  563. type: "ReLU"
  564. bottom: "Convolution2_3_2"
  565. top: "Convolution2_3_2"
  566. }
  567. #------------------------------------------------------------------------------
  568. layer {
  569. name: "Convolution2_3_3"
  570. type: "Convolution"
  571. bottom: "Convolution2_3_2"
  572. top: "Convolution2_3_3"
  573. convolution_param {
  574. num_output: 256
  575. bias_term: false
  576. kernel_size: 1
  577. }
  578. }
  579. layer {
  580. name: "BatchNorm2_3_3"
  581. type: "BatchNorm"
  582. bottom: "Convolution2_3_3"
  583. top: "Convolution2_3_3"
  584. param {
  585. lr_mult: 0
  586. decay_mult: 0
  587. }
  588. param {
  589. lr_mult: 0
  590. decay_mult: 0
  591. }
  592. param {
  593. lr_mult: 0
  594. decay_mult: 0
  595. }
  596. batch_norm_param {
  597. use_global_stats: false
  598. }
  599. }
  600. layer {
  601. name: "Scale2_3_3"
  602. type: "Scale"
  603. bottom: "Convolution2_3_3"
  604. top: "Convolution2_3_3"
  605. scale_param {
  606. bias_term: true
  607. }
  608. }
  609. layer {
  610. name: "Sum2_3"
  611. type: "Eltwise"
  612. bottom: "Sum2_2"
  613. bottom: "Convolution2_3_3"
  614. top: "Sum2_3"
  615. eltwise_param {
  616. operation: SUM
  617. }
  618. }
  619. layer {
  620. name: "ReLU2_3"
  621. type: "ReLU"
  622. bottom: "Sum2_3"
  623. top: "Sum2_3"
  624. }
  625.  
  626. #------------------------------------------------------------------------------
  627. #------------------------------------------------------------------------------
  628. #------------------------------------------------------------------------------
  629. #------------------------------------------------------------------------------
  630.  
  631. layer {
  632. name: "Convolution3_1_1"
  633. type: "Convolution"
  634. bottom: "Sum2_3"
  635. top: "Convolution3_1_1"
  636. convolution_param {
  637. num_output: 256
  638. bias_term: false
  639. kernel_size: 1
  640. }
  641. }
  642. layer {
  643. name: "BatchNorm3_1_1"
  644. type: "BatchNorm"
  645. bottom: "Convolution3_1_1"
  646. top: "Convolution3_1_1"
  647. param {
  648. lr_mult: 0
  649. decay_mult: 0
  650. }
  651. param {
  652. lr_mult: 0
  653. decay_mult: 0
  654. }
  655. param {
  656. lr_mult: 0
  657. decay_mult: 0
  658. }
  659. batch_norm_param {
  660. use_global_stats: false
  661. }
  662. }
  663. layer {
  664. name: "Scale3_1_1"
  665. type: "Scale"
  666. bottom: "Convolution3_1_1"
  667. top: "Convolution3_1_1"
  668. scale_param {
  669. bias_term: true
  670. }
  671. }
  672. layer {
  673. name: "ReLU3_1_1"
  674. type: "ReLU"
  675. bottom: "Convolution3_1_1"
  676. top: "Convolution3_1_1"
  677. }
  678. layer {
  679. name: "Convolution3_1_2"
  680. type: "Convolution"
  681. bottom: "Convolution3_1_1"
  682. top: "Convolution3_1_2"
  683. convolution_param {
  684. num_output: 256
  685. bias_term: false
  686. pad: 1
  687. kernel_size: 3
  688. stride: 2
  689. }
  690. }
  691. layer {
  692. name: "BatchNorm3_1_2"
  693. type: "BatchNorm"
  694. bottom: "Convolution3_1_2"
  695. top: "Convolution3_1_2"
  696. param {
  697. lr_mult: 0
  698. decay_mult: 0
  699. }
  700. param {
  701. lr_mult: 0
  702. decay_mult: 0
  703. }
  704. param {
  705. lr_mult: 0
  706. decay_mult: 0
  707. }
  708. batch_norm_param {
  709. use_global_stats: false
  710. }
  711. }
  712. layer {
  713. name: "Scale3_1_2"
  714. type: "Scale"
  715. bottom: "Convolution3_1_2"
  716. top: "Convolution3_1_2"
  717. scale_param {
  718. bias_term: true
  719. }
  720. }
  721. layer {
  722. name: "ReLU3_1_2"
  723. type: "ReLU"
  724. bottom: "Convolution3_1_2"
  725. top: "Convolution3_1_2"
  726. }
  727. layer {
  728. name: "Convolution3_1_3"
  729. type: "Convolution"
  730. bottom: "Convolution3_1_2"
  731. top: "Convolution3_1_3"
  732. convolution_param {
  733. num_output: 512
  734. bias_term: false
  735. kernel_size: 1
  736. }
  737. }
  738. layer {
  739. name: "BatchNorm3_1_3"
  740. type: "BatchNorm"
  741. bottom: "Convolution3_1_3"
  742. top: "Convolution3_1_3"
  743. param {
  744. lr_mult: 0
  745. decay_mult: 0
  746. }
  747. param {
  748. lr_mult: 0
  749. decay_mult: 0
  750. }
  751. param {
  752. lr_mult: 0
  753. decay_mult: 0
  754. }
  755. batch_norm_param {
  756. use_global_stats: false
  757. }
  758. }
  759. layer {
  760. name: "Scale3_1_3"
  761. type: "Scale"
  762. bottom: "Convolution3_1_3"
  763. top: "Convolution3_1_3"
  764. scale_param {
  765. bias_term: true
  766. }
  767. }
  768.  
  769. layer {
  770. name: "Convolution3_1"
  771. type: "Convolution"
  772. bottom: "Sum2_3"
  773. top: "Convolution3_1"
  774. convolution_param {
  775. num_output: 512
  776. bias_term: false
  777. kernel_size: 1
  778. stride: 2
  779. }
  780. }
  781. layer {
  782. name: "BatchNorm3_1"
  783. type: "BatchNorm"
  784. bottom: "Convolution3_1"
  785. top: "Convolution3_1"
  786. param {
  787. lr_mult: 0
  788. decay_mult: 0
  789. }
  790. param {
  791. lr_mult: 0
  792. decay_mult: 0
  793. }
  794. param {
  795. lr_mult: 0
  796. decay_mult: 0
  797. }
  798. batch_norm_param {
  799. use_global_stats: false
  800. }
  801. }
  802. layer {
  803. name: "Scale3_1"
  804. type: "Scale"
  805. bottom: "Convolution3_1"
  806. top: "Convolution3_1"
  807. scale_param {
  808. bias_term: true
  809. }
  810. }
  811.  
  812. layer {
  813. name: "Sum3_1"
  814. type: "Eltwise"
  815. bottom: "Convolution3_1"
  816. bottom: "Convolution3_1_3"
  817. top: "Sum3_1"
  818. eltwise_param {
  819. operation: SUM
  820. }
  821. }
  822. layer {
  823. name: "ReLU3_1"
  824. type: "ReLU"
  825. bottom: "Sum3_1"
  826. top: "Sum3_1"
  827. }
  828.  
  829. #------------------------------------------------------------------------------
  830. #------------------------------------------------------------------------------
  831.  
  832. layer {
  833. name: "Convolution3_2_1"
  834. type: "Convolution"
  835. bottom: "Sum3_1"
  836. top: "Convolution3_2_1"
  837. convolution_param {
  838. num_output: 256
  839. bias_term: false
  840. kernel_size: 1
  841. }
  842. }
  843. layer {
  844. name: "BatchNorm3_2_1"
  845. type: "BatchNorm"
  846. bottom: "Convolution3_2_1"
  847. top: "Convolution3_2_1"
  848. param {
  849. lr_mult: 0
  850. decay_mult: 0
  851. }
  852. param {
  853. lr_mult: 0
  854. decay_mult: 0
  855. }
  856. param {
  857. lr_mult: 0
  858. decay_mult: 0
  859. }
  860. batch_norm_param {
  861. use_global_stats: false
  862. }
  863. }
  864. layer {
  865. name: "Scale3_2_1"
  866. type: "Scale"
  867. bottom: "Convolution3_2_1"
  868. top: "Convolution3_2_1"
  869. scale_param {
  870. bias_term: true
  871. }
  872. }
  873. layer {
  874. name: "ReLU3_2_1"
  875. type: "ReLU"
  876. bottom: "Convolution3_2_1"
  877. top: "Convolution3_2_1"
  878. }
  879. #------------------------------------------------------------------------------
  880. layer {
  881. name: "Convolution3_2_2"
  882. type: "Convolution"
  883. bottom: "Convolution3_2_1"
  884. top: "Convolution3_2_2"
  885. convolution_param {
  886. num_output: 256
  887. bias_term: false
  888. pad: 1
  889. kernel_size: 3
  890. stride: 1
  891. }
  892. }
  893. layer {
  894. name: "BatchNorm3_2_2"
  895. type: "BatchNorm"
  896. bottom: "Convolution3_2_2"
  897. top: "Convolution3_2_2"
  898. param {
  899. lr_mult: 0
  900. decay_mult: 0
  901. }
  902. param {
  903. lr_mult: 0
  904. decay_mult: 0
  905. }
  906. param {
  907. lr_mult: 0
  908. decay_mult: 0
  909. }
  910. batch_norm_param {
  911. use_global_stats: false
  912. }
  913. }
  914. layer {
  915. name: "Scale3_2_2"
  916. type: "Scale"
  917. bottom: "Convolution3_2_2"
  918. top: "Convolution3_2_2"
  919. scale_param {
  920. bias_term: true
  921. }
  922. }
  923. layer {
  924. name: "ReLU3_2_2"
  925. type: "ReLU"
  926. bottom: "Convolution3_2_2"
  927. top: "Convolution3_2_2"
  928. }
  929. #------------------------------------------------------------------------------
  930. layer {
  931. name: "Convolution3_2_3"
  932. type: "Convolution"
  933. bottom: "Convolution3_2_2"
  934. top: "Convolution3_2_3"
  935. convolution_param {
  936. num_output: 512
  937. bias_term: false
  938. kernel_size: 1
  939. }
  940. }
  941. layer {
  942. name: "BatchNorm3_2_3"
  943. type: "BatchNorm"
  944. bottom: "Convolution3_2_3"
  945. top: "Convolution3_2_3"
  946. param {
  947. lr_mult: 0
  948. decay_mult: 0
  949. }
  950. param {
  951. lr_mult: 0
  952. decay_mult: 0
  953. }
  954. param {
  955. lr_mult: 0
  956. decay_mult: 0
  957. }
  958. batch_norm_param {
  959. use_global_stats: false
  960. }
  961. }
  962. layer {
  963. name: "Scale3_2_3"
  964. type: "Scale"
  965. bottom: "Convolution3_2_3"
  966. top: "Convolution3_2_3"
  967. scale_param {
  968. bias_term: true
  969. }
  970. }
  971. layer {
  972. name: "Sum3_2"
  973. type: "Eltwise"
  974. bottom: "Sum3_1"
  975. bottom: "Convolution3_2_3"
  976. top: "Sum3_2"
  977. eltwise_param {
  978. operation: SUM
  979. }
  980. }
  981. layer {
  982. name: "ReLU3_2"
  983. type: "ReLU"
  984. bottom: "Sum3_2"
  985. top: "Sum3_2"
  986. }
  987.  
  988. #------------------------------------------------------------------------------
  989. #------------------------------------------------------------------------------
  990.  
  991. layer {
  992. name: "Convolution3_3_1"
  993. type: "Convolution"
  994. bottom: "Sum3_2"
  995. top: "Convolution3_3_1"
  996. convolution_param {
  997. num_output: 256
  998. bias_term: false
  999. kernel_size: 1
  1000. }
  1001. }
  1002. layer {
  1003. name: "BatchNorm3_3_1"
  1004. type: "BatchNorm"
  1005. bottom: "Convolution3_3_1"
  1006. top: "Convolution3_3_1"
  1007. param {
  1008. lr_mult: 0
  1009. decay_mult: 0
  1010. }
  1011. param {
  1012. lr_mult: 0
  1013. decay_mult: 0
  1014. }
  1015. param {
  1016. lr_mult: 0
  1017. decay_mult: 0
  1018. }
  1019. batch_norm_param {
  1020. use_global_stats: false
  1021. }
  1022. }
  1023. layer {
  1024. name: "Scale3_3_1"
  1025. type: "Scale"
  1026. bottom: "Convolution3_3_1"
  1027. top: "Convolution3_3_1"
  1028. scale_param {
  1029. bias_term: true
  1030. }
  1031. }
  1032. layer {
  1033. name: "ReLU3_3_1"
  1034. type: "ReLU"
  1035. bottom: "Convolution3_3_1"
  1036. top: "Convolution3_3_1"
  1037. }
  1038. #------------------------------------------------------------------------------
  1039. layer {
  1040. name: "Convolution3_3_2"
  1041. type: "Convolution"
  1042. bottom: "Convolution3_3_1"
  1043. top: "Convolution3_3_2"
  1044. convolution_param {
  1045. num_output: 256
  1046. bias_term: false
  1047. pad: 1
  1048. kernel_size: 3
  1049. stride: 1
  1050. }
  1051. }
  1052. layer {
  1053. name: "BatchNorm3_3_2"
  1054. type: "BatchNorm"
  1055. bottom: "Convolution3_3_2"
  1056. top: "Convolution3_3_2"
  1057. param {
  1058. lr_mult: 0
  1059. decay_mult: 0
  1060. }
  1061. param {
  1062. lr_mult: 0
  1063. decay_mult: 0
  1064. }
  1065. param {
  1066. lr_mult: 0
  1067. decay_mult: 0
  1068. }
  1069. batch_norm_param {
  1070. use_global_stats: false
  1071. }
  1072. }
  1073. layer {
  1074. name: "Scale3_3_2"
  1075. type: "Scale"
  1076. bottom: "Convolution3_3_2"
  1077. top: "Convolution3_3_2"
  1078. scale_param {
  1079. bias_term: true
  1080. }
  1081. }
  1082. layer {
  1083. name: "ReLU3_3_2"
  1084. type: "ReLU"
  1085. bottom: "Convolution3_3_2"
  1086. top: "Convolution3_3_2"
  1087. }
  1088. #------------------------------------------------------------------------------
  1089. layer {
  1090. name: "Convolution3_3_3"
  1091. type: "Convolution"
  1092. bottom: "Convolution3_3_2"
  1093. top: "Convolution3_3_3"
  1094. convolution_param {
  1095. num_output: 512
  1096. bias_term: false
  1097. kernel_size: 1
  1098. }
  1099. }
  1100. layer {
  1101. name: "BatchNorm3_3_3"
  1102. type: "BatchNorm"
  1103. bottom: "Convolution3_3_3"
  1104. top: "Convolution3_3_3"
  1105. param {
  1106. lr_mult: 0
  1107. decay_mult: 0
  1108. }
  1109. param {
  1110. lr_mult: 0
  1111. decay_mult: 0
  1112. }
  1113. param {
  1114. lr_mult: 0
  1115. decay_mult: 0
  1116. }
  1117. batch_norm_param {
  1118. use_global_stats: false
  1119. }
  1120. }
  1121. layer {
  1122. name: "Scale3_3_3"
  1123. type: "Scale"
  1124. bottom: "Convolution3_3_3"
  1125. top: "Convolution3_3_3"
  1126. scale_param {
  1127. bias_term: true
  1128. }
  1129. }
  1130. layer {
  1131. name: "Sum3_3"
  1132. type: "Eltwise"
  1133. bottom: "Sum3_2"
  1134. bottom: "Convolution3_3_3"
  1135. top: "Sum3_3"
  1136. eltwise_param {
  1137. operation: SUM
  1138. }
  1139. }
  1140. layer {
  1141. name: "ReLU3_3"
  1142. type: "ReLU"
  1143. bottom: "Sum3_3"
  1144. top: "Sum3_3"
  1145. }
  1146.  
  1147. #------------------------------------------------------------------------------
  1148. #------------------------------------------------------------------------------
  1149.  
  1150. layer {
  1151. name: "Convolution3_4_1"
  1152. type: "Convolution"
  1153. bottom: "Sum3_3"
  1154. top: "Convolution3_4_1"
  1155. convolution_param {
  1156. num_output: 256
  1157. bias_term: false
  1158. kernel_size: 1
  1159. }
  1160. }
  1161. layer {
  1162. name: "BatchNorm3_4_1"
  1163. type: "BatchNorm"
  1164. bottom: "Convolution3_4_1"
  1165. top: "Convolution3_4_1"
  1166. param {
  1167. lr_mult: 0
  1168. decay_mult: 0
  1169. }
  1170. param {
  1171. lr_mult: 0
  1172. decay_mult: 0
  1173. }
  1174. param {
  1175. lr_mult: 0
  1176. decay_mult: 0
  1177. }
  1178. batch_norm_param {
  1179. use_global_stats: false
  1180. }
  1181. }
  1182. layer {
  1183. name: "Scale3_4_1"
  1184. type: "Scale"
  1185. bottom: "Convolution3_4_1"
  1186. top: "Convolution3_4_1"
  1187. scale_param {
  1188. bias_term: true
  1189. }
  1190. }
  1191. layer {
  1192. name: "ReLU3_4_1"
  1193. type: "ReLU"
  1194. bottom: "Convolution3_4_1"
  1195. top: "Convolution3_4_1"
  1196. }
  1197. #------------------------------------------------------------------------------
  1198. layer {
  1199. name: "Convolution3_4_2"
  1200. type: "Convolution"
  1201. bottom: "Convolution3_4_1"
  1202. top: "Convolution3_4_2"
  1203. convolution_param {
  1204. num_output: 256
  1205. bias_term: false
  1206. pad: 1
  1207. kernel_size: 3
  1208. stride: 1
  1209. }
  1210. }
  1211. layer {
  1212. name: "BatchNorm3_4_2"
  1213. type: "BatchNorm"
  1214. bottom: "Convolution3_4_2"
  1215. top: "Convolution3_4_2"
  1216. param {
  1217. lr_mult: 0
  1218. decay_mult: 0
  1219. }
  1220. param {
  1221. lr_mult: 0
  1222. decay_mult: 0
  1223. }
  1224. param {
  1225. lr_mult: 0
  1226. decay_mult: 0
  1227. }
  1228. batch_norm_param {
  1229. use_global_stats: false
  1230. }
  1231. }
  1232. layer {
  1233. name: "Scale3_4_2"
  1234. type: "Scale"
  1235. bottom: "Convolution3_4_2"
  1236. top: "Convolution3_4_2"
  1237. scale_param {
  1238. bias_term: true
  1239. }
  1240. }
  1241. layer {
  1242. name: "ReLU3_4_2"
  1243. type: "ReLU"
  1244. bottom: "Convolution3_4_2"
  1245. top: "Convolution3_4_2"
  1246. }
  1247. #------------------------------------------------------------------------------
  1248. layer {
  1249. name: "Convolution3_4_3"
  1250. type: "Convolution"
  1251. bottom: "Convolution3_4_2"
  1252. top: "Convolution3_4_3"
  1253. convolution_param {
  1254. num_output: 512
  1255. bias_term: false
  1256. kernel_size: 1
  1257. }
  1258. }
  1259. layer {
  1260. name: "BatchNorm3_4_3"
  1261. type: "BatchNorm"
  1262. bottom: "Convolution3_4_3"
  1263. top: "Convolution3_4_3"
  1264. param {
  1265. lr_mult: 0
  1266. decay_mult: 0
  1267. }
  1268. param {
  1269. lr_mult: 0
  1270. decay_mult: 0
  1271. }
  1272. param {
  1273. lr_mult: 0
  1274. decay_mult: 0
  1275. }
  1276. batch_norm_param {
  1277. use_global_stats: false
  1278. }
  1279. }
  1280. layer {
  1281. name: "Scale3_4_3"
  1282. type: "Scale"
  1283. bottom: "Convolution3_4_3"
  1284. top: "Convolution3_4_3"
  1285. scale_param {
  1286. bias_term: true
  1287. }
  1288. }
  1289. layer {
  1290. name: "Sum3_4"
  1291. type: "Eltwise"
  1292. bottom: "Sum3_3"
  1293. bottom: "Convolution3_4_3"
  1294. top: "Sum3_4"
  1295. eltwise_param {
  1296. operation: SUM
  1297. }
  1298. }
  1299. layer {
  1300. name: "ReLU3_4"
  1301. type: "ReLU"
  1302. bottom: "Sum3_4"
  1303. top: "Sum3_4"
  1304. }
  1305.  
  1306. #------------------------------------------------------------------------------
  1307. #------------------------------------------------------------------------------
  1308. #------------------------------------------------------------------------------
  1309. #------------------------------------------------------------------------------
  1310.  
  1311. layer {
  1312. name: "Convolution4_1_1"
  1313. type: "Convolution"
  1314. bottom: "Sum3_4"
  1315. top: "Convolution4_1_1"
  1316. convolution_param {
  1317. num_output: 512
  1318. bias_term: false
  1319. kernel_size: 1
  1320. }
  1321. }
  1322. layer {
  1323. name: "BatchNorm4_1_1"
  1324. type: "BatchNorm"
  1325. bottom: "Convolution4_1_1"
  1326. top: "Convolution4_1_1"
  1327. param {
  1328. lr_mult: 0
  1329. decay_mult: 0
  1330. }
  1331. param {
  1332. lr_mult: 0
  1333. decay_mult: 0
  1334. }
  1335. param {
  1336. lr_mult: 0
  1337. decay_mult: 0
  1338. }
  1339. batch_norm_param {
  1340. use_global_stats: false
  1341. }
  1342. }
  1343. layer {
  1344. name: "Scale4_1_1"
  1345. type: "Scale"
  1346. bottom: "Convolution4_1_1"
  1347. top: "Convolution4_1_1"
  1348. scale_param {
  1349. bias_term: true
  1350. }
  1351. }
  1352. layer {
  1353. name: "ReLU4_1_1"
  1354. type: "ReLU"
  1355. bottom: "Convolution4_1_1"
  1356. top: "Convolution4_1_1"
  1357. }
  1358. layer {
  1359. name: "Convolution4_1_2"
  1360. type: "Convolution"
  1361. bottom: "Convolution4_1_1"
  1362. top: "Convolution4_1_2"
  1363. convolution_param {
  1364. num_output: 512
  1365. bias_term: false
  1366. pad: 1
  1367. kernel_size: 3
  1368. stride: 2
  1369. }
  1370. }
  1371. layer {
  1372. name: "BatchNorm4_1_2"
  1373. type: "BatchNorm"
  1374. bottom: "Convolution4_1_2"
  1375. top: "Convolution4_1_2"
  1376. param {
  1377. lr_mult: 0
  1378. decay_mult: 0
  1379. }
  1380. param {
  1381. lr_mult: 0
  1382. decay_mult: 0
  1383. }
  1384. param {
  1385. lr_mult: 0
  1386. decay_mult: 0
  1387. }
  1388. batch_norm_param {
  1389. use_global_stats: false
  1390. }
  1391. }
  1392. layer {
  1393. name: "Scale4_1_2"
  1394. type: "Scale"
  1395. bottom: "Convolution4_1_2"
  1396. top: "Convolution4_1_2"
  1397. scale_param {
  1398. bias_term: true
  1399. }
  1400. }
  1401. layer {
  1402. name: "ReLU4_1_2"
  1403. type: "ReLU"
  1404. bottom: "Convolution4_1_2"
  1405. top: "Convolution4_1_2"
  1406. }
  1407. layer {
  1408. name: "Convolution4_1_3"
  1409. type: "Convolution"
  1410. bottom: "Convolution4_1_2"
  1411. top: "Convolution4_1_3"
  1412. convolution_param {
  1413. num_output: 1024
  1414. bias_term: false
  1415. kernel_size: 1
  1416. }
  1417. }
  1418. layer {
  1419. name: "BatchNorm4_1_3"
  1420. type: "BatchNorm"
  1421. bottom: "Convolution4_1_3"
  1422. top: "Convolution4_1_3"
  1423. param {
  1424. lr_mult: 0
  1425. decay_mult: 0
  1426. }
  1427. param {
  1428. lr_mult: 0
  1429. decay_mult: 0
  1430. }
  1431. param {
  1432. lr_mult: 0
  1433. decay_mult: 0
  1434. }
  1435. batch_norm_param {
  1436. use_global_stats: false
  1437. }
  1438. }
  1439. layer {
  1440. name: "Scale4_1_3"
  1441. type: "Scale"
  1442. bottom: "Convolution4_1_3"
  1443. top: "Convolution4_1_3"
  1444. scale_param {
  1445. bias_term: true
  1446. }
  1447. }
  1448.  
  1449. layer {
  1450. name: "Convolution4_1"
  1451. type: "Convolution"
  1452. bottom: "Sum3_4"
  1453. top: "Convolution4_1"
  1454. convolution_param {
  1455. num_output: 1024
  1456. bias_term: false
  1457. kernel_size: 1
  1458. stride: 2
  1459. }
  1460. }
  1461. layer {
  1462. name: "BatchNorm4_1"
  1463. type: "BatchNorm"
  1464. bottom: "Convolution4_1"
  1465. top: "Convolution4_1"
  1466. param {
  1467. lr_mult: 0
  1468. decay_mult: 0
  1469. }
  1470. param {
  1471. lr_mult: 0
  1472. decay_mult: 0
  1473. }
  1474. param {
  1475. lr_mult: 0
  1476. decay_mult: 0
  1477. }
  1478. batch_norm_param {
  1479. use_global_stats: false
  1480. }
  1481. }
  1482. layer {
  1483. name: "Scale4_1"
  1484. type: "Scale"
  1485. bottom: "Convolution4_1"
  1486. top: "Convolution4_1"
  1487. scale_param {
  1488. bias_term: true
  1489. }
  1490. }
  1491.  
  1492. layer {
  1493. name: "Sum4_1"
  1494. type: "Eltwise"
  1495. bottom: "Convolution4_1"
  1496. bottom: "Convolution4_1_3"
  1497. top: "Sum4_1"
  1498. eltwise_param {
  1499. operation: SUM
  1500. }
  1501. }
  1502. layer {
  1503. name: "ReLU4_1"
  1504. type: "ReLU"
  1505. bottom: "Sum4_1"
  1506. top: "Sum4_1"
  1507. }
  1508.  
  1509. #------------------------------------------------------------------------------
  1510. #------------------------------------------------------------------------------
  1511.  
  1512. layer {
  1513. name: "Convolution4_2_1"
  1514. type: "Convolution"
  1515. bottom: "Sum4_1"
  1516. top: "Convolution4_2_1"
  1517. convolution_param {
  1518. num_output: 512
  1519. bias_term: false
  1520. kernel_size: 1
  1521. }
  1522. }
  1523. layer {
  1524. name: "BatchNorm4_2_1"
  1525. type: "BatchNorm"
  1526. bottom: "Convolution4_2_1"
  1527. top: "Convolution4_2_1"
  1528. param {
  1529. lr_mult: 0
  1530. decay_mult: 0
  1531. }
  1532. param {
  1533. lr_mult: 0
  1534. decay_mult: 0
  1535. }
  1536. param {
  1537. lr_mult: 0
  1538. decay_mult: 0
  1539. }
  1540. batch_norm_param {
  1541. use_global_stats: false
  1542. }
  1543. }
  1544. layer {
  1545. name: "Scale4_2_1"
  1546. type: "Scale"
  1547. bottom: "Convolution4_2_1"
  1548. top: "Convolution4_2_1"
  1549. scale_param {
  1550. bias_term: true
  1551. }
  1552. }
  1553. layer {
  1554. name: "ReLU4_2_1"
  1555. type: "ReLU"
  1556. bottom: "Convolution4_2_1"
  1557. top: "Convolution4_2_1"
  1558. }
  1559. #------------------------------------------------------------------------------
  1560. layer {
  1561. name: "Convolution4_2_2"
  1562. type: "Convolution"
  1563. bottom: "Convolution4_2_1"
  1564. top: "Convolution4_2_2"
  1565. convolution_param {
  1566. num_output: 512
  1567. bias_term: false
  1568. pad: 1
  1569. kernel_size: 3
  1570. stride: 1
  1571. }
  1572. }
  1573. layer {
  1574. name: "BatchNorm4_2_2"
  1575. type: "BatchNorm"
  1576. bottom: "Convolution4_2_2"
  1577. top: "Convolution4_2_2"
  1578. param {
  1579. lr_mult: 0
  1580. decay_mult: 0
  1581. }
  1582. param {
  1583. lr_mult: 0
  1584. decay_mult: 0
  1585. }
  1586. param {
  1587. lr_mult: 0
  1588. decay_mult: 0
  1589. }
  1590. batch_norm_param {
  1591. use_global_stats: false
  1592. }
  1593. }
  1594. layer {
  1595. name: "Scale4_2_2"
  1596. type: "Scale"
  1597. bottom: "Convolution4_2_2"
  1598. top: "Convolution4_2_2"
  1599. scale_param {
  1600. bias_term: true
  1601. }
  1602. }
  1603. layer {
  1604. name: "ReLU4_2_2"
  1605. type: "ReLU"
  1606. bottom: "Convolution4_2_2"
  1607. top: "Convolution4_2_2"
  1608. }
  1609. #------------------------------------------------------------------------------
  1610. layer {
  1611. name: "Convolution4_2_3"
  1612. type: "Convolution"
  1613. bottom: "Convolution4_2_2"
  1614. top: "Convolution4_2_3"
  1615. convolution_param {
  1616. num_output: 1024
  1617. bias_term: false
  1618. kernel_size: 1
  1619. }
  1620. }
  1621. layer {
  1622. name: "BatchNorm4_2_3"
  1623. type: "BatchNorm"
  1624. bottom: "Convolution4_2_3"
  1625. top: "Convolution4_2_3"
  1626. param {
  1627. lr_mult: 0
  1628. decay_mult: 0
  1629. }
  1630. param {
  1631. lr_mult: 0
  1632. decay_mult: 0
  1633. }
  1634. param {
  1635. lr_mult: 0
  1636. decay_mult: 0
  1637. }
  1638. batch_norm_param {
  1639. use_global_stats: false
  1640. }
  1641. }
  1642. layer {
  1643. name: "Scale4_2_3"
  1644. type: "Scale"
  1645. bottom: "Convolution4_2_3"
  1646. top: "Convolution4_2_3"
  1647. scale_param {
  1648. bias_term: true
  1649. }
  1650. }
  1651. layer {
  1652. name: "Sum4_2"
  1653. type: "Eltwise"
  1654. bottom: "Sum4_1"
  1655. bottom: "Convolution4_2_3"
  1656. top: "Sum4_2"
  1657. eltwise_param {
  1658. operation: SUM
  1659. }
  1660. }
  1661. layer {
  1662. name: "ReLU4_2"
  1663. type: "ReLU"
  1664. bottom: "Sum4_2"
  1665. top: "Sum4_2"
  1666. }
  1667.  
  1668. #------------------------------------------------------------------------------
  1669. #------------------------------------------------------------------------------
  1670.  
  1671. layer {
  1672. name: "Convolution4_3_1"
  1673. type: "Convolution"
  1674. bottom: "Sum4_2"
  1675. top: "Convolution4_3_1"
  1676. convolution_param {
  1677. num_output: 512
  1678. bias_term: false
  1679. kernel_size: 1
  1680. }
  1681. }
  1682. layer {
  1683. name: "BatchNorm4_3_1"
  1684. type: "BatchNorm"
  1685. bottom: "Convolution4_3_1"
  1686. top: "Convolution4_3_1"
  1687. param {
  1688. lr_mult: 0
  1689. decay_mult: 0
  1690. }
  1691. param {
  1692. lr_mult: 0
  1693. decay_mult: 0
  1694. }
  1695. param {
  1696. lr_mult: 0
  1697. decay_mult: 0
  1698. }
  1699. batch_norm_param {
  1700. use_global_stats: false
  1701. }
  1702. }
  1703. layer {
  1704. name: "Scale4_3_1"
  1705. type: "Scale"
  1706. bottom: "Convolution4_3_1"
  1707. top: "Convolution4_3_1"
  1708. scale_param {
  1709. bias_term: true
  1710. }
  1711. }
  1712. layer {
  1713. name: "ReLU4_3_1"
  1714. type: "ReLU"
  1715. bottom: "Convolution4_3_1"
  1716. top: "Convolution4_3_1"
  1717. }
  1718. #------------------------------------------------------------------------------
  1719. layer {
  1720. name: "Convolution4_3_2"
  1721. type: "Convolution"
  1722. bottom: "Convolution4_3_1"
  1723. top: "Convolution4_3_2"
  1724. convolution_param {
  1725. num_output: 512
  1726. bias_term: false
  1727. pad: 1
  1728. kernel_size: 3
  1729. stride: 1
  1730. }
  1731. }
  1732. layer {
  1733. name: "BatchNorm4_3_2"
  1734. type: "BatchNorm"
  1735. bottom: "Convolution4_3_2"
  1736. top: "Convolution4_3_2"
  1737. param {
  1738. lr_mult: 0
  1739. decay_mult: 0
  1740. }
  1741. param {
  1742. lr_mult: 0
  1743. decay_mult: 0
  1744. }
  1745. param {
  1746. lr_mult: 0
  1747. decay_mult: 0
  1748. }
  1749. batch_norm_param {
  1750. use_global_stats: false
  1751. }
  1752. }
  1753. layer {
  1754. name: "Scale4_3_2"
  1755. type: "Scale"
  1756. bottom: "Convolution4_3_2"
  1757. top: "Convolution4_3_2"
  1758. scale_param {
  1759. bias_term: true
  1760. }
  1761. }
  1762. layer {
  1763. name: "ReLU4_3_2"
  1764. type: "ReLU"
  1765. bottom: "Convolution4_3_2"
  1766. top: "Convolution4_3_2"
  1767. }
  1768. #------------------------------------------------------------------------------
  1769. layer {
  1770. name: "Convolution4_3_3"
  1771. type: "Convolution"
  1772. bottom: "Convolution4_3_2"
  1773. top: "Convolution4_3_3"
  1774. convolution_param {
  1775. num_output: 1024
  1776. bias_term: false
  1777. kernel_size: 1
  1778. }
  1779. }
  1780. layer {
  1781. name: "BatchNorm4_3_3"
  1782. type: "BatchNorm"
  1783. bottom: "Convolution4_3_3"
  1784. top: "Convolution4_3_3"
  1785. param {
  1786. lr_mult: 0
  1787. decay_mult: 0
  1788. }
  1789. param {
  1790. lr_mult: 0
  1791. decay_mult: 0
  1792. }
  1793. param {
  1794. lr_mult: 0
  1795. decay_mult: 0
  1796. }
  1797. batch_norm_param {
  1798. use_global_stats: false
  1799. }
  1800. }
  1801. layer {
  1802. name: "Scale4_3_3"
  1803. type: "Scale"
  1804. bottom: "Convolution4_3_3"
  1805. top: "Convolution4_3_3"
  1806. scale_param {
  1807. bias_term: true
  1808. }
  1809. }
  1810. layer {
  1811. name: "Sum4_3"
  1812. type: "Eltwise"
  1813. bottom: "Sum4_2"
  1814. bottom: "Convolution4_3_3"
  1815. top: "Sum4_3"
  1816. eltwise_param {
  1817. operation: SUM
  1818. }
  1819. }
  1820. layer {
  1821. name: "ReLU4_3"
  1822. type: "ReLU"
  1823. bottom: "Sum4_3"
  1824. top: "Sum4_3"
  1825. }
  1826.  
  1827. #------------------------------------------------------------------------------
  1828. #------------------------------------------------------------------------------
  1829.  
  1830. layer {
  1831. name: "Convolution4_4_1"
  1832. type: "Convolution"
  1833. bottom: "Sum4_3"
  1834. top: "Convolution4_4_1"
  1835. convolution_param {
  1836. num_output: 512
  1837. bias_term: false
  1838. kernel_size: 1
  1839. }
  1840. }
  1841. layer {
  1842. name: "BatchNorm4_4_1"
  1843. type: "BatchNorm"
  1844. bottom: "Convolution4_4_1"
  1845. top: "Convolution4_4_1"
  1846. param {
  1847. lr_mult: 0
  1848. decay_mult: 0
  1849. }
  1850. param {
  1851. lr_mult: 0
  1852. decay_mult: 0
  1853. }
  1854. param {
  1855. lr_mult: 0
  1856. decay_mult: 0
  1857. }
  1858. batch_norm_param {
  1859. use_global_stats: false
  1860. }
  1861. }
  1862. layer {
  1863. name: "Scale4_4_1"
  1864. type: "Scale"
  1865. bottom: "Convolution4_4_1"
  1866. top: "Convolution4_4_1"
  1867. scale_param {
  1868. bias_term: true
  1869. }
  1870. }
  1871. layer {
  1872. name: "ReLU4_4_1"
  1873. type: "ReLU"
  1874. bottom: "Convolution4_4_1"
  1875. top: "Convolution4_4_1"
  1876. }
  1877. #------------------------------------------------------------------------------
  1878. layer {
  1879. name: "Convolution4_4_2"
  1880. type: "Convolution"
  1881. bottom: "Convolution4_4_1"
  1882. top: "Convolution4_4_2"
  1883. convolution_param {
  1884. num_output: 512
  1885. bias_term: false
  1886. pad: 1
  1887. kernel_size: 3
  1888. stride: 1
  1889. }
  1890. }
  1891. layer {
  1892. name: "BatchNorm4_4_2"
  1893. type: "BatchNorm"
  1894. bottom: "Convolution4_4_2"
  1895. top: "Convolution4_4_2"
  1896. param {
  1897. lr_mult: 0
  1898. decay_mult: 0
  1899. }
  1900. param {
  1901. lr_mult: 0
  1902. decay_mult: 0
  1903. }
  1904. param {
  1905. lr_mult: 0
  1906. decay_mult: 0
  1907. }
  1908. batch_norm_param {
  1909. use_global_stats: false
  1910. }
  1911. }
  1912. layer {
  1913. name: "Scale4_4_2"
  1914. type: "Scale"
  1915. bottom: "Convolution4_4_2"
  1916. top: "Convolution4_4_2"
  1917. scale_param {
  1918. bias_term: true
  1919. }
  1920. }
  1921. layer {
  1922. name: "ReLU4_4_2"
  1923. type: "ReLU"
  1924. bottom: "Convolution4_4_2"
  1925. top: "Convolution4_4_2"
  1926. }
  1927. #------------------------------------------------------------------------------
  1928. layer {
  1929. name: "Convolution4_4_3"
  1930. type: "Convolution"
  1931. bottom: "Convolution4_4_2"
  1932. top: "Convolution4_4_3"
  1933. convolution_param {
  1934. num_output: 1024
  1935. bias_term: false
  1936. kernel_size: 1
  1937. }
  1938. }
  1939. layer {
  1940. name: "BatchNorm4_4_3"
  1941. type: "BatchNorm"
  1942. bottom: "Convolution4_4_3"
  1943. top: "Convolution4_4_3"
  1944. param {
  1945. lr_mult: 0
  1946. decay_mult: 0
  1947. }
  1948. param {
  1949. lr_mult: 0
  1950. decay_mult: 0
  1951. }
  1952. param {
  1953. lr_mult: 0
  1954. decay_mult: 0
  1955. }
  1956. batch_norm_param {
  1957. use_global_stats: false
  1958. }
  1959. }
  1960. layer {
  1961. name: "Scale4_4_3"
  1962. type: "Scale"
  1963. bottom: "Convolution4_4_3"
  1964. top: "Convolution4_4_3"
  1965. scale_param {
  1966. bias_term: true
  1967. }
  1968. }
  1969. layer {
  1970. name: "Sum4_4"
  1971. type: "Eltwise"
  1972. bottom: "Sum4_3"
  1973. bottom: "Convolution4_4_3"
  1974. top: "Sum4_4"
  1975. eltwise_param {
  1976. operation: SUM
  1977. }
  1978. }
  1979. layer {
  1980. name: "ReLU4_4"
  1981. type: "ReLU"
  1982. bottom: "Sum4_4"
  1983. top: "Sum4_4"
  1984. }
  1985.  
  1986. #------------------------------------------------------------------------------
  1987. #------------------------------------------------------------------------------
  1988.  
  1989. layer {
  1990. name: "Convolution4_5_1"
  1991. type: "Convolution"
  1992. bottom: "Sum4_4"
  1993. top: "Convolution4_5_1"
  1994. convolution_param {
  1995. num_output: 512
  1996. bias_term: false
  1997. kernel_size: 1
  1998. }
  1999. }
  2000. layer {
  2001. name: "BatchNorm4_5_1"
  2002. type: "BatchNorm"
  2003. bottom: "Convolution4_5_1"
  2004. top: "Convolution4_5_1"
  2005. param {
  2006. lr_mult: 0
  2007. decay_mult: 0
  2008. }
  2009. param {
  2010. lr_mult: 0
  2011. decay_mult: 0
  2012. }
  2013. param {
  2014. lr_mult: 0
  2015. decay_mult: 0
  2016. }
  2017. batch_norm_param {
  2018. use_global_stats: false
  2019. }
  2020. }
  2021. layer {
  2022. name: "Scale4_5_1"
  2023. type: "Scale"
  2024. bottom: "Convolution4_5_1"
  2025. top: "Convolution4_5_1"
  2026. scale_param {
  2027. bias_term: true
  2028. }
  2029. }
  2030. layer {
  2031. name: "ReLU4_5_1"
  2032. type: "ReLU"
  2033. bottom: "Convolution4_5_1"
  2034. top: "Convolution4_5_1"
  2035. }
  2036. #------------------------------------------------------------------------------
  2037. layer {
  2038. name: "Convolution4_5_2"
  2039. type: "Convolution"
  2040. bottom: "Convolution4_5_1"
  2041. top: "Convolution4_5_2"
  2042. convolution_param {
  2043. num_output: 512
  2044. bias_term: false
  2045. pad: 1
  2046. kernel_size: 3
  2047. stride: 1
  2048. }
  2049. }
  2050. layer {
  2051. name: "BatchNorm4_5_2"
  2052. type: "BatchNorm"
  2053. bottom: "Convolution4_5_2"
  2054. top: "Convolution4_5_2"
  2055. param {
  2056. lr_mult: 0
  2057. decay_mult: 0
  2058. }
  2059. param {
  2060. lr_mult: 0
  2061. decay_mult: 0
  2062. }
  2063. param {
  2064. lr_mult: 0
  2065. decay_mult: 0
  2066. }
  2067. batch_norm_param {
  2068. use_global_stats: false
  2069. }
  2070. }
  2071. layer {
  2072. name: "Scale4_5_2"
  2073. type: "Scale"
  2074. bottom: "Convolution4_5_2"
  2075. top: "Convolution4_5_2"
  2076. scale_param {
  2077. bias_term: true
  2078. }
  2079. }
  2080. layer {
  2081. name: "ReLU4_5_2"
  2082. type: "ReLU"
  2083. bottom: "Convolution4_5_2"
  2084. top: "Convolution4_5_2"
  2085. }
  2086. #------------------------------------------------------------------------------
  2087. layer {
  2088. name: "Convolution4_5_3"
  2089. type: "Convolution"
  2090. bottom: "Convolution4_5_2"
  2091. top: "Convolution4_5_3"
  2092. convolution_param {
  2093. num_output: 1024
  2094. bias_term: false
  2095. kernel_size: 1
  2096. }
  2097. }
  2098. layer {
  2099. name: "BatchNorm4_5_3"
  2100. type: "BatchNorm"
  2101. bottom: "Convolution4_5_3"
  2102. top: "Convolution4_5_3"
  2103. param {
  2104. lr_mult: 0
  2105. decay_mult: 0
  2106. }
  2107. param {
  2108. lr_mult: 0
  2109. decay_mult: 0
  2110. }
  2111. param {
  2112. lr_mult: 0
  2113. decay_mult: 0
  2114. }
  2115. batch_norm_param {
  2116. use_global_stats: false
  2117. }
  2118. }
  2119. layer {
  2120. name: "Scale4_5_3"
  2121. type: "Scale"
  2122. bottom: "Convolution4_5_3"
  2123. top: "Convolution4_5_3"
  2124. scale_param {
  2125. bias_term: true
  2126. }
  2127. }
  2128. layer {
  2129. name: "Sum4_5"
  2130. type: "Eltwise"
  2131. bottom: "Sum4_4"
  2132. bottom: "Convolution4_5_3"
  2133. top: "Sum4_5"
  2134. eltwise_param {
  2135. operation: SUM
  2136. }
  2137. }
  2138. layer {
  2139. name: "ReLU4_5"
  2140. type: "ReLU"
  2141. bottom: "Sum4_5"
  2142. top: "Sum4_5"
  2143. }
  2144.  
  2145. #------------------------------------------------------------------------------
  2146. #------------------------------------------------------------------------------
  2147.  
  2148. layer {
  2149. name: "Convolution4_6_1"
  2150. type: "Convolution"
  2151. bottom: "Sum4_5"
  2152. top: "Convolution4_6_1"
  2153. convolution_param {
  2154. num_output: 512
  2155. bias_term: false
  2156. kernel_size: 1
  2157. }
  2158. }
  2159. layer {
  2160. name: "BatchNorm4_6_1"
  2161. type: "BatchNorm"
  2162. bottom: "Convolution4_6_1"
  2163. top: "Convolution4_6_1"
  2164. param {
  2165. lr_mult: 0
  2166. decay_mult: 0
  2167. }
  2168. param {
  2169. lr_mult: 0
  2170. decay_mult: 0
  2171. }
  2172. param {
  2173. lr_mult: 0
  2174. decay_mult: 0
  2175. }
  2176. batch_norm_param {
  2177. use_global_stats: false
  2178. }
  2179. }
  2180. layer {
  2181. name: "Scale4_6_1"
  2182. type: "Scale"
  2183. bottom: "Convolution4_6_1"
  2184. top: "Convolution4_6_1"
  2185. scale_param {
  2186. bias_term: true
  2187. }
  2188. }
  2189. layer {
  2190. name: "ReLU4_6_1"
  2191. type: "ReLU"
  2192. bottom: "Convolution4_6_1"
  2193. top: "Convolution4_6_1"
  2194. }
  2195. #------------------------------------------------------------------------------
  2196. layer {
  2197. name: "Convolution4_6_2"
  2198. type: "Convolution"
  2199. bottom: "Convolution4_6_1"
  2200. top: "Convolution4_6_2"
  2201. convolution_param {
  2202. num_output: 512
  2203. bias_term: false
  2204. pad: 1
  2205. kernel_size: 3
  2206. stride: 1
  2207. }
  2208. }
  2209. layer {
  2210. name: "BatchNorm4_6_2"
  2211. type: "BatchNorm"
  2212. bottom: "Convolution4_6_2"
  2213. top: "Convolution4_6_2"
  2214. param {
  2215. lr_mult: 0
  2216. decay_mult: 0
  2217. }
  2218. param {
  2219. lr_mult: 0
  2220. decay_mult: 0
  2221. }
  2222. param {
  2223. lr_mult: 0
  2224. decay_mult: 0
  2225. }
  2226. batch_norm_param {
  2227. use_global_stats: false
  2228. }
  2229. }
  2230. layer {
  2231. name: "Scale4_6_2"
  2232. type: "Scale"
  2233. bottom: "Convolution4_6_2"
  2234. top: "Convolution4_6_2"
  2235. scale_param {
  2236. bias_term: true
  2237. }
  2238. }
  2239. layer {
  2240. name: "ReLU4_6_2"
  2241. type: "ReLU"
  2242. bottom: "Convolution4_6_2"
  2243. top: "Convolution4_6_2"
  2244. }
  2245. #------------------------------------------------------------------------------
  2246. layer {
  2247. name: "Convolution4_6_3"
  2248. type: "Convolution"
  2249. bottom: "Convolution4_6_2"
  2250. top: "Convolution4_6_3"
  2251. convolution_param {
  2252. num_output: 1024
  2253. bias_term: false
  2254. kernel_size: 1
  2255. }
  2256. }
  2257. layer {
  2258. name: "BatchNorm4_6_3"
  2259. type: "BatchNorm"
  2260. bottom: "Convolution4_6_3"
  2261. top: "Convolution4_6_3"
  2262. param {
  2263. lr_mult: 0
  2264. decay_mult: 0
  2265. }
  2266. param {
  2267. lr_mult: 0
  2268. decay_mult: 0
  2269. }
  2270. param {
  2271. lr_mult: 0
  2272. decay_mult: 0
  2273. }
  2274. batch_norm_param {
  2275. use_global_stats: false
  2276. }
  2277. }
  2278. layer {
  2279. name: "Scale4_6_3"
  2280. type: "Scale"
  2281. bottom: "Convolution4_6_3"
  2282. top: "Convolution4_6_3"
  2283. scale_param {
  2284. bias_term: true
  2285. }
  2286. }
  2287. layer {
  2288. name: "Sum4_6"
  2289. type: "Eltwise"
  2290. bottom: "Sum4_5"
  2291. bottom: "Convolution4_6_3"
  2292. top: "Sum4_6"
  2293. eltwise_param {
  2294. operation: SUM
  2295. }
  2296. }
  2297. layer {
  2298. name: "ReLU4_6"
  2299. type: "ReLU"
  2300. bottom: "Sum4_6"
  2301. top: "Sum4_6"
  2302. }
  2303.  
  2304. #------------------------------------------------------------------------------
  2305. #------------------------------------------------------------------------------
  2306. #------------------------------------------------------------------------------
  2307. #------------------------------------------------------------------------------
  2308.  
  2309. layer {
  2310. name: "Convolution5_1_1"
  2311. type: "Convolution"
  2312. bottom: "Sum4_6"
  2313. top: "Convolution5_1_1"
  2314. convolution_param {
  2315. num_output: 1024
  2316. bias_term: false
  2317. kernel_size: 1
  2318. }
  2319. }
  2320. layer {
  2321. name: "BatchNorm5_1_1"
  2322. type: "BatchNorm"
  2323. bottom: "Convolution5_1_1"
  2324. top: "Convolution5_1_1"
  2325. param {
  2326. lr_mult: 0
  2327. decay_mult: 0
  2328. }
  2329. param {
  2330. lr_mult: 0
  2331. decay_mult: 0
  2332. }
  2333. param {
  2334. lr_mult: 0
  2335. decay_mult: 0
  2336. }
  2337. batch_norm_param {
  2338. use_global_stats: false
  2339. }
  2340. }
  2341. layer {
  2342. name: "Scale5_1_1"
  2343. type: "Scale"
  2344. bottom: "Convolution5_1_1"
  2345. top: "Convolution5_1_1"
  2346. scale_param {
  2347. bias_term: true
  2348. }
  2349. }
  2350. layer {
  2351. name: "ReLU5_1_1"
  2352. type: "ReLU"
  2353. bottom: "Convolution5_1_1"
  2354. top: "Convolution5_1_1"
  2355. }
  2356. layer {
  2357. name: "Convolution5_1_2"
  2358. type: "Convolution"
  2359. bottom: "Convolution5_1_1"
  2360. top: "Convolution5_1_2"
  2361. convolution_param {
  2362. num_output: 1024
  2363. bias_term: false
  2364. pad: 1
  2365. kernel_size: 3
  2366. stride: 2
  2367. }
  2368. }
  2369. layer {
  2370. name: "BatchNorm5_1_2"
  2371. type: "BatchNorm"
  2372. bottom: "Convolution5_1_2"
  2373. top: "Convolution5_1_2"
  2374. param {
  2375. lr_mult: 0
  2376. decay_mult: 0
  2377. }
  2378. param {
  2379. lr_mult: 0
  2380. decay_mult: 0
  2381. }
  2382. param {
  2383. lr_mult: 0
  2384. decay_mult: 0
  2385. }
  2386. batch_norm_param {
  2387. use_global_stats: false
  2388. }
  2389. }
  2390. layer {
  2391. name: "Scale5_1_2"
  2392. type: "Scale"
  2393. bottom: "Convolution5_1_2"
  2394. top: "Convolution5_1_2"
  2395. scale_param {
  2396. bias_term: true
  2397. }
  2398. }
  2399. layer {
  2400. name: "ReLU5_1_2"
  2401. type: "ReLU"
  2402. bottom: "Convolution5_1_2"
  2403. top: "Convolution5_1_2"
  2404. }
  2405. layer {
  2406. name: "Convolution5_1_3"
  2407. type: "Convolution"
  2408. bottom: "Convolution5_1_2"
  2409. top: "Convolution5_1_3"
  2410. convolution_param {
  2411. num_output: 2048
  2412. bias_term: false
  2413. kernel_size: 1
  2414. }
  2415. }
  2416. layer {
  2417. name: "BatchNorm5_1_3"
  2418. type: "BatchNorm"
  2419. bottom: "Convolution5_1_3"
  2420. top: "Convolution5_1_3"
  2421. param {
  2422. lr_mult: 0
  2423. decay_mult: 0
  2424. }
  2425. param {
  2426. lr_mult: 0
  2427. decay_mult: 0
  2428. }
  2429. param {
  2430. lr_mult: 0
  2431. decay_mult: 0
  2432. }
  2433. batch_norm_param {
  2434. use_global_stats: false
  2435. }
  2436. }
  2437. layer {
  2438. name: "Scale5_1_3"
  2439. type: "Scale"
  2440. bottom: "Convolution5_1_3"
  2441. top: "Convolution5_1_3"
  2442. scale_param {
  2443. bias_term: true
  2444. }
  2445. }
  2446.  
  2447. layer {
  2448. name: "Convolution5_1"
  2449. type: "Convolution"
  2450. bottom: "Sum4_6"
  2451. top: "Convolution5_1"
  2452. convolution_param {
  2453. num_output: 2048
  2454. bias_term: false
  2455. kernel_size: 1
  2456. stride: 2
  2457. }
  2458. }
  2459. layer {
  2460. name: "BatchNorm5_1"
  2461. type: "BatchNorm"
  2462. bottom: "Convolution5_1"
  2463. top: "Convolution5_1"
  2464. param {
  2465. lr_mult: 0
  2466. decay_mult: 0
  2467. }
  2468. param {
  2469. lr_mult: 0
  2470. decay_mult: 0
  2471. }
  2472. param {
  2473. lr_mult: 0
  2474. decay_mult: 0
  2475. }
  2476. batch_norm_param {
  2477. use_global_stats: false
  2478. }
  2479. }
  2480. layer {
  2481. name: "Scale5_1"
  2482. type: "Scale"
  2483. bottom: "Convolution5_1"
  2484. top: "Convolution5_1"
  2485. scale_param {
  2486. bias_term: true
  2487. }
  2488. }
  2489.  
  2490. layer {
  2491. name: "Sum5_1"
  2492. type: "Eltwise"
  2493. bottom: "Convolution5_1"
  2494. bottom: "Convolution5_1_3"
  2495. top: "Sum5_1"
  2496. eltwise_param {
  2497. operation: SUM
  2498. }
  2499. }
  2500. layer {
  2501. name: "ReLU5_1"
  2502. type: "ReLU"
  2503. bottom: "Sum5_1"
  2504. top: "Sum5_1"
  2505. }
  2506.  
  2507. #------------------------------------------------------------------------------
  2508. #------------------------------------------------------------------------------
  2509.  
  2510. layer {
  2511. name: "Convolution5_2_1"
  2512. type: "Convolution"
  2513. bottom: "Sum5_1"
  2514. top: "Convolution5_2_1"
  2515. convolution_param {
  2516. num_output: 1024
  2517. bias_term: false
  2518. kernel_size: 1
  2519. }
  2520. }
  2521. layer {
  2522. name: "BatchNorm5_2_1"
  2523. type: "BatchNorm"
  2524. bottom: "Convolution5_2_1"
  2525. top: "Convolution5_2_1"
  2526. param {
  2527. lr_mult: 0
  2528. decay_mult: 0
  2529. }
  2530. param {
  2531. lr_mult: 0
  2532. decay_mult: 0
  2533. }
  2534. param {
  2535. lr_mult: 0
  2536. decay_mult: 0
  2537. }
  2538. batch_norm_param {
  2539. use_global_stats: false
  2540. }
  2541. }
  2542. layer {
  2543. name: "Scale5_2_1"
  2544. type: "Scale"
  2545. bottom: "Convolution5_2_1"
  2546. top: "Convolution5_2_1"
  2547. scale_param {
  2548. bias_term: true
  2549. }
  2550. }
  2551. layer {
  2552. name: "ReLU5_2_1"
  2553. type: "ReLU"
  2554. bottom: "Convolution5_2_1"
  2555. top: "Convolution5_2_1"
  2556. }
  2557. #------------------------------------------------------------------------------
  2558. layer {
  2559. name: "Convolution5_2_2"
  2560. type: "Convolution"
  2561. bottom: "Convolution5_2_1"
  2562. top: "Convolution5_2_2"
  2563. convolution_param {
  2564. num_output: 1024
  2565. bias_term: false
  2566. pad: 1
  2567. kernel_size: 3
  2568. stride: 1
  2569. }
  2570. }
  2571. layer {
  2572. name: "BatchNorm5_2_2"
  2573. type: "BatchNorm"
  2574. bottom: "Convolution5_2_2"
  2575. top: "Convolution5_2_2"
  2576. param {
  2577. lr_mult: 0
  2578. decay_mult: 0
  2579. }
  2580. param {
  2581. lr_mult: 0
  2582. decay_mult: 0
  2583. }
  2584. param {
  2585. lr_mult: 0
  2586. decay_mult: 0
  2587. }
  2588. batch_norm_param {
  2589. use_global_stats: false
  2590. }
  2591. }
  2592. layer {
  2593. name: "Scale5_2_2"
  2594. type: "Scale"
  2595. bottom: "Convolution5_2_2"
  2596. top: "Convolution5_2_2"
  2597. scale_param {
  2598. bias_term: true
  2599. }
  2600. }
  2601. layer {
  2602. name: "ReLU5_2_2"
  2603. type: "ReLU"
  2604. bottom: "Convolution5_2_2"
  2605. top: "Convolution5_2_2"
  2606. }
  2607. #------------------------------------------------------------------------------
  2608. layer {
  2609. name: "Convolution5_2_3"
  2610. type: "Convolution"
  2611. bottom: "Convolution5_2_2"
  2612. top: "Convolution5_2_3"
  2613. convolution_param {
  2614. num_output: 2048
  2615. bias_term: false
  2616. kernel_size: 1
  2617. }
  2618. }
  2619. layer {
  2620. name: "BatchNorm5_2_3"
  2621. type: "BatchNorm"
  2622. bottom: "Convolution5_2_3"
  2623. top: "Convolution5_2_3"
  2624. param {
  2625. lr_mult: 0
  2626. decay_mult: 0
  2627. }
  2628. param {
  2629. lr_mult: 0
  2630. decay_mult: 0
  2631. }
  2632. param {
  2633. lr_mult: 0
  2634. decay_mult: 0
  2635. }
  2636. batch_norm_param {
  2637. use_global_stats: false
  2638. }
  2639. }
  2640. layer {
  2641. name: "Scale5_2_3"
  2642. type: "Scale"
  2643. bottom: "Convolution5_2_3"
  2644. top: "Convolution5_2_3"
  2645. scale_param {
  2646. bias_term: true
  2647. }
  2648. }
  2649. layer {
  2650. name: "Sum5_2"
  2651. type: "Eltwise"
  2652. bottom: "Sum5_1"
  2653. bottom: "Convolution5_2_3"
  2654. top: "Sum5_2"
  2655. eltwise_param {
  2656. operation: SUM
  2657. }
  2658. }
  2659. layer {
  2660. name: "ReLU5_2"
  2661. type: "ReLU"
  2662. bottom: "Sum5_2"
  2663. top: "Sum5_2"
  2664. }
  2665.  
  2666. #------------------------------------------------------------------------------
  2667. #------------------------------------------------------------------------------
  2668.  
  2669. layer {
  2670. name: "Convolution5_3_1"
  2671. type: "Convolution"
  2672. bottom: "Sum5_2"
  2673. top: "Convolution5_3_1"
  2674. convolution_param {
  2675. num_output: 1024
  2676. bias_term: false
  2677. kernel_size: 1
  2678. }
  2679. }
  2680. layer {
  2681. name: "BatchNorm5_3_1"
  2682. type: "BatchNorm"
  2683. bottom: "Convolution5_3_1"
  2684. top: "Convolution5_3_1"
  2685. param {
  2686. lr_mult: 0
  2687. decay_mult: 0
  2688. }
  2689. param {
  2690. lr_mult: 0
  2691. decay_mult: 0
  2692. }
  2693. param {
  2694. lr_mult: 0
  2695. decay_mult: 0
  2696. }
  2697. batch_norm_param {
  2698. use_global_stats: false
  2699. }
  2700. }
  2701. layer {
  2702. name: "Scale5_3_1"
  2703. type: "Scale"
  2704. bottom: "Convolution5_3_1"
  2705. top: "Convolution5_3_1"
  2706. scale_param {
  2707. bias_term: true
  2708. }
  2709. }
  2710. layer {
  2711. name: "ReLU5_3_1"
  2712. type: "ReLU"
  2713. bottom: "Convolution5_3_1"
  2714. top: "Convolution5_3_1"
  2715. }
  2716. #------------------------------------------------------------------------------
  2717. layer {
  2718. name: "Convolution5_3_2"
  2719. type: "Convolution"
  2720. bottom: "Convolution5_3_1"
  2721. top: "Convolution5_3_2"
  2722. convolution_param {
  2723. num_output: 1024
  2724. bias_term: false
  2725. pad: 1
  2726. kernel_size: 3
  2727. stride: 1
  2728. }
  2729. }
  2730. layer {
  2731. name: "BatchNorm5_3_2"
  2732. type: "BatchNorm"
  2733. bottom: "Convolution5_3_2"
  2734. top: "Convolution5_3_2"
  2735. param {
  2736. lr_mult: 0
  2737. decay_mult: 0
  2738. }
  2739. param {
  2740. lr_mult: 0
  2741. decay_mult: 0
  2742. }
  2743. param {
  2744. lr_mult: 0
  2745. decay_mult: 0
  2746. }
  2747. batch_norm_param {
  2748. use_global_stats: false
  2749. }
  2750. }
  2751. layer {
  2752. name: "Scale5_3_2"
  2753. type: "Scale"
  2754. bottom: "Convolution5_3_2"
  2755. top: "Convolution5_3_2"
  2756. scale_param {
  2757. bias_term: true
  2758. }
  2759. }
  2760. layer {
  2761. name: "ReLU5_3_2"
  2762. type: "ReLU"
  2763. bottom: "Convolution5_3_2"
  2764. top: "Convolution5_3_2"
  2765. }
  2766. #------------------------------------------------------------------------------
  2767. layer {
  2768. name: "Convolution5_3_3"
  2769. type: "Convolution"
  2770. bottom: "Convolution5_3_2"
  2771. top: "Convolution5_3_3"
  2772. convolution_param {
  2773. num_output: 2048
  2774. bias_term: false
  2775. kernel_size: 1
  2776. }
  2777. }
  2778. layer {
  2779. name: "BatchNorm5_3_3"
  2780. type: "BatchNorm"
  2781. bottom: "Convolution5_3_3"
  2782. top: "Convolution5_3_3"
  2783. param {
  2784. lr_mult: 0
  2785. decay_mult: 0
  2786. }
  2787. param {
  2788. lr_mult: 0
  2789. decay_mult: 0
  2790. }
  2791. param {
  2792. lr_mult: 0
  2793. decay_mult: 0
  2794. }
  2795. batch_norm_param {
  2796. use_global_stats: false
  2797. }
  2798. }
  2799. layer {
  2800. name: "Scale5_3_3"
  2801. type: "Scale"
  2802. bottom: "Convolution5_3_3"
  2803. top: "Convolution5_3_3"
  2804. scale_param {
  2805. bias_term: true
  2806. }
  2807. }
  2808. layer {
  2809. name: "Sum5_3"
  2810. type: "Eltwise"
  2811. bottom: "Sum5_2"
  2812. bottom: "Convolution5_3_3"
  2813. top: "Sum5_3"
  2814. eltwise_param {
  2815. operation: SUM
  2816. }
  2817. }
  2818. layer {
  2819. name: "ReLU5_3"
  2820. type: "ReLU"
  2821. bottom: "Sum5_3"
  2822. top: "Sum5_3"
  2823. }
  2824.  
  2825. #------------------------------------------------------------------------------
  2826. #------------------------------------------------------------------------------
  2827. #------------------------------------------------------------------------------
  2828. #------------------------------------------------------------------------------
  2829.  
  2830. layer {
  2831. name: "Pooling2"
  2832. type: "Pooling"
  2833. bottom: "Sum5_3"
  2834. top: "Pooling2"
  2835. pooling_param {
  2836. pool: AVE
  2837. kernel_size: 7
  2838. stride: 1
  2839. }
  2840. }
  2841. layer {
  2842. name: "InnerProduct1"
  2843. type: "InnerProduct"
  2844. bottom: "Pooling2"
  2845. top: "InnerProduct1"
  2846. inner_product_param {
  2847. num_output: 1000
  2848. }
  2849. }
  2850. layer {
  2851. name: "SoftmaxWithLoss1"
  2852. type: "SoftmaxWithLoss"
  2853. bottom: "InnerProduct1"
  2854. bottom: "Data2"
  2855. top: "SoftmaxWithLoss1"
  2856. }
  2857. layer {
  2858. name: "Accuracy1"
  2859. type: "Accuracy"
  2860. bottom: "InnerProduct1"
  2861. bottom: "Data2"
  2862. top: "Accuracy1"
  2863. include {
  2864. phase: TEST
  2865. }
  2866. }
# (Pastebin page footer removed - not part of the network definition.)