Advertisement
Guest User

Untitled

a guest
Apr 20th, 2019
93
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 50.36 KB | None | 0 0
  1. name: "MOBILENET_V2"
  2. # transform_param {
  3. # scale: 0.017
  4. # mirror: false
  5. # crop_size: 224
  6. # mean_value: [103.94,116.78,123.68]
  7. # }
  8. input: "data"
  9. input_dim: 1
  10. input_dim: 3
  11. input_dim: 224
  12. input_dim: 224
  13. layer {
  14. name: "conv1"
  15. type: "Convolution"
  16. bottom: "data"
  17. top: "conv1"
  18. param {
  19. lr_mult: 1
  20. decay_mult: 1
  21. }
  22. convolution_param {
  23. num_output: 32
  24. bias_term: false
  25. pad: 1
  26. kernel_size: 3
  27. stride: 2
  28. weight_filler {
  29. type: "msra"
  30. }
  31. }
  32. }
  33. layer {
  34. name: "conv1/bn"
  35. type: "BatchNorm"
  36. bottom: "conv1"
  37. top: "conv1/bn"
  38. param {
  39. lr_mult: 0
  40. decay_mult: 0
  41. }
  42. param {
  43. lr_mult: 0
  44. decay_mult: 0
  45. }
  46. param {
  47. lr_mult: 0
  48. decay_mult: 0
  49. }
  50. batch_norm_param {
  51. use_global_stats: true
  52. eps: 1e-5
  53. }
  54. }
  55. layer {
  56. name: "conv1/scale"
  57. type: "Scale"
  58. bottom: "conv1/bn"
  59. top: "conv1/bn"
  60. param {
  61. lr_mult: 1
  62. decay_mult: 0
  63. }
  64. param {
  65. lr_mult: 1
  66. decay_mult: 0
  67. }
  68. scale_param {
  69. bias_term: true
  70. }
  71. }
  72. layer {
  73. name: "relu1"
  74. type: "ReLU"
  75. bottom: "conv1/bn"
  76. top: "conv1/bn"
  77. }
  78. layer {
  79. name: "conv2_1/expand"
  80. type: "Convolution"
  81. bottom: "conv1/bn"
  82. top: "conv2_1/expand"
  83. param {
  84. lr_mult: 1
  85. decay_mult: 1
  86. }
  87. convolution_param {
  88. num_output: 32
  89. bias_term: false
  90. kernel_size: 1
  91. weight_filler {
  92. type: "msra"
  93. }
  94. }
  95. }
  96. layer {
  97. name: "conv2_1/expand/bn"
  98. type: "BatchNorm"
  99. bottom: "conv2_1/expand"
  100. top: "conv2_1/expand/bn"
  101. param {
  102. lr_mult: 0
  103. decay_mult: 0
  104. }
  105. param {
  106. lr_mult: 0
  107. decay_mult: 0
  108. }
  109. param {
  110. lr_mult: 0
  111. decay_mult: 0
  112. }
  113. batch_norm_param {
  114. use_global_stats: true
  115. eps: 1e-5
  116. }
  117. }
  118. layer {
  119. name: "conv2_1/expand/scale"
  120. type: "Scale"
  121. bottom: "conv2_1/expand/bn"
  122. top: "conv2_1/expand/bn"
  123. param {
  124. lr_mult: 1
  125. decay_mult: 0
  126. }
  127. param {
  128. lr_mult: 1
  129. decay_mult: 0
  130. }
  131. scale_param {
  132. bias_term: true
  133. }
  134. }
  135. layer {
  136. name: "relu2_1/expand"
  137. type: "ReLU"
  138. bottom: "conv2_1/expand/bn"
  139. top: "conv2_1/expand/bn"
  140. }
  141. layer {
  142. name: "conv2_1/dwise"
  143. type: "Convolution"
  144. bottom: "conv2_1/expand/bn"
  145. top: "conv2_1/dwise"
  146. param {
  147. lr_mult: 1
  148. decay_mult: 1
  149. }
  150. convolution_param {
  151. num_output: 32
  152. bias_term: false
  153. pad: 1
  154. kernel_size: 3
  155. group: 32
  156. weight_filler {
  157. type: "msra"
  158. }
  159. engine: CAFFE
  160. }
  161. }
  162. layer {
  163. name: "conv2_1/dwise/bn"
  164. type: "BatchNorm"
  165. bottom: "conv2_1/dwise"
  166. top: "conv2_1/dwise/bn"
  167. param {
  168. lr_mult: 0
  169. decay_mult: 0
  170. }
  171. param {
  172. lr_mult: 0
  173. decay_mult: 0
  174. }
  175. param {
  176. lr_mult: 0
  177. decay_mult: 0
  178. }
  179. batch_norm_param {
  180. use_global_stats: true
  181. eps: 1e-5
  182. }
  183. }
  184. layer {
  185. name: "conv2_1/dwise/scale"
  186. type: "Scale"
  187. bottom: "conv2_1/dwise/bn"
  188. top: "conv2_1/dwise/bn"
  189. param {
  190. lr_mult: 1
  191. decay_mult: 0
  192. }
  193. param {
  194. lr_mult: 1
  195. decay_mult: 0
  196. }
  197. scale_param {
  198. bias_term: true
  199. }
  200. }
  201. layer {
  202. name: "relu2_1/dwise"
  203. type: "ReLU"
  204. bottom: "conv2_1/dwise/bn"
  205. top: "conv2_1/dwise/bn"
  206. }
  207. layer {
  208. name: "conv2_1/linear"
  209. type: "Convolution"
  210. bottom: "conv2_1/dwise/bn"
  211. top: "conv2_1/linear"
  212. param {
  213. lr_mult: 1
  214. decay_mult: 1
  215. }
  216. convolution_param {
  217. num_output: 16
  218. bias_term: false
  219. kernel_size: 1
  220. weight_filler {
  221. type: "msra"
  222. }
  223. }
  224. }
  225. layer {
  226. name: "conv2_1/linear/bn"
  227. type: "BatchNorm"
  228. bottom: "conv2_1/linear"
  229. top: "conv2_1/linear/bn"
  230. param {
  231. lr_mult: 0
  232. decay_mult: 0
  233. }
  234. param {
  235. lr_mult: 0
  236. decay_mult: 0
  237. }
  238. param {
  239. lr_mult: 0
  240. decay_mult: 0
  241. }
  242. batch_norm_param {
  243. use_global_stats: true
  244. eps: 1e-5
  245. }
  246. }
  247. layer {
  248. name: "conv2_1/linear/scale"
  249. type: "Scale"
  250. bottom: "conv2_1/linear/bn"
  251. top: "conv2_1/linear/bn"
  252. param {
  253. lr_mult: 1
  254. decay_mult: 0
  255. }
  256. param {
  257. lr_mult: 1
  258. decay_mult: 0
  259. }
  260. scale_param {
  261. bias_term: true
  262. }
  263. }
  264. layer {
  265. name: "conv2_2/expand"
  266. type: "Convolution"
  267. bottom: "conv2_1/linear/bn"
  268. top: "conv2_2/expand"
  269. param {
  270. lr_mult: 1
  271. decay_mult: 1
  272. }
  273. convolution_param {
  274. num_output: 96
  275. bias_term: false
  276. kernel_size: 1
  277. weight_filler {
  278. type: "msra"
  279. }
  280. }
  281. }
  282. layer {
  283. name: "conv2_2/expand/bn"
  284. type: "BatchNorm"
  285. bottom: "conv2_2/expand"
  286. top: "conv2_2/expand/bn"
  287. param {
  288. lr_mult: 0
  289. decay_mult: 0
  290. }
  291. param {
  292. lr_mult: 0
  293. decay_mult: 0
  294. }
  295. param {
  296. lr_mult: 0
  297. decay_mult: 0
  298. }
  299. batch_norm_param {
  300. use_global_stats: true
  301. eps: 1e-5
  302. }
  303. }
  304. layer {
  305. name: "conv2_2/expand/scale"
  306. type: "Scale"
  307. bottom: "conv2_2/expand/bn"
  308. top: "conv2_2/expand/bn"
  309. param {
  310. lr_mult: 1
  311. decay_mult: 0
  312. }
  313. param {
  314. lr_mult: 1
  315. decay_mult: 0
  316. }
  317. scale_param {
  318. bias_term: true
  319. }
  320. }
  321. layer {
  322. name: "relu2_2/expand"
  323. type: "ReLU"
  324. bottom: "conv2_2/expand/bn"
  325. top: "conv2_2/expand/bn"
  326. }
  327. layer {
  328. name: "conv2_2/dwise"
  329. type: "Convolution"
  330. bottom: "conv2_2/expand/bn"
  331. top: "conv2_2/dwise"
  332. param {
  333. lr_mult: 1
  334. decay_mult: 1
  335. }
  336. convolution_param {
  337. num_output: 96
  338. bias_term: false
  339. pad: 1
  340. kernel_size: 3
  341. group: 96
  342. stride: 2
  343. weight_filler {
  344. type: "msra"
  345. }
  346. engine: CAFFE
  347. }
  348. }
  349. layer {
  350. name: "conv2_2/dwise/bn"
  351. type: "BatchNorm"
  352. bottom: "conv2_2/dwise"
  353. top: "conv2_2/dwise/bn"
  354. param {
  355. lr_mult: 0
  356. decay_mult: 0
  357. }
  358. param {
  359. lr_mult: 0
  360. decay_mult: 0
  361. }
  362. param {
  363. lr_mult: 0
  364. decay_mult: 0
  365. }
  366. batch_norm_param {
  367. use_global_stats: true
  368. eps: 1e-5
  369. }
  370. }
  371. layer {
  372. name: "conv2_2/dwise/scale"
  373. type: "Scale"
  374. bottom: "conv2_2/dwise/bn"
  375. top: "conv2_2/dwise/bn"
  376. param {
  377. lr_mult: 1
  378. decay_mult: 0
  379. }
  380. param {
  381. lr_mult: 1
  382. decay_mult: 0
  383. }
  384. scale_param {
  385. bias_term: true
  386. }
  387. }
  388. layer {
  389. name: "relu2_2/dwise"
  390. type: "ReLU"
  391. bottom: "conv2_2/dwise/bn"
  392. top: "conv2_2/dwise/bn"
  393. }
  394. layer {
  395. name: "conv2_2/linear"
  396. type: "Convolution"
  397. bottom: "conv2_2/dwise/bn"
  398. top: "conv2_2/linear"
  399. param {
  400. lr_mult: 1
  401. decay_mult: 1
  402. }
  403. convolution_param {
  404. num_output: 24
  405. bias_term: false
  406. kernel_size: 1
  407. weight_filler {
  408. type: "msra"
  409. }
  410. }
  411. }
  412. layer {
  413. name: "conv2_2/linear/bn"
  414. type: "BatchNorm"
  415. bottom: "conv2_2/linear"
  416. top: "conv2_2/linear/bn"
  417. param {
  418. lr_mult: 0
  419. decay_mult: 0
  420. }
  421. param {
  422. lr_mult: 0
  423. decay_mult: 0
  424. }
  425. param {
  426. lr_mult: 0
  427. decay_mult: 0
  428. }
  429. batch_norm_param {
  430. use_global_stats: true
  431. eps: 1e-5
  432. }
  433. }
  434. layer {
  435. name: "conv2_2/linear/scale"
  436. type: "Scale"
  437. bottom: "conv2_2/linear/bn"
  438. top: "conv2_2/linear/bn"
  439. param {
  440. lr_mult: 1
  441. decay_mult: 0
  442. }
  443. param {
  444. lr_mult: 1
  445. decay_mult: 0
  446. }
  447. scale_param {
  448. bias_term: true
  449. }
  450. }
  451. layer {
  452. name: "conv3_1/expand"
  453. type: "Convolution"
  454. bottom: "conv2_2/linear/bn"
  455. top: "conv3_1/expand"
  456. param {
  457. lr_mult: 1
  458. decay_mult: 1
  459. }
  460. convolution_param {
  461. num_output: 144
  462. bias_term: false
  463. kernel_size: 1
  464. weight_filler {
  465. type: "msra"
  466. }
  467. }
  468. }
  469. layer {
  470. name: "conv3_1/expand/bn"
  471. type: "BatchNorm"
  472. bottom: "conv3_1/expand"
  473. top: "conv3_1/expand/bn"
  474. param {
  475. lr_mult: 0
  476. decay_mult: 0
  477. }
  478. param {
  479. lr_mult: 0
  480. decay_mult: 0
  481. }
  482. param {
  483. lr_mult: 0
  484. decay_mult: 0
  485. }
  486. batch_norm_param {
  487. use_global_stats: true
  488. eps: 1e-5
  489. }
  490. }
  491. layer {
  492. name: "conv3_1/expand/scale"
  493. type: "Scale"
  494. bottom: "conv3_1/expand/bn"
  495. top: "conv3_1/expand/bn"
  496. param {
  497. lr_mult: 1
  498. decay_mult: 0
  499. }
  500. param {
  501. lr_mult: 1
  502. decay_mult: 0
  503. }
  504. scale_param {
  505. bias_term: true
  506. }
  507. }
  508. layer {
  509. name: "relu3_1/expand"
  510. type: "ReLU"
  511. bottom: "conv3_1/expand/bn"
  512. top: "conv3_1/expand/bn"
  513. }
  514. layer {
  515. name: "conv3_1/dwise"
  516. type: "Convolution"
  517. bottom: "conv3_1/expand/bn"
  518. top: "conv3_1/dwise"
  519. param {
  520. lr_mult: 1
  521. decay_mult: 1
  522. }
  523. convolution_param {
  524. num_output: 144
  525. bias_term: false
  526. pad: 1
  527. kernel_size: 3
  528. group: 144
  529. weight_filler {
  530. type: "msra"
  531. }
  532. engine: CAFFE
  533. }
  534. }
  535. layer {
  536. name: "conv3_1/dwise/bn"
  537. type: "BatchNorm"
  538. bottom: "conv3_1/dwise"
  539. top: "conv3_1/dwise/bn"
  540. param {
  541. lr_mult: 0
  542. decay_mult: 0
  543. }
  544. param {
  545. lr_mult: 0
  546. decay_mult: 0
  547. }
  548. param {
  549. lr_mult: 0
  550. decay_mult: 0
  551. }
  552. batch_norm_param {
  553. use_global_stats: true
  554. eps: 1e-5
  555. }
  556. }
  557. layer {
  558. name: "conv3_1/dwise/scale"
  559. type: "Scale"
  560. bottom: "conv3_1/dwise/bn"
  561. top: "conv3_1/dwise/bn"
  562. param {
  563. lr_mult: 1
  564. decay_mult: 0
  565. }
  566. param {
  567. lr_mult: 1
  568. decay_mult: 0
  569. }
  570. scale_param {
  571. bias_term: true
  572. }
  573. }
  574. layer {
  575. name: "relu3_1/dwise"
  576. type: "ReLU"
  577. bottom: "conv3_1/dwise/bn"
  578. top: "conv3_1/dwise/bn"
  579. }
  580. layer {
  581. name: "conv3_1/linear"
  582. type: "Convolution"
  583. bottom: "conv3_1/dwise/bn"
  584. top: "conv3_1/linear"
  585. param {
  586. lr_mult: 1
  587. decay_mult: 1
  588. }
  589. convolution_param {
  590. num_output: 24
  591. bias_term: false
  592. kernel_size: 1
  593. weight_filler {
  594. type: "msra"
  595. }
  596. }
  597. }
  598. layer {
  599. name: "conv3_1/linear/bn"
  600. type: "BatchNorm"
  601. bottom: "conv3_1/linear"
  602. top: "conv3_1/linear/bn"
  603. param {
  604. lr_mult: 0
  605. decay_mult: 0
  606. }
  607. param {
  608. lr_mult: 0
  609. decay_mult: 0
  610. }
  611. param {
  612. lr_mult: 0
  613. decay_mult: 0
  614. }
  615. batch_norm_param {
  616. use_global_stats: true
  617. eps: 1e-5
  618. }
  619. }
  620. layer {
  621. name: "conv3_1/linear/scale"
  622. type: "Scale"
  623. bottom: "conv3_1/linear/bn"
  624. top: "conv3_1/linear/bn"
  625. param {
  626. lr_mult: 1
  627. decay_mult: 0
  628. }
  629. param {
  630. lr_mult: 1
  631. decay_mult: 0
  632. }
  633. scale_param {
  634. bias_term: true
  635. }
  636. }
  637. layer {
  638. name: "block_3_1"
  639. type: "Eltwise"
  640. bottom: "conv2_2/linear/bn"
  641. bottom: "conv3_1/linear/bn"
  642. top: "block_3_1"
  643. }
  644. layer {
  645. name: "conv3_2/expand"
  646. type: "Convolution"
  647. bottom: "block_3_1"
  648. top: "conv3_2/expand"
  649. param {
  650. lr_mult: 1
  651. decay_mult: 1
  652. }
  653. convolution_param {
  654. num_output: 144
  655. bias_term: false
  656. kernel_size: 1
  657. weight_filler {
  658. type: "msra"
  659. }
  660. }
  661. }
  662. layer {
  663. name: "conv3_2/expand/bn"
  664. type: "BatchNorm"
  665. bottom: "conv3_2/expand"
  666. top: "conv3_2/expand/bn"
  667. param {
  668. lr_mult: 0
  669. decay_mult: 0
  670. }
  671. param {
  672. lr_mult: 0
  673. decay_mult: 0
  674. }
  675. param {
  676. lr_mult: 0
  677. decay_mult: 0
  678. }
  679. batch_norm_param {
  680. use_global_stats: true
  681. eps: 1e-5
  682. }
  683. }
  684. layer {
  685. name: "conv3_2/expand/scale"
  686. type: "Scale"
  687. bottom: "conv3_2/expand/bn"
  688. top: "conv3_2/expand/bn"
  689. param {
  690. lr_mult: 1
  691. decay_mult: 0
  692. }
  693. param {
  694. lr_mult: 1
  695. decay_mult: 0
  696. }
  697. scale_param {
  698. bias_term: true
  699. }
  700. }
  701. layer {
  702. name: "relu3_2/expand"
  703. type: "ReLU"
  704. bottom: "conv3_2/expand/bn"
  705. top: "conv3_2/expand/bn"
  706. }
  707. layer {
  708. name: "conv3_2/dwise"
  709. type: "Convolution"
  710. bottom: "conv3_2/expand/bn"
  711. top: "conv3_2/dwise"
  712. param {
  713. lr_mult: 1
  714. decay_mult: 1
  715. }
  716. convolution_param {
  717. num_output: 144
  718. bias_term: false
  719. pad: 1
  720. kernel_size: 3
  721. group: 144
  722. stride: 2
  723. weight_filler {
  724. type: "msra"
  725. }
  726. engine: CAFFE
  727. }
  728. }
  729. layer {
  730. name: "conv3_2/dwise/bn"
  731. type: "BatchNorm"
  732. bottom: "conv3_2/dwise"
  733. top: "conv3_2/dwise/bn"
  734. param {
  735. lr_mult: 0
  736. decay_mult: 0
  737. }
  738. param {
  739. lr_mult: 0
  740. decay_mult: 0
  741. }
  742. param {
  743. lr_mult: 0
  744. decay_mult: 0
  745. }
  746. batch_norm_param {
  747. use_global_stats: true
  748. eps: 1e-5
  749. }
  750. }
  751. layer {
  752. name: "conv3_2/dwise/scale"
  753. type: "Scale"
  754. bottom: "conv3_2/dwise/bn"
  755. top: "conv3_2/dwise/bn"
  756. param {
  757. lr_mult: 1
  758. decay_mult: 0
  759. }
  760. param {
  761. lr_mult: 1
  762. decay_mult: 0
  763. }
  764. scale_param {
  765. bias_term: true
  766. }
  767. }
  768. layer {
  769. name: "relu3_2/dwise"
  770. type: "ReLU"
  771. bottom: "conv3_2/dwise/bn"
  772. top: "conv3_2/dwise/bn"
  773. }
  774. layer {
  775. name: "conv3_2/linear"
  776. type: "Convolution"
  777. bottom: "conv3_2/dwise/bn"
  778. top: "conv3_2/linear"
  779. param {
  780. lr_mult: 1
  781. decay_mult: 1
  782. }
  783. convolution_param {
  784. num_output: 32
  785. bias_term: false
  786. kernel_size: 1
  787. weight_filler {
  788. type: "msra"
  789. }
  790. }
  791. }
  792. layer {
  793. name: "conv3_2/linear/bn"
  794. type: "BatchNorm"
  795. bottom: "conv3_2/linear"
  796. top: "conv3_2/linear/bn"
  797. param {
  798. lr_mult: 0
  799. decay_mult: 0
  800. }
  801. param {
  802. lr_mult: 0
  803. decay_mult: 0
  804. }
  805. param {
  806. lr_mult: 0
  807. decay_mult: 0
  808. }
  809. batch_norm_param {
  810. use_global_stats: true
  811. eps: 1e-5
  812. }
  813. }
  814. layer {
  815. name: "conv3_2/linear/scale"
  816. type: "Scale"
  817. bottom: "conv3_2/linear/bn"
  818. top: "conv3_2/linear/bn"
  819. param {
  820. lr_mult: 1
  821. decay_mult: 0
  822. }
  823. param {
  824. lr_mult: 1
  825. decay_mult: 0
  826. }
  827. scale_param {
  828. bias_term: true
  829. }
  830. }
  831. layer {
  832. name: "conv4_1/expand"
  833. type: "Convolution"
  834. bottom: "conv3_2/linear/bn"
  835. top: "conv4_1/expand"
  836. param {
  837. lr_mult: 1
  838. decay_mult: 1
  839. }
  840. convolution_param {
  841. num_output: 192
  842. bias_term: false
  843. kernel_size: 1
  844. weight_filler {
  845. type: "msra"
  846. }
  847. }
  848. }
  849. layer {
  850. name: "conv4_1/expand/bn"
  851. type: "BatchNorm"
  852. bottom: "conv4_1/expand"
  853. top: "conv4_1/expand/bn"
  854. param {
  855. lr_mult: 0
  856. decay_mult: 0
  857. }
  858. param {
  859. lr_mult: 0
  860. decay_mult: 0
  861. }
  862. param {
  863. lr_mult: 0
  864. decay_mult: 0
  865. }
  866. batch_norm_param {
  867. use_global_stats: true
  868. eps: 1e-5
  869. }
  870. }
  871. layer {
  872. name: "conv4_1/expand/scale"
  873. type: "Scale"
  874. bottom: "conv4_1/expand/bn"
  875. top: "conv4_1/expand/bn"
  876. param {
  877. lr_mult: 1
  878. decay_mult: 0
  879. }
  880. param {
  881. lr_mult: 1
  882. decay_mult: 0
  883. }
  884. scale_param {
  885. bias_term: true
  886. }
  887. }
  888. layer {
  889. name: "relu4_1/expand"
  890. type: "ReLU"
  891. bottom: "conv4_1/expand/bn"
  892. top: "conv4_1/expand/bn"
  893. }
  894. layer {
  895. name: "conv4_1/dwise"
  896. type: "Convolution"
  897. bottom: "conv4_1/expand/bn"
  898. top: "conv4_1/dwise"
  899. param {
  900. lr_mult: 1
  901. decay_mult: 1
  902. }
  903. convolution_param {
  904. num_output: 192
  905. bias_term: false
  906. pad: 1
  907. kernel_size: 3
  908. group: 192
  909. weight_filler {
  910. type: "msra"
  911. }
  912. engine: CAFFE
  913. }
  914. }
  915. layer {
  916. name: "conv4_1/dwise/bn"
  917. type: "BatchNorm"
  918. bottom: "conv4_1/dwise"
  919. top: "conv4_1/dwise/bn"
  920. param {
  921. lr_mult: 0
  922. decay_mult: 0
  923. }
  924. param {
  925. lr_mult: 0
  926. decay_mult: 0
  927. }
  928. param {
  929. lr_mult: 0
  930. decay_mult: 0
  931. }
  932. batch_norm_param {
  933. use_global_stats: true
  934. eps: 1e-5
  935. }
  936. }
  937. layer {
  938. name: "conv4_1/dwise/scale"
  939. type: "Scale"
  940. bottom: "conv4_1/dwise/bn"
  941. top: "conv4_1/dwise/bn"
  942. param {
  943. lr_mult: 1
  944. decay_mult: 0
  945. }
  946. param {
  947. lr_mult: 1
  948. decay_mult: 0
  949. }
  950. scale_param {
  951. bias_term: true
  952. }
  953. }
  954. layer {
  955. name: "relu4_1/dwise"
  956. type: "ReLU"
  957. bottom: "conv4_1/dwise/bn"
  958. top: "conv4_1/dwise/bn"
  959. }
  960. layer {
  961. name: "conv4_1/linear"
  962. type: "Convolution"
  963. bottom: "conv4_1/dwise/bn"
  964. top: "conv4_1/linear"
  965. param {
  966. lr_mult: 1
  967. decay_mult: 1
  968. }
  969. convolution_param {
  970. num_output: 32
  971. bias_term: false
  972. kernel_size: 1
  973. weight_filler {
  974. type: "msra"
  975. }
  976. }
  977. }
  978. layer {
  979. name: "conv4_1/linear/bn"
  980. type: "BatchNorm"
  981. bottom: "conv4_1/linear"
  982. top: "conv4_1/linear/bn"
  983. param {
  984. lr_mult: 0
  985. decay_mult: 0
  986. }
  987. param {
  988. lr_mult: 0
  989. decay_mult: 0
  990. }
  991. param {
  992. lr_mult: 0
  993. decay_mult: 0
  994. }
  995. batch_norm_param {
  996. use_global_stats: true
  997. eps: 1e-5
  998. }
  999. }
  1000. layer {
  1001. name: "conv4_1/linear/scale"
  1002. type: "Scale"
  1003. bottom: "conv4_1/linear/bn"
  1004. top: "conv4_1/linear/bn"
  1005. param {
  1006. lr_mult: 1
  1007. decay_mult: 0
  1008. }
  1009. param {
  1010. lr_mult: 1
  1011. decay_mult: 0
  1012. }
  1013. scale_param {
  1014. bias_term: true
  1015. }
  1016. }
  1017. layer {
  1018. name: "block_4_1"
  1019. type: "Eltwise"
  1020. bottom: "conv3_2/linear/bn"
  1021. bottom: "conv4_1/linear/bn"
  1022. top: "block_4_1"
  1023. }
  1024. layer {
  1025. name: "conv4_2/expand"
  1026. type: "Convolution"
  1027. bottom: "block_4_1"
  1028. top: "conv4_2/expand"
  1029. param {
  1030. lr_mult: 1
  1031. decay_mult: 1
  1032. }
  1033. convolution_param {
  1034. num_output: 192
  1035. bias_term: false
  1036. kernel_size: 1
  1037. weight_filler {
  1038. type: "msra"
  1039. }
  1040. }
  1041. }
  1042. layer {
  1043. name: "conv4_2/expand/bn"
  1044. type: "BatchNorm"
  1045. bottom: "conv4_2/expand"
  1046. top: "conv4_2/expand/bn"
  1047. param {
  1048. lr_mult: 0
  1049. decay_mult: 0
  1050. }
  1051. param {
  1052. lr_mult: 0
  1053. decay_mult: 0
  1054. }
  1055. param {
  1056. lr_mult: 0
  1057. decay_mult: 0
  1058. }
  1059. batch_norm_param {
  1060. use_global_stats: true
  1061. eps: 1e-5
  1062. }
  1063. }
  1064. layer {
  1065. name: "conv4_2/expand/scale"
  1066. type: "Scale"
  1067. bottom: "conv4_2/expand/bn"
  1068. top: "conv4_2/expand/bn"
  1069. param {
  1070. lr_mult: 1
  1071. decay_mult: 0
  1072. }
  1073. param {
  1074. lr_mult: 1
  1075. decay_mult: 0
  1076. }
  1077. scale_param {
  1078. bias_term: true
  1079. }
  1080. }
  1081. layer {
  1082. name: "relu4_2/expand"
  1083. type: "ReLU"
  1084. bottom: "conv4_2/expand/bn"
  1085. top: "conv4_2/expand/bn"
  1086. }
  1087. layer {
  1088. name: "conv4_2/dwise"
  1089. type: "Convolution"
  1090. bottom: "conv4_2/expand/bn"
  1091. top: "conv4_2/dwise"
  1092. param {
  1093. lr_mult: 1
  1094. decay_mult: 1
  1095. }
  1096. convolution_param {
  1097. num_output: 192
  1098. bias_term: false
  1099. pad: 1
  1100. kernel_size: 3
  1101. group: 192
  1102. weight_filler {
  1103. type: "msra"
  1104. }
  1105. engine: CAFFE
  1106. }
  1107. }
  1108. layer {
  1109. name: "conv4_2/dwise/bn"
  1110. type: "BatchNorm"
  1111. bottom: "conv4_2/dwise"
  1112. top: "conv4_2/dwise/bn"
  1113. param {
  1114. lr_mult: 0
  1115. decay_mult: 0
  1116. }
  1117. param {
  1118. lr_mult: 0
  1119. decay_mult: 0
  1120. }
  1121. param {
  1122. lr_mult: 0
  1123. decay_mult: 0
  1124. }
  1125. batch_norm_param {
  1126. use_global_stats: true
  1127. eps: 1e-5
  1128. }
  1129. }
  1130. layer {
  1131. name: "conv4_2/dwise/scale"
  1132. type: "Scale"
  1133. bottom: "conv4_2/dwise/bn"
  1134. top: "conv4_2/dwise/bn"
  1135. param {
  1136. lr_mult: 1
  1137. decay_mult: 0
  1138. }
  1139. param {
  1140. lr_mult: 1
  1141. decay_mult: 0
  1142. }
  1143. scale_param {
  1144. bias_term: true
  1145. }
  1146. }
  1147. layer {
  1148. name: "relu4_2/dwise"
  1149. type: "ReLU"
  1150. bottom: "conv4_2/dwise/bn"
  1151. top: "conv4_2/dwise/bn"
  1152. }
  1153. layer {
  1154. name: "conv4_2/linear"
  1155. type: "Convolution"
  1156. bottom: "conv4_2/dwise/bn"
  1157. top: "conv4_2/linear"
  1158. param {
  1159. lr_mult: 1
  1160. decay_mult: 1
  1161. }
  1162. convolution_param {
  1163. num_output: 32
  1164. bias_term: false
  1165. kernel_size: 1
  1166. weight_filler {
  1167. type: "msra"
  1168. }
  1169. }
  1170. }
  1171. layer {
  1172. name: "conv4_2/linear/bn"
  1173. type: "BatchNorm"
  1174. bottom: "conv4_2/linear"
  1175. top: "conv4_2/linear/bn"
  1176. param {
  1177. lr_mult: 0
  1178. decay_mult: 0
  1179. }
  1180. param {
  1181. lr_mult: 0
  1182. decay_mult: 0
  1183. }
  1184. param {
  1185. lr_mult: 0
  1186. decay_mult: 0
  1187. }
  1188. batch_norm_param {
  1189. use_global_stats: true
  1190. eps: 1e-5
  1191. }
  1192. }
  1193. layer {
  1194. name: "conv4_2/linear/scale"
  1195. type: "Scale"
  1196. bottom: "conv4_2/linear/bn"
  1197. top: "conv4_2/linear/bn"
  1198. param {
  1199. lr_mult: 1
  1200. decay_mult: 0
  1201. }
  1202. param {
  1203. lr_mult: 1
  1204. decay_mult: 0
  1205. }
  1206. scale_param {
  1207. bias_term: true
  1208. }
  1209. }
  1210. layer {
  1211. name: "block_4_2"
  1212. type: "Eltwise"
  1213. bottom: "block_4_1"
  1214. bottom: "conv4_2/linear/bn"
  1215. top: "block_4_2"
  1216. }
  1217. layer {
  1218. name: "conv4_3/expand"
  1219. type: "Convolution"
  1220. bottom: "block_4_2"
  1221. top: "conv4_3/expand"
  1222. param {
  1223. lr_mult: 1
  1224. decay_mult: 1
  1225. }
  1226. convolution_param {
  1227. num_output: 192
  1228. bias_term: false
  1229. kernel_size: 1
  1230. weight_filler {
  1231. type: "msra"
  1232. }
  1233. }
  1234. }
  1235. layer {
  1236. name: "conv4_3/expand/bn"
  1237. type: "BatchNorm"
  1238. bottom: "conv4_3/expand"
  1239. top: "conv4_3/expand/bn"
  1240. param {
  1241. lr_mult: 0
  1242. decay_mult: 0
  1243. }
  1244. param {
  1245. lr_mult: 0
  1246. decay_mult: 0
  1247. }
  1248. param {
  1249. lr_mult: 0
  1250. decay_mult: 0
  1251. }
  1252. batch_norm_param {
  1253. use_global_stats: true
  1254. eps: 1e-5
  1255. }
  1256. }
  1257. layer {
  1258. name: "conv4_3/expand/scale"
  1259. type: "Scale"
  1260. bottom: "conv4_3/expand/bn"
  1261. top: "conv4_3/expand/bn"
  1262. param {
  1263. lr_mult: 1
  1264. decay_mult: 0
  1265. }
  1266. param {
  1267. lr_mult: 1
  1268. decay_mult: 0
  1269. }
  1270. scale_param {
  1271. bias_term: true
  1272. }
  1273. }
  1274. layer {
  1275. name: "relu4_3/expand"
  1276. type: "ReLU"
  1277. bottom: "conv4_3/expand/bn"
  1278. top: "conv4_3/expand/bn"
  1279. }
  1280. layer {
  1281. name: "conv4_3/dwise"
  1282. type: "Convolution"
  1283. bottom: "conv4_3/expand/bn"
  1284. top: "conv4_3/dwise"
  1285. param {
  1286. lr_mult: 1
  1287. decay_mult: 1
  1288. }
  1289. convolution_param {
  1290. num_output: 192
  1291. bias_term: false
  1292. pad: 1
  1293. kernel_size: 3
  1294. group: 192
  1295. weight_filler {
  1296. type: "msra"
  1297. }
  1298. engine: CAFFE
  1299. }
  1300. }
  1301. layer {
  1302. name: "conv4_3/dwise/bn"
  1303. type: "BatchNorm"
  1304. bottom: "conv4_3/dwise"
  1305. top: "conv4_3/dwise/bn"
  1306. param {
  1307. lr_mult: 0
  1308. decay_mult: 0
  1309. }
  1310. param {
  1311. lr_mult: 0
  1312. decay_mult: 0
  1313. }
  1314. param {
  1315. lr_mult: 0
  1316. decay_mult: 0
  1317. }
  1318. batch_norm_param {
  1319. use_global_stats: true
  1320. eps: 1e-5
  1321. }
  1322. }
  1323. layer {
  1324. name: "conv4_3/dwise/scale"
  1325. type: "Scale"
  1326. bottom: "conv4_3/dwise/bn"
  1327. top: "conv4_3/dwise/bn"
  1328. param {
  1329. lr_mult: 1
  1330. decay_mult: 0
  1331. }
  1332. param {
  1333. lr_mult: 1
  1334. decay_mult: 0
  1335. }
  1336. scale_param {
  1337. bias_term: true
  1338. }
  1339. }
  1340. layer {
  1341. name: "relu4_3/dwise"
  1342. type: "ReLU"
  1343. bottom: "conv4_3/dwise/bn"
  1344. top: "conv4_3/dwise/bn"
  1345. }
  1346. layer {
  1347. name: "conv4_3/linear"
  1348. type: "Convolution"
  1349. bottom: "conv4_3/dwise/bn"
  1350. top: "conv4_3/linear"
  1351. param {
  1352. lr_mult: 1
  1353. decay_mult: 1
  1354. }
  1355. convolution_param {
  1356. num_output: 64
  1357. bias_term: false
  1358. kernel_size: 1
  1359. weight_filler {
  1360. type: "msra"
  1361. }
  1362. }
  1363. }
  1364. layer {
  1365. name: "conv4_3/linear/bn"
  1366. type: "BatchNorm"
  1367. bottom: "conv4_3/linear"
  1368. top: "conv4_3/linear/bn"
  1369. param {
  1370. lr_mult: 0
  1371. decay_mult: 0
  1372. }
  1373. param {
  1374. lr_mult: 0
  1375. decay_mult: 0
  1376. }
  1377. param {
  1378. lr_mult: 0
  1379. decay_mult: 0
  1380. }
  1381. batch_norm_param {
  1382. use_global_stats: true
  1383. eps: 1e-5
  1384. }
  1385. }
  1386. layer {
  1387. name: "conv4_3/linear/scale"
  1388. type: "Scale"
  1389. bottom: "conv4_3/linear/bn"
  1390. top: "conv4_3/linear/bn"
  1391. param {
  1392. lr_mult: 1
  1393. decay_mult: 0
  1394. }
  1395. param {
  1396. lr_mult: 1
  1397. decay_mult: 0
  1398. }
  1399. scale_param {
  1400. bias_term: true
  1401. }
  1402. }
  1403. layer {
  1404. name: "conv4_4/expand"
  1405. type: "Convolution"
  1406. bottom: "conv4_3/linear/bn"
  1407. top: "conv4_4/expand"
  1408. param {
  1409. lr_mult: 1
  1410. decay_mult: 1
  1411. }
  1412. convolution_param {
  1413. num_output: 384
  1414. bias_term: false
  1415. kernel_size: 1
  1416. weight_filler {
  1417. type: "msra"
  1418. }
  1419. }
  1420. }
  1421. layer {
  1422. name: "conv4_4/expand/bn"
  1423. type: "BatchNorm"
  1424. bottom: "conv4_4/expand"
  1425. top: "conv4_4/expand/bn"
  1426. param {
  1427. lr_mult: 0
  1428. decay_mult: 0
  1429. }
  1430. param {
  1431. lr_mult: 0
  1432. decay_mult: 0
  1433. }
  1434. param {
  1435. lr_mult: 0
  1436. decay_mult: 0
  1437. }
  1438. batch_norm_param {
  1439. use_global_stats: true
  1440. eps: 1e-5
  1441. }
  1442. }
  1443. layer {
  1444. name: "conv4_4/expand/scale"
  1445. type: "Scale"
  1446. bottom: "conv4_4/expand/bn"
  1447. top: "conv4_4/expand/bn"
  1448. param {
  1449. lr_mult: 1
  1450. decay_mult: 0
  1451. }
  1452. param {
  1453. lr_mult: 1
  1454. decay_mult: 0
  1455. }
  1456. scale_param {
  1457. bias_term: true
  1458. }
  1459. }
  1460. layer {
  1461. name: "relu4_4/expand"
  1462. type: "ReLU"
  1463. bottom: "conv4_4/expand/bn"
  1464. top: "conv4_4/expand/bn"
  1465. }
  1466. layer {
  1467. name: "conv4_4/dwise"
  1468. type: "Convolution"
  1469. bottom: "conv4_4/expand/bn"
  1470. top: "conv4_4/dwise"
  1471. param {
  1472. lr_mult: 1
  1473. decay_mult: 1
  1474. }
  1475. convolution_param {
  1476. num_output: 384
  1477. bias_term: false
  1478. pad: 1
  1479. kernel_size: 3
  1480. group: 384
  1481. weight_filler {
  1482. type: "msra"
  1483. }
  1484. engine: CAFFE
  1485. }
  1486. }
  1487. layer {
  1488. name: "conv4_4/dwise/bn"
  1489. type: "BatchNorm"
  1490. bottom: "conv4_4/dwise"
  1491. top: "conv4_4/dwise/bn"
  1492. param {
  1493. lr_mult: 0
  1494. decay_mult: 0
  1495. }
  1496. param {
  1497. lr_mult: 0
  1498. decay_mult: 0
  1499. }
  1500. param {
  1501. lr_mult: 0
  1502. decay_mult: 0
  1503. }
  1504. batch_norm_param {
  1505. use_global_stats: true
  1506. eps: 1e-5
  1507. }
  1508. }
  1509. layer {
  1510. name: "conv4_4/dwise/scale"
  1511. type: "Scale"
  1512. bottom: "conv4_4/dwise/bn"
  1513. top: "conv4_4/dwise/bn"
  1514. param {
  1515. lr_mult: 1
  1516. decay_mult: 0
  1517. }
  1518. param {
  1519. lr_mult: 1
  1520. decay_mult: 0
  1521. }
  1522. scale_param {
  1523. bias_term: true
  1524. }
  1525. }
  1526. layer {
  1527. name: "relu4_4/dwise"
  1528. type: "ReLU"
  1529. bottom: "conv4_4/dwise/bn"
  1530. top: "conv4_4/dwise/bn"
  1531. }
  1532. layer {
  1533. name: "conv4_4/linear"
  1534. type: "Convolution"
  1535. bottom: "conv4_4/dwise/bn"
  1536. top: "conv4_4/linear"
  1537. param {
  1538. lr_mult: 1
  1539. decay_mult: 1
  1540. }
  1541. convolution_param {
  1542. num_output: 64
  1543. bias_term: false
  1544. kernel_size: 1
  1545. weight_filler {
  1546. type: "msra"
  1547. }
  1548. }
  1549. }
  1550. layer {
  1551. name: "conv4_4/linear/bn"
  1552. type: "BatchNorm"
  1553. bottom: "conv4_4/linear"
  1554. top: "conv4_4/linear/bn"
  1555. param {
  1556. lr_mult: 0
  1557. decay_mult: 0
  1558. }
  1559. param {
  1560. lr_mult: 0
  1561. decay_mult: 0
  1562. }
  1563. param {
  1564. lr_mult: 0
  1565. decay_mult: 0
  1566. }
  1567. batch_norm_param {
  1568. use_global_stats: true
  1569. eps: 1e-5
  1570. }
  1571. }
  1572. layer {
  1573. name: "conv4_4/linear/scale"
  1574. type: "Scale"
  1575. bottom: "conv4_4/linear/bn"
  1576. top: "conv4_4/linear/bn"
  1577. param {
  1578. lr_mult: 1
  1579. decay_mult: 0
  1580. }
  1581. param {
  1582. lr_mult: 1
  1583. decay_mult: 0
  1584. }
  1585. scale_param {
  1586. bias_term: true
  1587. }
  1588. }
  1589. layer {
  1590. name: "block_4_4"
  1591. type: "Eltwise"
  1592. bottom: "conv4_3/linear/bn"
  1593. bottom: "conv4_4/linear/bn"
  1594. top: "block_4_4"
  1595. }
  1596. layer {
  1597. name: "conv4_5/expand"
  1598. type: "Convolution"
  1599. bottom: "block_4_4"
  1600. top: "conv4_5/expand"
  1601. param {
  1602. lr_mult: 1
  1603. decay_mult: 1
  1604. }
  1605. convolution_param {
  1606. num_output: 384
  1607. bias_term: false
  1608. kernel_size: 1
  1609. weight_filler {
  1610. type: "msra"
  1611. }
  1612. }
  1613. }
  1614. layer {
  1615. name: "conv4_5/expand/bn"
  1616. type: "BatchNorm"
  1617. bottom: "conv4_5/expand"
  1618. top: "conv4_5/expand/bn"
  1619. param {
  1620. lr_mult: 0
  1621. decay_mult: 0
  1622. }
  1623. param {
  1624. lr_mult: 0
  1625. decay_mult: 0
  1626. }
  1627. param {
  1628. lr_mult: 0
  1629. decay_mult: 0
  1630. }
  1631. batch_norm_param {
  1632. use_global_stats: true
  1633. eps: 1e-5
  1634. }
  1635. }
  1636. layer {
  1637. name: "conv4_5/expand/scale"
  1638. type: "Scale"
  1639. bottom: "conv4_5/expand/bn"
  1640. top: "conv4_5/expand/bn"
  1641. param {
  1642. lr_mult: 1
  1643. decay_mult: 0
  1644. }
  1645. param {
  1646. lr_mult: 1
  1647. decay_mult: 0
  1648. }
  1649. scale_param {
  1650. bias_term: true
  1651. }
  1652. }
  1653. layer {
  1654. name: "relu4_5/expand"
  1655. type: "ReLU"
  1656. bottom: "conv4_5/expand/bn"
  1657. top: "conv4_5/expand/bn"
  1658. }
  1659. layer {
  1660. name: "conv4_5/dwise"
  1661. type: "Convolution"
  1662. bottom: "conv4_5/expand/bn"
  1663. top: "conv4_5/dwise"
  1664. param {
  1665. lr_mult: 1
  1666. decay_mult: 1
  1667. }
  1668. convolution_param {
  1669. num_output: 384
  1670. bias_term: false
  1671. pad: 1
  1672. kernel_size: 3
  1673. group: 384
  1674. weight_filler {
  1675. type: "msra"
  1676. }
  1677. engine: CAFFE
  1678. }
  1679. }
  1680. layer {
  1681. name: "conv4_5/dwise/bn"
  1682. type: "BatchNorm"
  1683. bottom: "conv4_5/dwise"
  1684. top: "conv4_5/dwise/bn"
  1685. param {
  1686. lr_mult: 0
  1687. decay_mult: 0
  1688. }
  1689. param {
  1690. lr_mult: 0
  1691. decay_mult: 0
  1692. }
  1693. param {
  1694. lr_mult: 0
  1695. decay_mult: 0
  1696. }
  1697. batch_norm_param {
  1698. use_global_stats: true
  1699. eps: 1e-5
  1700. }
  1701. }
  1702. layer {
  1703. name: "conv4_5/dwise/scale"
  1704. type: "Scale"
  1705. bottom: "conv4_5/dwise/bn"
  1706. top: "conv4_5/dwise/bn"
  1707. param {
  1708. lr_mult: 1
  1709. decay_mult: 0
  1710. }
  1711. param {
  1712. lr_mult: 1
  1713. decay_mult: 0
  1714. }
  1715. scale_param {
  1716. bias_term: true
  1717. }
  1718. }
  1719. layer {
  1720. name: "relu4_5/dwise"
  1721. type: "ReLU"
  1722. bottom: "conv4_5/dwise/bn"
  1723. top: "conv4_5/dwise/bn"
  1724. }
  1725. layer {
  1726. name: "conv4_5/linear"
  1727. type: "Convolution"
  1728. bottom: "conv4_5/dwise/bn"
  1729. top: "conv4_5/linear"
  1730. param {
  1731. lr_mult: 1
  1732. decay_mult: 1
  1733. }
  1734. convolution_param {
  1735. num_output: 64
  1736. bias_term: false
  1737. kernel_size: 1
  1738. weight_filler {
  1739. type: "msra"
  1740. }
  1741. }
  1742. }
  1743. layer {
  1744. name: "conv4_5/linear/bn"
  1745. type: "BatchNorm"
  1746. bottom: "conv4_5/linear"
  1747. top: "conv4_5/linear/bn"
  1748. param {
  1749. lr_mult: 0
  1750. decay_mult: 0
  1751. }
  1752. param {
  1753. lr_mult: 0
  1754. decay_mult: 0
  1755. }
  1756. param {
  1757. lr_mult: 0
  1758. decay_mult: 0
  1759. }
  1760. batch_norm_param {
  1761. use_global_stats: true
  1762. eps: 1e-5
  1763. }
  1764. }
  1765. layer {
  1766. name: "conv4_5/linear/scale"
  1767. type: "Scale"
  1768. bottom: "conv4_5/linear/bn"
  1769. top: "conv4_5/linear/bn"
  1770. param {
  1771. lr_mult: 1
  1772. decay_mult: 0
  1773. }
  1774. param {
  1775. lr_mult: 1
  1776. decay_mult: 0
  1777. }
  1778. scale_param {
  1779. bias_term: true
  1780. }
  1781. }
layer {
name: "block_4_5"
type: "Eltwise"
bottom: "block_4_4"
bottom: "conv4_5/linear/bn"
top: "block_4_5"
}
  1789. layer {
  1790. name: "conv4_6/expand"
  1791. type: "Convolution"
  1792. bottom: "block_4_5"
  1793. top: "conv4_6/expand"
  1794. param {
  1795. lr_mult: 1
  1796. decay_mult: 1
  1797. }
  1798. convolution_param {
  1799. num_output: 384
  1800. bias_term: false
  1801. kernel_size: 1
  1802. weight_filler {
  1803. type: "msra"
  1804. }
  1805. }
  1806. }
  1807. layer {
  1808. name: "conv4_6/expand/bn"
  1809. type: "BatchNorm"
  1810. bottom: "conv4_6/expand"
  1811. top: "conv4_6/expand/bn"
  1812. param {
  1813. lr_mult: 0
  1814. decay_mult: 0
  1815. }
  1816. param {
  1817. lr_mult: 0
  1818. decay_mult: 0
  1819. }
  1820. param {
  1821. lr_mult: 0
  1822. decay_mult: 0
  1823. }
  1824. batch_norm_param {
  1825. use_global_stats: true
  1826. eps: 1e-5
  1827. }
  1828. }
  1829. layer {
  1830. name: "conv4_6/expand/scale"
  1831. type: "Scale"
  1832. bottom: "conv4_6/expand/bn"
  1833. top: "conv4_6/expand/bn"
  1834. param {
  1835. lr_mult: 1
  1836. decay_mult: 0
  1837. }
  1838. param {
  1839. lr_mult: 1
  1840. decay_mult: 0
  1841. }
  1842. scale_param {
  1843. bias_term: true
  1844. }
  1845. }
  1846. layer {
  1847. name: "relu4_6/expand"
  1848. type: "ReLU"
  1849. bottom: "conv4_6/expand/bn"
  1850. top: "conv4_6/expand/bn"
  1851. }
  1852. layer {
  1853. name: "conv4_6/dwise"
  1854. type: "Convolution"
  1855. bottom: "conv4_6/expand/bn"
  1856. top: "conv4_6/dwise"
  1857. param {
  1858. lr_mult: 1
  1859. decay_mult: 1
  1860. }
  1861. convolution_param {
  1862. num_output: 384
  1863. bias_term: false
  1864. pad: 1
  1865. kernel_size: 3
  1866. group: 384
  1867. weight_filler {
  1868. type: "msra"
  1869. }
  1870. engine: CAFFE
  1871. }
  1872. }
  1873. layer {
  1874. name: "conv4_6/dwise/bn"
  1875. type: "BatchNorm"
  1876. bottom: "conv4_6/dwise"
  1877. top: "conv4_6/dwise/bn"
  1878. param {
  1879. lr_mult: 0
  1880. decay_mult: 0
  1881. }
  1882. param {
  1883. lr_mult: 0
  1884. decay_mult: 0
  1885. }
  1886. param {
  1887. lr_mult: 0
  1888. decay_mult: 0
  1889. }
  1890. batch_norm_param {
  1891. use_global_stats: true
  1892. eps: 1e-5
  1893. }
  1894. }
  1895. layer {
  1896. name: "conv4_6/dwise/scale"
  1897. type: "Scale"
  1898. bottom: "conv4_6/dwise/bn"
  1899. top: "conv4_6/dwise/bn"
  1900. param {
  1901. lr_mult: 1
  1902. decay_mult: 0
  1903. }
  1904. param {
  1905. lr_mult: 1
  1906. decay_mult: 0
  1907. }
  1908. scale_param {
  1909. bias_term: true
  1910. }
  1911. }
  1912. layer {
  1913. name: "relu4_6/dwise"
  1914. type: "ReLU"
  1915. bottom: "conv4_6/dwise/bn"
  1916. top: "conv4_6/dwise/bn"
  1917. }
  1918. layer {
  1919. name: "conv4_6/linear"
  1920. type: "Convolution"
  1921. bottom: "conv4_6/dwise/bn"
  1922. top: "conv4_6/linear"
  1923. param {
  1924. lr_mult: 1
  1925. decay_mult: 1
  1926. }
  1927. convolution_param {
  1928. num_output: 64
  1929. bias_term: false
  1930. kernel_size: 1
  1931. weight_filler {
  1932. type: "msra"
  1933. }
  1934. }
  1935. }
  1936. layer {
  1937. name: "conv4_6/linear/bn"
  1938. type: "BatchNorm"
  1939. bottom: "conv4_6/linear"
  1940. top: "conv4_6/linear/bn"
  1941. param {
  1942. lr_mult: 0
  1943. decay_mult: 0
  1944. }
  1945. param {
  1946. lr_mult: 0
  1947. decay_mult: 0
  1948. }
  1949. param {
  1950. lr_mult: 0
  1951. decay_mult: 0
  1952. }
  1953. batch_norm_param {
  1954. use_global_stats: true
  1955. eps: 1e-5
  1956. }
  1957. }
  1958. layer {
  1959. name: "conv4_6/linear/scale"
  1960. type: "Scale"
  1961. bottom: "conv4_6/linear/bn"
  1962. top: "conv4_6/linear/bn"
  1963. param {
  1964. lr_mult: 1
  1965. decay_mult: 0
  1966. }
  1967. param {
  1968. lr_mult: 1
  1969. decay_mult: 0
  1970. }
  1971. scale_param {
  1972. bias_term: true
  1973. }
  1974. }
layer {
name: "block_4_6"
type: "Eltwise"
bottom: "block_4_5"
bottom: "conv4_6/linear/bn"
top: "block_4_6"
}
  1982. layer {
  1983. name: "conv4_7/expand"
  1984. type: "Convolution"
  1985. bottom: "block_4_6"
  1986. top: "conv4_7/expand"
  1987. param {
  1988. lr_mult: 1
  1989. decay_mult: 1
  1990. }
  1991. convolution_param {
  1992. num_output: 384
  1993. bias_term: false
  1994. kernel_size: 1
  1995. weight_filler {
  1996. type: "msra"
  1997. }
  1998. }
  1999. }
  2000. layer {
  2001. name: "conv4_7/expand/bn"
  2002. type: "BatchNorm"
  2003. bottom: "conv4_7/expand"
  2004. top: "conv4_7/expand/bn"
  2005. param {
  2006. lr_mult: 0
  2007. decay_mult: 0
  2008. }
  2009. param {
  2010. lr_mult: 0
  2011. decay_mult: 0
  2012. }
  2013. param {
  2014. lr_mult: 0
  2015. decay_mult: 0
  2016. }
  2017. batch_norm_param {
  2018. use_global_stats: true
  2019. eps: 1e-5
  2020. }
  2021. }
  2022. layer {
  2023. name: "conv4_7/expand/scale"
  2024. type: "Scale"
  2025. bottom: "conv4_7/expand/bn"
  2026. top: "conv4_7/expand/bn"
  2027. param {
  2028. lr_mult: 1
  2029. decay_mult: 0
  2030. }
  2031. param {
  2032. lr_mult: 1
  2033. decay_mult: 0
  2034. }
  2035. scale_param {
  2036. bias_term: true
  2037. }
  2038. }
  2039. layer {
  2040. name: "relu4_7/expand"
  2041. type: "ReLU"
  2042. bottom: "conv4_7/expand/bn"
  2043. top: "conv4_7/expand/bn"
  2044. }
layer {
name: "conv4_7/dwise"
type: "Convolution"
bottom: "conv4_7/expand/bn"
top: "conv4_7/dwise"
param {
lr_mult: 1
decay_mult: 1
}
convolution_param {
num_output: 384
bias_term: false
pad: 1
kernel_size: 3
group: 384
stride: 2
weight_filler {
type: "msra"
}
engine: CAFFE
}
}
  2067. layer {
  2068. name: "conv4_7/dwise/bn"
  2069. type: "BatchNorm"
  2070. bottom: "conv4_7/dwise"
  2071. top: "conv4_7/dwise/bn"
  2072. param {
  2073. lr_mult: 0
  2074. decay_mult: 0
  2075. }
  2076. param {
  2077. lr_mult: 0
  2078. decay_mult: 0
  2079. }
  2080. param {
  2081. lr_mult: 0
  2082. decay_mult: 0
  2083. }
  2084. batch_norm_param {
  2085. use_global_stats: true
  2086. eps: 1e-5
  2087. }
  2088. }
  2089. layer {
  2090. name: "conv4_7/dwise/scale"
  2091. type: "Scale"
  2092. bottom: "conv4_7/dwise/bn"
  2093. top: "conv4_7/dwise/bn"
  2094. param {
  2095. lr_mult: 1
  2096. decay_mult: 0
  2097. }
  2098. param {
  2099. lr_mult: 1
  2100. decay_mult: 0
  2101. }
  2102. scale_param {
  2103. bias_term: true
  2104. }
  2105. }
  2106. layer {
  2107. name: "relu4_7/dwise"
  2108. type: "ReLU"
  2109. bottom: "conv4_7/dwise/bn"
  2110. top: "conv4_7/dwise/bn"
  2111. }
  2112. layer {
  2113. name: "conv4_7/linear"
  2114. type: "Convolution"
  2115. bottom: "conv4_7/dwise/bn"
  2116. top: "conv4_7/linear"
  2117. param {
  2118. lr_mult: 1
  2119. decay_mult: 1
  2120. }
  2121. convolution_param {
  2122. num_output: 96
  2123. bias_term: false
  2124. kernel_size: 1
  2125. weight_filler {
  2126. type: "msra"
  2127. }
  2128. }
  2129. }
  2130. layer {
  2131. name: "conv4_7/linear/bn"
  2132. type: "BatchNorm"
  2133. bottom: "conv4_7/linear"
  2134. top: "conv4_7/linear/bn"
  2135. param {
  2136. lr_mult: 0
  2137. decay_mult: 0
  2138. }
  2139. param {
  2140. lr_mult: 0
  2141. decay_mult: 0
  2142. }
  2143. param {
  2144. lr_mult: 0
  2145. decay_mult: 0
  2146. }
  2147. batch_norm_param {
  2148. use_global_stats: true
  2149. eps: 1e-5
  2150. }
  2151. }
  2152. layer {
  2153. name: "conv4_7/linear/scale"
  2154. type: "Scale"
  2155. bottom: "conv4_7/linear/bn"
  2156. top: "conv4_7/linear/bn"
  2157. param {
  2158. lr_mult: 1
  2159. decay_mult: 0
  2160. }
  2161. param {
  2162. lr_mult: 1
  2163. decay_mult: 0
  2164. }
  2165. scale_param {
  2166. bias_term: true
  2167. }
  2168. }
  2169. layer {
  2170. name: "conv5_1/expand"
  2171. type: "Convolution"
  2172. bottom: "conv4_7/linear/bn"
  2173. top: "conv5_1/expand"
  2174. param {
  2175. lr_mult: 1
  2176. decay_mult: 1
  2177. }
  2178. convolution_param {
  2179. num_output: 576
  2180. bias_term: false
  2181. kernel_size: 1
  2182. weight_filler {
  2183. type: "msra"
  2184. }
  2185. }
  2186. }
  2187. layer {
  2188. name: "conv5_1/expand/bn"
  2189. type: "BatchNorm"
  2190. bottom: "conv5_1/expand"
  2191. top: "conv5_1/expand/bn"
  2192. param {
  2193. lr_mult: 0
  2194. decay_mult: 0
  2195. }
  2196. param {
  2197. lr_mult: 0
  2198. decay_mult: 0
  2199. }
  2200. param {
  2201. lr_mult: 0
  2202. decay_mult: 0
  2203. }
  2204. batch_norm_param {
  2205. use_global_stats: true
  2206. eps: 1e-5
  2207. }
  2208. }
  2209. layer {
  2210. name: "conv5_1/expand/scale"
  2211. type: "Scale"
  2212. bottom: "conv5_1/expand/bn"
  2213. top: "conv5_1/expand/bn"
  2214. param {
  2215. lr_mult: 1
  2216. decay_mult: 0
  2217. }
  2218. param {
  2219. lr_mult: 1
  2220. decay_mult: 0
  2221. }
  2222. scale_param {
  2223. bias_term: true
  2224. }
  2225. }
  2226. layer {
  2227. name: "relu5_1/expand"
  2228. type: "ReLU"
  2229. bottom: "conv5_1/expand/bn"
  2230. top: "conv5_1/expand/bn"
  2231. }
  2232. layer {
  2233. name: "conv5_1/dwise"
  2234. type: "Convolution"
  2235. bottom: "conv5_1/expand/bn"
  2236. top: "conv5_1/dwise"
  2237. param {
  2238. lr_mult: 1
  2239. decay_mult: 1
  2240. }
  2241. convolution_param {
  2242. num_output: 576
  2243. bias_term: false
  2244. pad: 1
  2245. kernel_size: 3
  2246. group: 576
  2247. weight_filler {
  2248. type: "msra"
  2249. }
  2250. engine: CAFFE
  2251. }
  2252. }
  2253. layer {
  2254. name: "conv5_1/dwise/bn"
  2255. type: "BatchNorm"
  2256. bottom: "conv5_1/dwise"
  2257. top: "conv5_1/dwise/bn"
  2258. param {
  2259. lr_mult: 0
  2260. decay_mult: 0
  2261. }
  2262. param {
  2263. lr_mult: 0
  2264. decay_mult: 0
  2265. }
  2266. param {
  2267. lr_mult: 0
  2268. decay_mult: 0
  2269. }
  2270. batch_norm_param {
  2271. use_global_stats: true
  2272. eps: 1e-5
  2273. }
  2274. }
  2275. layer {
  2276. name: "conv5_1/dwise/scale"
  2277. type: "Scale"
  2278. bottom: "conv5_1/dwise/bn"
  2279. top: "conv5_1/dwise/bn"
  2280. param {
  2281. lr_mult: 1
  2282. decay_mult: 0
  2283. }
  2284. param {
  2285. lr_mult: 1
  2286. decay_mult: 0
  2287. }
  2288. scale_param {
  2289. bias_term: true
  2290. }
  2291. }
  2292. layer {
  2293. name: "relu5_1/dwise"
  2294. type: "ReLU"
  2295. bottom: "conv5_1/dwise/bn"
  2296. top: "conv5_1/dwise/bn"
  2297. }
  2298. layer {
  2299. name: "conv5_1/linear"
  2300. type: "Convolution"
  2301. bottom: "conv5_1/dwise/bn"
  2302. top: "conv5_1/linear"
  2303. param {
  2304. lr_mult: 1
  2305. decay_mult: 1
  2306. }
  2307. convolution_param {
  2308. num_output: 96
  2309. bias_term: false
  2310. kernel_size: 1
  2311. weight_filler {
  2312. type: "msra"
  2313. }
  2314. }
  2315. }
  2316. layer {
  2317. name: "conv5_1/linear/bn"
  2318. type: "BatchNorm"
  2319. bottom: "conv5_1/linear"
  2320. top: "conv5_1/linear/bn"
  2321. param {
  2322. lr_mult: 0
  2323. decay_mult: 0
  2324. }
  2325. param {
  2326. lr_mult: 0
  2327. decay_mult: 0
  2328. }
  2329. param {
  2330. lr_mult: 0
  2331. decay_mult: 0
  2332. }
  2333. batch_norm_param {
  2334. use_global_stats: true
  2335. eps: 1e-5
  2336. }
  2337. }
  2338. layer {
  2339. name: "conv5_1/linear/scale"
  2340. type: "Scale"
  2341. bottom: "conv5_1/linear/bn"
  2342. top: "conv5_1/linear/bn"
  2343. param {
  2344. lr_mult: 1
  2345. decay_mult: 0
  2346. }
  2347. param {
  2348. lr_mult: 1
  2349. decay_mult: 0
  2350. }
  2351. scale_param {
  2352. bias_term: true
  2353. }
  2354. }
layer {
name: "block_5_1"
type: "Eltwise"
bottom: "conv4_7/linear/bn"
bottom: "conv5_1/linear/bn"
top: "block_5_1"
}
  2362. layer {
  2363. name: "conv5_2/expand"
  2364. type: "Convolution"
  2365. bottom: "block_5_1"
  2366. top: "conv5_2/expand"
  2367. param {
  2368. lr_mult: 1
  2369. decay_mult: 1
  2370. }
  2371. convolution_param {
  2372. num_output: 576
  2373. bias_term: false
  2374. kernel_size: 1
  2375. weight_filler {
  2376. type: "msra"
  2377. }
  2378. }
  2379. }
  2380. layer {
  2381. name: "conv5_2/expand/bn"
  2382. type: "BatchNorm"
  2383. bottom: "conv5_2/expand"
  2384. top: "conv5_2/expand/bn"
  2385. param {
  2386. lr_mult: 0
  2387. decay_mult: 0
  2388. }
  2389. param {
  2390. lr_mult: 0
  2391. decay_mult: 0
  2392. }
  2393. param {
  2394. lr_mult: 0
  2395. decay_mult: 0
  2396. }
  2397. batch_norm_param {
  2398. use_global_stats: true
  2399. eps: 1e-5
  2400. }
  2401. }
  2402. layer {
  2403. name: "conv5_2/expand/scale"
  2404. type: "Scale"
  2405. bottom: "conv5_2/expand/bn"
  2406. top: "conv5_2/expand/bn"
  2407. param {
  2408. lr_mult: 1
  2409. decay_mult: 0
  2410. }
  2411. param {
  2412. lr_mult: 1
  2413. decay_mult: 0
  2414. }
  2415. scale_param {
  2416. bias_term: true
  2417. }
  2418. }
  2419. layer {
  2420. name: "relu5_2/expand"
  2421. type: "ReLU"
  2422. bottom: "conv5_2/expand/bn"
  2423. top: "conv5_2/expand/bn"
  2424. }
  2425. layer {
  2426. name: "conv5_2/dwise"
  2427. type: "Convolution"
  2428. bottom: "conv5_2/expand/bn"
  2429. top: "conv5_2/dwise"
  2430. param {
  2431. lr_mult: 1
  2432. decay_mult: 1
  2433. }
  2434. convolution_param {
  2435. num_output: 576
  2436. bias_term: false
  2437. pad: 1
  2438. kernel_size: 3
  2439. group: 576
  2440. weight_filler {
  2441. type: "msra"
  2442. }
  2443. engine: CAFFE
  2444. }
  2445. }
  2446. layer {
  2447. name: "conv5_2/dwise/bn"
  2448. type: "BatchNorm"
  2449. bottom: "conv5_2/dwise"
  2450. top: "conv5_2/dwise/bn"
  2451. param {
  2452. lr_mult: 0
  2453. decay_mult: 0
  2454. }
  2455. param {
  2456. lr_mult: 0
  2457. decay_mult: 0
  2458. }
  2459. param {
  2460. lr_mult: 0
  2461. decay_mult: 0
  2462. }
  2463. batch_norm_param {
  2464. use_global_stats: true
  2465. eps: 1e-5
  2466. }
  2467. }
  2468. layer {
  2469. name: "conv5_2/dwise/scale"
  2470. type: "Scale"
  2471. bottom: "conv5_2/dwise/bn"
  2472. top: "conv5_2/dwise/bn"
  2473. param {
  2474. lr_mult: 1
  2475. decay_mult: 0
  2476. }
  2477. param {
  2478. lr_mult: 1
  2479. decay_mult: 0
  2480. }
  2481. scale_param {
  2482. bias_term: true
  2483. }
  2484. }
  2485. layer {
  2486. name: "relu5_2/dwise"
  2487. type: "ReLU"
  2488. bottom: "conv5_2/dwise/bn"
  2489. top: "conv5_2/dwise/bn"
  2490. }
  2491. layer {
  2492. name: "conv5_2/linear"
  2493. type: "Convolution"
  2494. bottom: "conv5_2/dwise/bn"
  2495. top: "conv5_2/linear"
  2496. param {
  2497. lr_mult: 1
  2498. decay_mult: 1
  2499. }
  2500. convolution_param {
  2501. num_output: 96
  2502. bias_term: false
  2503. kernel_size: 1
  2504. weight_filler {
  2505. type: "msra"
  2506. }
  2507. }
  2508. }
  2509. layer {
  2510. name: "conv5_2/linear/bn"
  2511. type: "BatchNorm"
  2512. bottom: "conv5_2/linear"
  2513. top: "conv5_2/linear/bn"
  2514. param {
  2515. lr_mult: 0
  2516. decay_mult: 0
  2517. }
  2518. param {
  2519. lr_mult: 0
  2520. decay_mult: 0
  2521. }
  2522. param {
  2523. lr_mult: 0
  2524. decay_mult: 0
  2525. }
  2526. batch_norm_param {
  2527. use_global_stats: true
  2528. eps: 1e-5
  2529. }
  2530. }
  2531. layer {
  2532. name: "conv5_2/linear/scale"
  2533. type: "Scale"
  2534. bottom: "conv5_2/linear/bn"
  2535. top: "conv5_2/linear/bn"
  2536. param {
  2537. lr_mult: 1
  2538. decay_mult: 0
  2539. }
  2540. param {
  2541. lr_mult: 1
  2542. decay_mult: 0
  2543. }
  2544. scale_param {
  2545. bias_term: true
  2546. }
  2547. }
layer {
name: "block_5_2"
type: "Eltwise"
bottom: "block_5_1"
bottom: "conv5_2/linear/bn"
top: "block_5_2"
}
  2555. layer {
  2556. name: "conv5_3/expand"
  2557. type: "Convolution"
  2558. bottom: "block_5_2"
  2559. top: "conv5_3/expand"
  2560. param {
  2561. lr_mult: 1
  2562. decay_mult: 1
  2563. }
  2564. convolution_param {
  2565. num_output: 576
  2566. bias_term: false
  2567. kernel_size: 1
  2568. weight_filler {
  2569. type: "msra"
  2570. }
  2571. }
  2572. }
  2573. layer {
  2574. name: "conv5_3/expand/bn"
  2575. type: "BatchNorm"
  2576. bottom: "conv5_3/expand"
  2577. top: "conv5_3/expand/bn"
  2578. param {
  2579. lr_mult: 0
  2580. decay_mult: 0
  2581. }
  2582. param {
  2583. lr_mult: 0
  2584. decay_mult: 0
  2585. }
  2586. param {
  2587. lr_mult: 0
  2588. decay_mult: 0
  2589. }
  2590. batch_norm_param {
  2591. use_global_stats: true
  2592. eps: 1e-5
  2593. }
  2594. }
  2595. layer {
  2596. name: "conv5_3/expand/scale"
  2597. type: "Scale"
  2598. bottom: "conv5_3/expand/bn"
  2599. top: "conv5_3/expand/bn"
  2600. param {
  2601. lr_mult: 1
  2602. decay_mult: 0
  2603. }
  2604. param {
  2605. lr_mult: 1
  2606. decay_mult: 0
  2607. }
  2608. scale_param {
  2609. bias_term: true
  2610. }
  2611. }
  2612. layer {
  2613. name: "relu5_3/expand"
  2614. type: "ReLU"
  2615. bottom: "conv5_3/expand/bn"
  2616. top: "conv5_3/expand/bn"
  2617. }
layer {
name: "conv5_3/dwise"
type: "Convolution"
bottom: "conv5_3/expand/bn"
top: "conv5_3/dwise"
param {
lr_mult: 1
decay_mult: 1
}
convolution_param {
num_output: 576
bias_term: false
pad: 1
kernel_size: 3
group: 576
stride: 2
weight_filler {
type: "msra"
}
engine: CAFFE
}
}
  2640. layer {
  2641. name: "conv5_3/dwise/bn"
  2642. type: "BatchNorm"
  2643. bottom: "conv5_3/dwise"
  2644. top: "conv5_3/dwise/bn"
  2645. param {
  2646. lr_mult: 0
  2647. decay_mult: 0
  2648. }
  2649. param {
  2650. lr_mult: 0
  2651. decay_mult: 0
  2652. }
  2653. param {
  2654. lr_mult: 0
  2655. decay_mult: 0
  2656. }
  2657. batch_norm_param {
  2658. use_global_stats: true
  2659. eps: 1e-5
  2660. }
  2661. }
  2662. layer {
  2663. name: "conv5_3/dwise/scale"
  2664. type: "Scale"
  2665. bottom: "conv5_3/dwise/bn"
  2666. top: "conv5_3/dwise/bn"
  2667. param {
  2668. lr_mult: 1
  2669. decay_mult: 0
  2670. }
  2671. param {
  2672. lr_mult: 1
  2673. decay_mult: 0
  2674. }
  2675. scale_param {
  2676. bias_term: true
  2677. }
  2678. }
  2679. layer {
  2680. name: "relu5_3/dwise"
  2681. type: "ReLU"
  2682. bottom: "conv5_3/dwise/bn"
  2683. top: "conv5_3/dwise/bn"
  2684. }
  2685. layer {
  2686. name: "conv5_3/linear"
  2687. type: "Convolution"
  2688. bottom: "conv5_3/dwise/bn"
  2689. top: "conv5_3/linear"
  2690. param {
  2691. lr_mult: 1
  2692. decay_mult: 1
  2693. }
  2694. convolution_param {
  2695. num_output: 160
  2696. bias_term: false
  2697. kernel_size: 1
  2698. weight_filler {
  2699. type: "msra"
  2700. }
  2701. }
  2702. }
  2703. layer {
  2704. name: "conv5_3/linear/bn"
  2705. type: "BatchNorm"
  2706. bottom: "conv5_3/linear"
  2707. top: "conv5_3/linear/bn"
  2708. param {
  2709. lr_mult: 0
  2710. decay_mult: 0
  2711. }
  2712. param {
  2713. lr_mult: 0
  2714. decay_mult: 0
  2715. }
  2716. param {
  2717. lr_mult: 0
  2718. decay_mult: 0
  2719. }
  2720. batch_norm_param {
  2721. use_global_stats: true
  2722. eps: 1e-5
  2723. }
  2724. }
  2725. layer {
  2726. name: "conv5_3/linear/scale"
  2727. type: "Scale"
  2728. bottom: "conv5_3/linear/bn"
  2729. top: "conv5_3/linear/bn"
  2730. param {
  2731. lr_mult: 1
  2732. decay_mult: 0
  2733. }
  2734. param {
  2735. lr_mult: 1
  2736. decay_mult: 0
  2737. }
  2738. scale_param {
  2739. bias_term: true
  2740. }
  2741. }
  2742. layer {
  2743. name: "conv6_1/expand"
  2744. type: "Convolution"
  2745. bottom: "conv5_3/linear/bn"
  2746. top: "conv6_1/expand"
  2747. param {
  2748. lr_mult: 1
  2749. decay_mult: 1
  2750. }
  2751. convolution_param {
  2752. num_output: 960
  2753. bias_term: false
  2754. kernel_size: 1
  2755. weight_filler {
  2756. type: "msra"
  2757. }
  2758. }
  2759. }
  2760. layer {
  2761. name: "conv6_1/expand/bn"
  2762. type: "BatchNorm"
  2763. bottom: "conv6_1/expand"
  2764. top: "conv6_1/expand/bn"
  2765. param {
  2766. lr_mult: 0
  2767. decay_mult: 0
  2768. }
  2769. param {
  2770. lr_mult: 0
  2771. decay_mult: 0
  2772. }
  2773. param {
  2774. lr_mult: 0
  2775. decay_mult: 0
  2776. }
  2777. batch_norm_param {
  2778. use_global_stats: true
  2779. eps: 1e-5
  2780. }
  2781. }
  2782. layer {
  2783. name: "conv6_1/expand/scale"
  2784. type: "Scale"
  2785. bottom: "conv6_1/expand/bn"
  2786. top: "conv6_1/expand/bn"
  2787. param {
  2788. lr_mult: 1
  2789. decay_mult: 0
  2790. }
  2791. param {
  2792. lr_mult: 1
  2793. decay_mult: 0
  2794. }
  2795. scale_param {
  2796. bias_term: true
  2797. }
  2798. }
  2799. layer {
  2800. name: "relu6_1/expand"
  2801. type: "ReLU"
  2802. bottom: "conv6_1/expand/bn"
  2803. top: "conv6_1/expand/bn"
  2804. }
  2805. layer {
  2806. name: "conv6_1/dwise"
  2807. type: "Convolution"
  2808. bottom: "conv6_1/expand/bn"
  2809. top: "conv6_1/dwise"
  2810. param {
  2811. lr_mult: 1
  2812. decay_mult: 1
  2813. }
  2814. convolution_param {
  2815. num_output: 960
  2816. bias_term: false
  2817. pad: 1
  2818. kernel_size: 3
  2819. group: 960
  2820. weight_filler {
  2821. type: "msra"
  2822. }
  2823. engine: CAFFE
  2824. }
  2825. }
  2826. layer {
  2827. name: "conv6_1/dwise/bn"
  2828. type: "BatchNorm"
  2829. bottom: "conv6_1/dwise"
  2830. top: "conv6_1/dwise/bn"
  2831. param {
  2832. lr_mult: 0
  2833. decay_mult: 0
  2834. }
  2835. param {
  2836. lr_mult: 0
  2837. decay_mult: 0
  2838. }
  2839. param {
  2840. lr_mult: 0
  2841. decay_mult: 0
  2842. }
  2843. batch_norm_param {
  2844. use_global_stats: true
  2845. eps: 1e-5
  2846. }
  2847. }
  2848. layer {
  2849. name: "conv6_1/dwise/scale"
  2850. type: "Scale"
  2851. bottom: "conv6_1/dwise/bn"
  2852. top: "conv6_1/dwise/bn"
  2853. param {
  2854. lr_mult: 1
  2855. decay_mult: 0
  2856. }
  2857. param {
  2858. lr_mult: 1
  2859. decay_mult: 0
  2860. }
  2861. scale_param {
  2862. bias_term: true
  2863. }
  2864. }
  2865. layer {
  2866. name: "relu6_1/dwise"
  2867. type: "ReLU"
  2868. bottom: "conv6_1/dwise/bn"
  2869. top: "conv6_1/dwise/bn"
  2870. }
  2871. layer {
  2872. name: "conv6_1/linear"
  2873. type: "Convolution"
  2874. bottom: "conv6_1/dwise/bn"
  2875. top: "conv6_1/linear"
  2876. param {
  2877. lr_mult: 1
  2878. decay_mult: 1
  2879. }
  2880. convolution_param {
  2881. num_output: 160
  2882. bias_term: false
  2883. kernel_size: 1
  2884. weight_filler {
  2885. type: "msra"
  2886. }
  2887. }
  2888. }
  2889. layer {
  2890. name: "conv6_1/linear/bn"
  2891. type: "BatchNorm"
  2892. bottom: "conv6_1/linear"
  2893. top: "conv6_1/linear/bn"
  2894. param {
  2895. lr_mult: 0
  2896. decay_mult: 0
  2897. }
  2898. param {
  2899. lr_mult: 0
  2900. decay_mult: 0
  2901. }
  2902. param {
  2903. lr_mult: 0
  2904. decay_mult: 0
  2905. }
  2906. batch_norm_param {
  2907. use_global_stats: true
  2908. eps: 1e-5
  2909. }
  2910. }
  2911. layer {
  2912. name: "conv6_1/linear/scale"
  2913. type: "Scale"
  2914. bottom: "conv6_1/linear/bn"
  2915. top: "conv6_1/linear/bn"
  2916. param {
  2917. lr_mult: 1
  2918. decay_mult: 0
  2919. }
  2920. param {
  2921. lr_mult: 1
  2922. decay_mult: 0
  2923. }
  2924. scale_param {
  2925. bias_term: true
  2926. }
  2927. }
layer {
name: "block_6_1"
type: "Eltwise"
bottom: "conv5_3/linear/bn"
bottom: "conv6_1/linear/bn"
top: "block_6_1"
}
  2935. layer {
  2936. name: "conv6_2/expand"
  2937. type: "Convolution"
  2938. bottom: "block_6_1"
  2939. top: "conv6_2/expand"
  2940. param {
  2941. lr_mult: 1
  2942. decay_mult: 1
  2943. }
  2944. convolution_param {
  2945. num_output: 960
  2946. bias_term: false
  2947. kernel_size: 1
  2948. weight_filler {
  2949. type: "msra"
  2950. }
  2951. }
  2952. }
  2953. layer {
  2954. name: "conv6_2/expand/bn"
  2955. type: "BatchNorm"
  2956. bottom: "conv6_2/expand"
  2957. top: "conv6_2/expand/bn"
  2958. param {
  2959. lr_mult: 0
  2960. decay_mult: 0
  2961. }
  2962. param {
  2963. lr_mult: 0
  2964. decay_mult: 0
  2965. }
  2966. param {
  2967. lr_mult: 0
  2968. decay_mult: 0
  2969. }
  2970. batch_norm_param {
  2971. use_global_stats: true
  2972. eps: 1e-5
  2973. }
  2974. }
  2975. layer {
  2976. name: "conv6_2/expand/scale"
  2977. type: "Scale"
  2978. bottom: "conv6_2/expand/bn"
  2979. top: "conv6_2/expand/bn"
  2980. param {
  2981. lr_mult: 1
  2982. decay_mult: 0
  2983. }
  2984. param {
  2985. lr_mult: 1
  2986. decay_mult: 0
  2987. }
  2988. scale_param {
  2989. bias_term: true
  2990. }
  2991. }
  2992. layer {
  2993. name: "relu6_2/expand"
  2994. type: "ReLU"
  2995. bottom: "conv6_2/expand/bn"
  2996. top: "conv6_2/expand/bn"
  2997. }
  2998. layer {
  2999. name: "conv6_2/dwise"
  3000. type: "Convolution"
  3001. bottom: "conv6_2/expand/bn"
  3002. top: "conv6_2/dwise"
  3003. param {
  3004. lr_mult: 1
  3005. decay_mult: 1
  3006. }
  3007. convolution_param {
  3008. num_output: 960
  3009. bias_term: false
  3010. pad: 1
  3011. kernel_size: 3
  3012. group: 960
  3013. weight_filler {
  3014. type: "msra"
  3015. }
  3016. engine: CAFFE
  3017. }
  3018. }
  3019. layer {
  3020. name: "conv6_2/dwise/bn"
  3021. type: "BatchNorm"
  3022. bottom: "conv6_2/dwise"
  3023. top: "conv6_2/dwise/bn"
  3024. param {
  3025. lr_mult: 0
  3026. decay_mult: 0
  3027. }
  3028. param {
  3029. lr_mult: 0
  3030. decay_mult: 0
  3031. }
  3032. param {
  3033. lr_mult: 0
  3034. decay_mult: 0
  3035. }
  3036. batch_norm_param {
  3037. use_global_stats: true
  3038. eps: 1e-5
  3039. }
  3040. }
  3041. layer {
  3042. name: "conv6_2/dwise/scale"
  3043. type: "Scale"
  3044. bottom: "conv6_2/dwise/bn"
  3045. top: "conv6_2/dwise/bn"
  3046. param {
  3047. lr_mult: 1
  3048. decay_mult: 0
  3049. }
  3050. param {
  3051. lr_mult: 1
  3052. decay_mult: 0
  3053. }
  3054. scale_param {
  3055. bias_term: true
  3056. }
  3057. }
  3058. layer {
  3059. name: "relu6_2/dwise"
  3060. type: "ReLU"
  3061. bottom: "conv6_2/dwise/bn"
  3062. top: "conv6_2/dwise/bn"
  3063. }
  3064. layer {
  3065. name: "conv6_2/linear"
  3066. type: "Convolution"
  3067. bottom: "conv6_2/dwise/bn"
  3068. top: "conv6_2/linear"
  3069. param {
  3070. lr_mult: 1
  3071. decay_mult: 1
  3072. }
  3073. convolution_param {
  3074. num_output: 160
  3075. bias_term: false
  3076. kernel_size: 1
  3077. weight_filler {
  3078. type: "msra"
  3079. }
  3080. }
  3081. }
  3082. layer {
  3083. name: "conv6_2/linear/bn"
  3084. type: "BatchNorm"
  3085. bottom: "conv6_2/linear"
  3086. top: "conv6_2/linear/bn"
  3087. param {
  3088. lr_mult: 0
  3089. decay_mult: 0
  3090. }
  3091. param {
  3092. lr_mult: 0
  3093. decay_mult: 0
  3094. }
  3095. param {
  3096. lr_mult: 0
  3097. decay_mult: 0
  3098. }
  3099. batch_norm_param {
  3100. use_global_stats: true
  3101. eps: 1e-5
  3102. }
  3103. }
  3104. layer {
  3105. name: "conv6_2/linear/scale"
  3106. type: "Scale"
  3107. bottom: "conv6_2/linear/bn"
  3108. top: "conv6_2/linear/bn"
  3109. param {
  3110. lr_mult: 1
  3111. decay_mult: 0
  3112. }
  3113. param {
  3114. lr_mult: 1
  3115. decay_mult: 0
  3116. }
  3117. scale_param {
  3118. bias_term: true
  3119. }
  3120. }
layer {
name: "block_6_2"
type: "Eltwise"
bottom: "block_6_1"
bottom: "conv6_2/linear/bn"
top: "block_6_2"
}
  3128. layer {
  3129. name: "conv6_3/expand"
  3130. type: "Convolution"
  3131. bottom: "block_6_2"
  3132. top: "conv6_3/expand"
  3133. param {
  3134. lr_mult: 1
  3135. decay_mult: 1
  3136. }
  3137. convolution_param {
  3138. num_output: 960
  3139. bias_term: false
  3140. kernel_size: 1
  3141. weight_filler {
  3142. type: "msra"
  3143. }
  3144. }
  3145. }
  3146. layer {
  3147. name: "conv6_3/expand/bn"
  3148. type: "BatchNorm"
  3149. bottom: "conv6_3/expand"
  3150. top: "conv6_3/expand/bn"
  3151. param {
  3152. lr_mult: 0
  3153. decay_mult: 0
  3154. }
  3155. param {
  3156. lr_mult: 0
  3157. decay_mult: 0
  3158. }
  3159. param {
  3160. lr_mult: 0
  3161. decay_mult: 0
  3162. }
  3163. batch_norm_param {
  3164. use_global_stats: true
  3165. eps: 1e-5
  3166. }
  3167. }
  3168. layer {
  3169. name: "conv6_3/expand/scale"
  3170. type: "Scale"
  3171. bottom: "conv6_3/expand/bn"
  3172. top: "conv6_3/expand/bn"
  3173. param {
  3174. lr_mult: 1
  3175. decay_mult: 0
  3176. }
  3177. param {
  3178. lr_mult: 1
  3179. decay_mult: 0
  3180. }
  3181. scale_param {
  3182. bias_term: true
  3183. }
  3184. }
  3185. layer {
  3186. name: "relu6_3/expand"
  3187. type: "ReLU"
  3188. bottom: "conv6_3/expand/bn"
  3189. top: "conv6_3/expand/bn"
  3190. }
  3191. layer {
  3192. name: "conv6_3/dwise"
  3193. type: "Convolution"
  3194. bottom: "conv6_3/expand/bn"
  3195. top: "conv6_3/dwise"
  3196. param {
  3197. lr_mult: 1
  3198. decay_mult: 1
  3199. }
  3200. convolution_param {
  3201. num_output: 960
  3202. bias_term: false
  3203. pad: 1
  3204. kernel_size: 3
  3205. group: 960
  3206. weight_filler {
  3207. type: "msra"
  3208. }
  3209. engine: CAFFE
  3210. }
  3211. }
  3212. layer {
  3213. name: "conv6_3/dwise/bn"
  3214. type: "BatchNorm"
  3215. bottom: "conv6_3/dwise"
  3216. top: "conv6_3/dwise/bn"
  3217. param {
  3218. lr_mult: 0
  3219. decay_mult: 0
  3220. }
  3221. param {
  3222. lr_mult: 0
  3223. decay_mult: 0
  3224. }
  3225. param {
  3226. lr_mult: 0
  3227. decay_mult: 0
  3228. }
  3229. batch_norm_param {
  3230. use_global_stats: true
  3231. eps: 1e-5
  3232. }
  3233. }
  3234. layer {
  3235. name: "conv6_3/dwise/scale"
  3236. type: "Scale"
  3237. bottom: "conv6_3/dwise/bn"
  3238. top: "conv6_3/dwise/bn"
  3239. param {
  3240. lr_mult: 1
  3241. decay_mult: 0
  3242. }
  3243. param {
  3244. lr_mult: 1
  3245. decay_mult: 0
  3246. }
  3247. scale_param {
  3248. bias_term: true
  3249. }
  3250. }
  3251. layer {
  3252. name: "relu6_3/dwise"
  3253. type: "ReLU"
  3254. bottom: "conv6_3/dwise/bn"
  3255. top: "conv6_3/dwise/bn"
  3256. }
  3257. layer {
  3258. name: "conv6_3/linear"
  3259. type: "Convolution"
  3260. bottom: "conv6_3/dwise/bn"
  3261. top: "conv6_3/linear"
  3262. param {
  3263. lr_mult: 1
  3264. decay_mult: 1
  3265. }
  3266. convolution_param {
  3267. num_output: 320
  3268. bias_term: false
  3269. kernel_size: 1
  3270. weight_filler {
  3271. type: "msra"
  3272. }
  3273. }
  3274. }
  3275. layer {
  3276. name: "conv6_3/linear/bn"
  3277. type: "BatchNorm"
  3278. bottom: "conv6_3/linear"
  3279. top: "conv6_3/linear/bn"
  3280. param {
  3281. lr_mult: 0
  3282. decay_mult: 0
  3283. }
  3284. param {
  3285. lr_mult: 0
  3286. decay_mult: 0
  3287. }
  3288. param {
  3289. lr_mult: 0
  3290. decay_mult: 0
  3291. }
  3292. batch_norm_param {
  3293. use_global_stats: true
  3294. eps: 1e-5
  3295. }
  3296. }
  3297. layer {
  3298. name: "conv6_3/linear/scale"
  3299. type: "Scale"
  3300. bottom: "conv6_3/linear/bn"
  3301. top: "conv6_3/linear/bn"
  3302. param {
  3303. lr_mult: 1
  3304. decay_mult: 0
  3305. }
  3306. param {
  3307. lr_mult: 1
  3308. decay_mult: 0
  3309. }
  3310. scale_param {
  3311. bias_term: true
  3312. }
  3313. }
  3314. layer {
  3315. name: "conv6_4"
  3316. type: "Convolution"
  3317. bottom: "conv6_3/linear/bn"
  3318. top: "conv6_4"
  3319. param {
  3320. lr_mult: 1
  3321. decay_mult: 1
  3322. }
  3323. convolution_param {
  3324. num_output: 1280
  3325. bias_term: false
  3326. kernel_size: 1
  3327. weight_filler {
  3328. type: "msra"
  3329. }
  3330. }
  3331. }
  3332. layer {
  3333. name: "conv6_4/bn"
  3334. type: "BatchNorm"
  3335. bottom: "conv6_4"
  3336. top: "conv6_4/bn"
  3337. param {
  3338. lr_mult: 0
  3339. decay_mult: 0
  3340. }
  3341. param {
  3342. lr_mult: 0
  3343. decay_mult: 0
  3344. }
  3345. param {
  3346. lr_mult: 0
  3347. decay_mult: 0
  3348. }
  3349. batch_norm_param {
  3350. use_global_stats: true
  3351. eps: 1e-5
  3352. }
  3353. }
  3354. layer {
  3355. name: "conv6_4/scale"
  3356. type: "Scale"
  3357. bottom: "conv6_4/bn"
  3358. top: "conv6_4/bn"
  3359. param {
  3360. lr_mult: 1
  3361. decay_mult: 0
  3362. }
  3363. param {
  3364. lr_mult: 1
  3365. decay_mult: 0
  3366. }
  3367. scale_param {
  3368. bias_term: true
  3369. }
  3370. }
  3371. layer {
  3372. name: "relu6_4"
  3373. type: "ReLU"
  3374. bottom: "conv6_4/bn"
  3375. top: "conv6_4/bn"
  3376. }
  3377. layer {
  3378. name: "pool6"
  3379. type: "Pooling"
  3380. bottom: "conv6_4/bn"
  3381. top: "pool6"
  3382. pooling_param {
  3383. pool: AVE
  3384. global_pooling: true
  3385. }
  3386. }
  3387. layer {
  3388. name: "fc7"
  3389. type: "Convolution"
  3390. bottom: "pool6"
  3391. top: "fc7"
  3392. param {
  3393. lr_mult: 1
  3394. decay_mult: 1
  3395. }
  3396. param {
  3397. lr_mult: 2
  3398. decay_mult: 0
  3399. }
  3400. convolution_param {
  3401. num_output: 1000
  3402. kernel_size: 1
  3403. weight_filler {
  3404. type: "msra"
  3405. }
  3406. bias_filler {
  3407. type: "constant"
  3408. value: 0
  3409. }
  3410. }
  3411. }
  3412. layer {
  3413. name: "prob"
  3414. type: "Softmax"
  3415. bottom: "fc7"
  3416. top: "prob"
  3417. }