Guest User

Untitled

a guest
Jun 21st, 2018
77
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 55.56 KB | None | 0 0
  1. name: "mobilenet_v2"
  2. layer {
  3. name: "data"
  4. type: "Data"
  5. top: "data"
  6. top: "label"
  7. include {
  8. phase: TRAIN
  9. }
  10. transform_param {
  11. mirror: true
  12. mean_file: "mean.binaryproto"
  13. }
  14. data_param {
  15. source: "train.lmdb"
  16. batch_size: 32
  17. backend: LMDB
  18. }
  19. }
  20. layer {
  21. name: "mobilenet_v2"
  22. type: "MemoryData"
  23. top: "data"
  24. top: "label"
  25. memory_data_param {
  26. batch_size: 32
  27. channels: 3
  28. height: 224
  29. width: 224
  30. }
  31. include {
  32. phase: TEST
  33. }
  34. }
  35. layer {
  36. name: "conv1"
  37. type: "Convolution"
  38. bottom: "data"
  39. top: "conv1"
  40. param {
  41. lr_mult: 1
  42. decay_mult: 1
  43. }
  44. convolution_param {
  45. num_output: 32
  46. bias_term: false
  47. pad: 1
  48. kernel_size: 3
  49. stride: 2
  50. weight_filler {
  51. type: "msra"
  52. }
  53. }
  54. }
  55. layer {
  56. name: "conv1/bn"
  57. type: "BatchNorm"
  58. bottom: "conv1"
  59. top: "conv1/bn"
  60. }
  61. layer {
  62. name: "conv1/scale"
  63. type: "Scale"
  64. bottom: "conv1/bn"
  65. top: "conv1/bn"
  66. param {
  67. lr_mult: 1
  68. decay_mult: 0
  69. }
  70. param {
  71. lr_mult: 1
  72. decay_mult: 0
  73. }
  74. scale_param {
  75. bias_term: true
  76. }
  77. }
  78. layer {
  79. name: "relu1"
  80. type: "ReLU"
  81. bottom: "conv1/bn"
  82. top: "conv1/bn"
  83. }
  84. layer {
  85. name: "conv2_1/expand"
  86. type: "Convolution"
  87. bottom: "conv1/bn"
  88. top: "conv2_1/expand"
  89. param {
  90. lr_mult: 1
  91. decay_mult: 1
  92. }
  93. convolution_param {
  94. num_output: 32
  95. bias_term: false
  96. kernel_size: 1
  97. weight_filler {
  98. type: "msra"
  99. }
  100. }
  101. }
  102. layer {
  103. name: "conv2_1/expand/bn"
  104. type: "BatchNorm"
  105. bottom: "conv2_1/expand"
  106. top: "conv2_1/expand/bn"
  107. }
  108. layer {
  109. name: "conv2_1/expand/scale"
  110. type: "Scale"
  111. bottom: "conv2_1/expand/bn"
  112. top: "conv2_1/expand/bn"
  113. param {
  114. lr_mult: 1
  115. decay_mult: 0
  116. }
  117. param {
  118. lr_mult: 1
  119. decay_mult: 0
  120. }
  121. scale_param {
  122. bias_term: true
  123. }
  124. }
  125. layer {
  126. name: "relu2_1/expand"
  127. type: "ReLU"
  128. bottom: "conv2_1/expand/bn"
  129. top: "conv2_1/expand/bn"
  130. }
  131. layer {
  132. name: "conv2_1/dwise"
  133. type: "ConvolutionDepthwise"
  134. bottom: "conv2_1/expand/bn"
  135. top: "conv2_1/dwise"
  136. param {
  137. lr_mult: 1
  138. decay_mult: 1
  139. }
  140. convolution_param {
  141. num_output: 32
  142. bias_term: false
  143. pad: 1
  144. kernel_size: 3
  145. group: 32
  146. weight_filler {
  147. type: "msra"
  148. }
  149. engine: CAFFE
  150. }
  151. }
  152. layer {
  153. name: "conv2_1/dwise/bn"
  154. type: "BatchNorm"
  155. bottom: "conv2_1/dwise"
  156. top: "conv2_1/dwise/bn"
  157. }
  158. layer {
  159. name: "conv2_1/dwise/scale"
  160. type: "Scale"
  161. bottom: "conv2_1/dwise/bn"
  162. top: "conv2_1/dwise/bn"
  163. param {
  164. lr_mult: 1
  165. decay_mult: 0
  166. }
  167. param {
  168. lr_mult: 1
  169. decay_mult: 0
  170. }
  171. scale_param {
  172. bias_term: true
  173. }
  174. }
  175. layer {
  176. name: "relu2_1/dwise"
  177. type: "ReLU"
  178. bottom: "conv2_1/dwise/bn"
  179. top: "conv2_1/dwise/bn"
  180. }
  181. layer {
  182. name: "conv2_1/linear"
  183. type: "Convolution"
  184. bottom: "conv2_1/dwise/bn"
  185. top: "conv2_1/linear"
  186. param {
  187. lr_mult: 1
  188. decay_mult: 1
  189. }
  190. convolution_param {
  191. num_output: 16
  192. bias_term: false
  193. kernel_size: 1
  194. weight_filler {
  195. type: "msra"
  196. }
  197. }
  198. }
  199. layer {
  200. name: "conv2_1/linear/bn"
  201. type: "BatchNorm"
  202. bottom: "conv2_1/linear"
  203. top: "conv2_1/linear/bn"
  204. }
  205. layer {
  206. name: "conv2_1/linear/scale"
  207. type: "Scale"
  208. bottom: "conv2_1/linear/bn"
  209. top: "conv2_1/linear/bn"
  210. param {
  211. lr_mult: 1
  212. decay_mult: 0
  213. }
  214. param {
  215. lr_mult: 1
  216. decay_mult: 0
  217. }
  218. scale_param {
  219. bias_term: true
  220. }
  221. }
  222. layer {
  223. name: "conv2_2/expand"
  224. type: "Convolution"
  225. bottom: "conv2_1/linear/bn"
  226. top: "conv2_2/expand"
  227. param {
  228. lr_mult: 1
  229. decay_mult: 1
  230. }
  231. convolution_param {
  232. num_output: 96
  233. bias_term: false
  234. kernel_size: 1
  235. weight_filler {
  236. type: "msra"
  237. }
  238. }
  239. }
  240. layer {
  241. name: "conv2_2/expand/bn"
  242. type: "BatchNorm"
  243. bottom: "conv2_2/expand"
  244. top: "conv2_2/expand/bn"
  245. }
  246. layer {
  247. name: "conv2_2/expand/scale"
  248. type: "Scale"
  249. bottom: "conv2_2/expand/bn"
  250. top: "conv2_2/expand/bn"
  251. param {
  252. lr_mult: 1
  253. decay_mult: 0
  254. }
  255. param {
  256. lr_mult: 1
  257. decay_mult: 0
  258. }
  259. scale_param {
  260. bias_term: true
  261. }
  262. }
  263. layer {
  264. name: "relu2_2/expand"
  265. type: "ReLU"
  266. bottom: "conv2_2/expand/bn"
  267. top: "conv2_2/expand/bn"
  268. }
  269. layer {
  270. name: "conv2_2/dwise"
  271. type: "ConvolutionDepthwise"
  272. bottom: "conv2_2/expand/bn"
  273. top: "conv2_2/dwise"
  274. param {
  275. lr_mult: 1
  276. decay_mult: 1
  277. }
  278. convolution_param {
  279. num_output: 96
  280. bias_term: false
  281. pad: 1
  282. kernel_size: 3
  283. group: 96
  284. stride: 2
  285. weight_filler {
  286. type: "msra"
  287. }
  288. engine: CAFFE
  289. }
  290. }
  291. layer {
  292. name: "conv2_2/dwise/bn"
  293. type: "BatchNorm"
  294. bottom: "conv2_2/dwise"
  295. top: "conv2_2/dwise/bn"
  296. }
  297. layer {
  298. name: "conv2_2/dwise/scale"
  299. type: "Scale"
  300. bottom: "conv2_2/dwise/bn"
  301. top: "conv2_2/dwise/bn"
  302. param {
  303. lr_mult: 1
  304. decay_mult: 0
  305. }
  306. param {
  307. lr_mult: 1
  308. decay_mult: 0
  309. }
  310. scale_param {
  311. bias_term: true
  312. }
  313. }
  314. layer {
  315. name: "relu2_2/dwise"
  316. type: "ReLU"
  317. bottom: "conv2_2/dwise/bn"
  318. top: "conv2_2/dwise/bn"
  319. }
  320. layer {
  321. name: "conv2_2/linear"
  322. type: "Convolution"
  323. bottom: "conv2_2/dwise/bn"
  324. top: "conv2_2/linear"
  325. param {
  326. lr_mult: 1
  327. decay_mult: 1
  328. }
  329. convolution_param {
  330. num_output: 24
  331. bias_term: false
  332. kernel_size: 1
  333. weight_filler {
  334. type: "msra"
  335. }
  336. }
  337. }
  338. layer {
  339. name: "conv2_2/linear/bn"
  340. type: "BatchNorm"
  341. bottom: "conv2_2/linear"
  342. top: "conv2_2/linear/bn"
  343. }
  344. layer {
  345. name: "conv2_2/linear/scale"
  346. type: "Scale"
  347. bottom: "conv2_2/linear/bn"
  348. top: "conv2_2/linear/bn"
  349. param {
  350. lr_mult: 1
  351. decay_mult: 0
  352. }
  353. param {
  354. lr_mult: 1
  355. decay_mult: 0
  356. }
  357. scale_param {
  358. bias_term: true
  359. }
  360. }
  361. layer {
  362. name: "conv3_1/expand"
  363. type: "Convolution"
  364. bottom: "conv2_2/linear/bn"
  365. top: "conv3_1/expand"
  366. param {
  367. lr_mult: 1
  368. decay_mult: 1
  369. }
  370. convolution_param {
  371. num_output: 144
  372. bias_term: false
  373. kernel_size: 1
  374. weight_filler {
  375. type: "msra"
  376. }
  377. }
  378. }
  379. layer {
  380. name: "conv3_1/expand/bn"
  381. type: "BatchNorm"
  382. bottom: "conv3_1/expand"
  383. top: "conv3_1/expand/bn"
  384. }
  385. layer {
  386. name: "conv3_1/expand/scale"
  387. type: "Scale"
  388. bottom: "conv3_1/expand/bn"
  389. top: "conv3_1/expand/bn"
  390. param {
  391. lr_mult: 1
  392. decay_mult: 0
  393. }
  394. param {
  395. lr_mult: 1
  396. decay_mult: 0
  397. }
  398. scale_param {
  399. bias_term: true
  400. }
  401. }
  402. layer {
  403. name: "relu3_1/expand"
  404. type: "ReLU"
  405. bottom: "conv3_1/expand/bn"
  406. top: "conv3_1/expand/bn"
  407. }
  408. layer {
  409. name: "conv3_1/dwise"
  410. type: "ConvolutionDepthwise"
  411. bottom: "conv3_1/expand/bn"
  412. top: "conv3_1/dwise"
  413. param {
  414. lr_mult: 1
  415. decay_mult: 1
  416. }
  417. convolution_param {
  418. num_output: 144
  419. bias_term: false
  420. pad: 1
  421. kernel_size: 3
  422. group: 144
  423. weight_filler {
  424. type: "msra"
  425. }
  426. engine: CAFFE
  427. }
  428. }
  429. layer {
  430. name: "conv3_1/dwise/bn"
  431. type: "BatchNorm"
  432. bottom: "conv3_1/dwise"
  433. top: "conv3_1/dwise/bn"
  434. }
  435. layer {
  436. name: "conv3_1/dwise/scale"
  437. type: "Scale"
  438. bottom: "conv3_1/dwise/bn"
  439. top: "conv3_1/dwise/bn"
  440. param {
  441. lr_mult: 1
  442. decay_mult: 0
  443. }
  444. param {
  445. lr_mult: 1
  446. decay_mult: 0
  447. }
  448. scale_param {
  449. bias_term: true
  450. }
  451. }
  452. layer {
  453. name: "relu3_1/dwise"
  454. type: "ReLU"
  455. bottom: "conv3_1/dwise/bn"
  456. top: "conv3_1/dwise/bn"
  457. }
  458. layer {
  459. name: "conv3_1/linear"
  460. type: "Convolution"
  461. bottom: "conv3_1/dwise/bn"
  462. top: "conv3_1/linear"
  463. param {
  464. lr_mult: 1
  465. decay_mult: 1
  466. }
  467. convolution_param {
  468. num_output: 24
  469. bias_term: false
  470. kernel_size: 1
  471. weight_filler {
  472. type: "msra"
  473. }
  474. }
  475. }
  476. layer {
  477. name: "conv3_1/linear/bn"
  478. type: "BatchNorm"
  479. bottom: "conv3_1/linear"
  480. top: "conv3_1/linear/bn"
  481. }
  482. layer {
  483. name: "conv3_1/linear/scale"
  484. type: "Scale"
  485. bottom: "conv3_1/linear/bn"
  486. top: "conv3_1/linear/bn"
  487. param {
  488. lr_mult: 1
  489. decay_mult: 0
  490. }
  491. param {
  492. lr_mult: 1
  493. decay_mult: 0
  494. }
  495. scale_param {
  496. bias_term: true
  497. }
  498. }
  499. layer {
  500. name: "block_3_1"
  501. type: "Eltwise"
  502. bottom: "conv2_2/linear/bn"
  503. bottom: "conv3_1/linear/bn"
  504. top: "block_3_1"
  505. }
  506. layer {
  507. name: "conv3_2/expand"
  508. type: "Convolution"
  509. bottom: "block_3_1"
  510. top: "conv3_2/expand"
  511. param {
  512. lr_mult: 1
  513. decay_mult: 1
  514. }
  515. convolution_param {
  516. num_output: 144
  517. bias_term: false
  518. kernel_size: 1
  519. weight_filler {
  520. type: "msra"
  521. }
  522. }
  523. }
  524. layer {
  525. name: "conv3_2/expand/bn"
  526. type: "BatchNorm"
  527. bottom: "conv3_2/expand"
  528. top: "conv3_2/expand/bn"
  529. }
  530. layer {
  531. name: "conv3_2/expand/scale"
  532. type: "Scale"
  533. bottom: "conv3_2/expand/bn"
  534. top: "conv3_2/expand/bn"
  535. param {
  536. lr_mult: 1
  537. decay_mult: 0
  538. }
  539. param {
  540. lr_mult: 1
  541. decay_mult: 0
  542. }
  543. scale_param {
  544. bias_term: true
  545. }
  546. }
  547. layer {
  548. name: "relu3_2/expand"
  549. type: "ReLU"
  550. bottom: "conv3_2/expand/bn"
  551. top: "conv3_2/expand/bn"
  552. }
  553. layer {
  554. name: "conv3_2/dwise"
  555. type: "ConvolutionDepthwise"
  556. bottom: "conv3_2/expand/bn"
  557. top: "conv3_2/dwise"
  558. param {
  559. lr_mult: 1
  560. decay_mult: 1
  561. }
  562. convolution_param {
  563. num_output: 144
  564. bias_term: false
  565. pad: 1
  566. kernel_size: 3
  567. group: 144
  568. stride: 2
  569. weight_filler {
  570. type: "msra"
  571. }
  572. engine: CAFFE
  573. }
  574. }
  575. layer {
  576. name: "conv3_2/dwise/bn"
  577. type: "BatchNorm"
  578. bottom: "conv3_2/dwise"
  579. top: "conv3_2/dwise/bn"
  580.  
  581. }
  582. layer {
  583. name: "conv3_2/dwise/scale"
  584. type: "Scale"
  585. bottom: "conv3_2/dwise/bn"
  586. top: "conv3_2/dwise/bn"
  587. param {
  588. lr_mult: 1
  589. decay_mult: 0
  590. }
  591. param {
  592. lr_mult: 1
  593. decay_mult: 0
  594. }
  595. scale_param {
  596. bias_term: true
  597. }
  598. }
  599. layer {
  600. name: "relu3_2/dwise"
  601. type: "ReLU"
  602. bottom: "conv3_2/dwise/bn"
  603. top: "conv3_2/dwise/bn"
  604. }
  605. layer {
  606. name: "conv3_2/linear"
  607. type: "Convolution"
  608. bottom: "conv3_2/dwise/bn"
  609. top: "conv3_2/linear"
  610. param {
  611. lr_mult: 1
  612. decay_mult: 1
  613. }
  614. convolution_param {
  615. num_output: 32
  616. bias_term: false
  617. kernel_size: 1
  618. weight_filler {
  619. type: "msra"
  620. }
  621. }
  622. }
  623. layer {
  624. name: "conv3_2/linear/bn"
  625. type: "BatchNorm"
  626. bottom: "conv3_2/linear"
  627. top: "conv3_2/linear/bn"
  628.  
  629. }
  630. layer {
  631. name: "conv3_2/linear/scale"
  632. type: "Scale"
  633. bottom: "conv3_2/linear/bn"
  634. top: "conv3_2/linear/bn"
  635. param {
  636. lr_mult: 1
  637. decay_mult: 0
  638. }
  639. param {
  640. lr_mult: 1
  641. decay_mult: 0
  642. }
  643. scale_param {
  644. bias_term: true
  645. }
  646. }
  647. layer {
  648. name: "conv4_1/expand"
  649. type: "Convolution"
  650. bottom: "conv3_2/linear/bn"
  651. top: "conv4_1/expand"
  652. param {
  653. lr_mult: 1
  654. decay_mult: 1
  655. }
  656. convolution_param {
  657. num_output: 192
  658. bias_term: false
  659. kernel_size: 1
  660. weight_filler {
  661. type: "msra"
  662. }
  663. }
  664. }
  665. layer {
  666. name: "conv4_1/expand/bn"
  667. type: "BatchNorm"
  668. bottom: "conv4_1/expand"
  669. top: "conv4_1/expand/bn"
  670.  
  671. }
  672. layer {
  673. name: "conv4_1/expand/scale"
  674. type: "Scale"
  675. bottom: "conv4_1/expand/bn"
  676. top: "conv4_1/expand/bn"
  677. param {
  678. lr_mult: 1
  679. decay_mult: 0
  680. }
  681. param {
  682. lr_mult: 1
  683. decay_mult: 0
  684. }
  685. scale_param {
  686. bias_term: true
  687. }
  688. }
  689. layer {
  690. name: "relu4_1/expand"
  691. type: "ReLU"
  692. bottom: "conv4_1/expand/bn"
  693. top: "conv4_1/expand/bn"
  694. }
  695. layer {
  696. name: "conv4_1/dwise"
  697. type: "ConvolutionDepthwise"
  698. bottom: "conv4_1/expand/bn"
  699. top: "conv4_1/dwise"
  700. param {
  701. lr_mult: 1
  702. decay_mult: 1
  703. }
  704. convolution_param {
  705. num_output: 192
  706. bias_term: false
  707. pad: 1
  708. kernel_size: 3
  709. group: 192
  710. weight_filler {
  711. type: "msra"
  712. }
  713. engine: CAFFE
  714. }
  715. }
  716. layer {
  717. name: "conv4_1/dwise/bn"
  718. type: "BatchNorm"
  719. bottom: "conv4_1/dwise"
  720. top: "conv4_1/dwise/bn"
  721.  
  722. }
  723. layer {
  724. name: "conv4_1/dwise/scale"
  725. type: "Scale"
  726. bottom: "conv4_1/dwise/bn"
  727. top: "conv4_1/dwise/bn"
  728. param {
  729. lr_mult: 1
  730. decay_mult: 0
  731. }
  732. param {
  733. lr_mult: 1
  734. decay_mult: 0
  735. }
  736. scale_param {
  737. bias_term: true
  738. }
  739. }
  740. layer {
  741. name: "relu4_1/dwise"
  742. type: "ReLU"
  743. bottom: "conv4_1/dwise/bn"
  744. top: "conv4_1/dwise/bn"
  745. }
  746. layer {
  747. name: "conv4_1/linear"
  748. type: "Convolution"
  749. bottom: "conv4_1/dwise/bn"
  750. top: "conv4_1/linear"
  751. param {
  752. lr_mult: 1
  753. decay_mult: 1
  754. }
  755. convolution_param {
  756. num_output: 32
  757. bias_term: false
  758. kernel_size: 1
  759. weight_filler {
  760. type: "msra"
  761. }
  762. }
  763. }
  764. layer {
  765. name: "conv4_1/linear/bn"
  766. type: "BatchNorm"
  767. bottom: "conv4_1/linear"
  768. top: "conv4_1/linear/bn"
  769.  
  770. }
  771. layer {
  772. name: "conv4_1/linear/scale"
  773. type: "Scale"
  774. bottom: "conv4_1/linear/bn"
  775. top: "conv4_1/linear/bn"
  776. param {
  777. lr_mult: 1
  778. decay_mult: 0
  779. }
  780. param {
  781. lr_mult: 1
  782. decay_mult: 0
  783. }
  784. scale_param {
  785. bias_term: true
  786. }
  787. }
  788. layer {
  789. name: "block_4_1"
  790. type: "Eltwise"
  791. bottom: "conv3_2/linear/bn"
  792. bottom: "conv4_1/linear/bn"
  793. top: "block_4_1"
  794. }
  795. layer {
  796. name: "conv4_2/expand"
  797. type: "Convolution"
  798. bottom: "block_4_1"
  799. top: "conv4_2/expand"
  800. param {
  801. lr_mult: 1
  802. decay_mult: 1
  803. }
  804. convolution_param {
  805. num_output: 192
  806. bias_term: false
  807. kernel_size: 1
  808. weight_filler {
  809. type: "msra"
  810. }
  811. }
  812. }
  813. layer {
  814. name: "conv4_2/expand/bn"
  815. type: "BatchNorm"
  816. bottom: "conv4_2/expand"
  817. top: "conv4_2/expand/bn"
  818.  
  819. }
  820. layer {
  821. name: "conv4_2/expand/scale"
  822. type: "Scale"
  823. bottom: "conv4_2/expand/bn"
  824. top: "conv4_2/expand/bn"
  825. param {
  826. lr_mult: 1
  827. decay_mult: 0
  828. }
  829. param {
  830. lr_mult: 1
  831. decay_mult: 0
  832. }
  833. scale_param {
  834. bias_term: true
  835. }
  836. }
  837. layer {
  838. name: "relu4_2/expand"
  839. type: "ReLU"
  840. bottom: "conv4_2/expand/bn"
  841. top: "conv4_2/expand/bn"
  842. }
  843. layer {
  844. name: "conv4_2/dwise"
  845. type: "ConvolutionDepthwise"
  846. bottom: "conv4_2/expand/bn"
  847. top: "conv4_2/dwise"
  848. param {
  849. lr_mult: 1
  850. decay_mult: 1
  851. }
  852. convolution_param {
  853. num_output: 192
  854. bias_term: false
  855. pad: 1
  856. kernel_size: 3
  857. group: 192
  858. weight_filler {
  859. type: "msra"
  860. }
  861. engine: CAFFE
  862. }
  863. }
  864. layer {
  865. name: "conv4_2/dwise/bn"
  866. type: "BatchNorm"
  867. bottom: "conv4_2/dwise"
  868. top: "conv4_2/dwise/bn"
  869.  
  870. }
  871. layer {
  872. name: "conv4_2/dwise/scale"
  873. type: "Scale"
  874. bottom: "conv4_2/dwise/bn"
  875. top: "conv4_2/dwise/bn"
  876. param {
  877. lr_mult: 1
  878. decay_mult: 0
  879. }
  880. param {
  881. lr_mult: 1
  882. decay_mult: 0
  883. }
  884. scale_param {
  885. bias_term: true
  886. }
  887. }
  888. layer {
  889. name: "relu4_2/dwise"
  890. type: "ReLU"
  891. bottom: "conv4_2/dwise/bn"
  892. top: "conv4_2/dwise/bn"
  893. }
  894. layer {
  895. name: "conv4_2/linear"
  896. type: "Convolution"
  897. bottom: "conv4_2/dwise/bn"
  898. top: "conv4_2/linear"
  899. param {
  900. lr_mult: 1
  901. decay_mult: 1
  902. }
  903. convolution_param {
  904. num_output: 32
  905. bias_term: false
  906. kernel_size: 1
  907. weight_filler {
  908. type: "msra"
  909. }
  910. }
  911. }
  912. layer {
  913. name: "conv4_2/linear/bn"
  914. type: "BatchNorm"
  915. bottom: "conv4_2/linear"
  916. top: "conv4_2/linear/bn"
  917.  
  918. }
  919. layer {
  920. name: "conv4_2/linear/scale"
  921. type: "Scale"
  922. bottom: "conv4_2/linear/bn"
  923. top: "conv4_2/linear/bn"
  924. param {
  925. lr_mult: 1
  926. decay_mult: 0
  927. }
  928. param {
  929. lr_mult: 1
  930. decay_mult: 0
  931. }
  932. scale_param {
  933. bias_term: true
  934. }
  935. }
  936. layer {
  937. name: "block_4_2"
  938. type: "Eltwise"
  939. bottom: "block_4_1"
  940. bottom: "conv4_2/linear/bn"
  941. top: "block_4_2"
  942. }
  943. layer {
  944. name: "conv4_3/expand"
  945. type: "Convolution"
  946. bottom: "block_4_2"
  947. top: "conv4_3/expand"
  948. param {
  949. lr_mult: 1
  950. decay_mult: 1
  951. }
  952. convolution_param {
  953. num_output: 192
  954. bias_term: false
  955. kernel_size: 1
  956. weight_filler {
  957. type: "msra"
  958. }
  959. }
  960. }
  961. layer {
  962. name: "conv4_3/expand/bn"
  963. type: "BatchNorm"
  964. bottom: "conv4_3/expand"
  965. top: "conv4_3/expand/bn"
  966.  
  967. }
  968. layer {
  969. name: "conv4_3/expand/scale"
  970. type: "Scale"
  971. bottom: "conv4_3/expand/bn"
  972. top: "conv4_3/expand/bn"
  973. param {
  974. lr_mult: 1
  975. decay_mult: 0
  976. }
  977. param {
  978. lr_mult: 1
  979. decay_mult: 0
  980. }
  981. scale_param {
  982. bias_term: true
  983. }
  984. }
  985. layer {
  986. name: "relu4_3/expand"
  987. type: "ReLU"
  988. bottom: "conv4_3/expand/bn"
  989. top: "conv4_3/expand/bn"
  990. }
  991. layer {
  992. name: "conv4_3/dwise"
  993. type: "ConvolutionDepthwise"
  994. bottom: "conv4_3/expand/bn"
  995. top: "conv4_3/dwise"
  996. param {
  997. lr_mult: 1
  998. decay_mult: 1
  999. }
  1000. convolution_param {
  1001. num_output: 192
  1002. bias_term: false
  1003. pad: 1
  1004. kernel_size: 3
  1005. group: 192
  1006. weight_filler {
  1007. type: "msra"
  1008. }
  1009. engine: CAFFE
  1010. }
  1011. }
  1012. layer {
  1013. name: "conv4_3/dwise/bn"
  1014. type: "BatchNorm"
  1015. bottom: "conv4_3/dwise"
  1016. top: "conv4_3/dwise/bn"
  1017.  
  1018. }
  1019. layer {
  1020. name: "conv4_3/dwise/scale"
  1021. type: "Scale"
  1022. bottom: "conv4_3/dwise/bn"
  1023. top: "conv4_3/dwise/bn"
  1024. param {
  1025. lr_mult: 1
  1026. decay_mult: 0
  1027. }
  1028. param {
  1029. lr_mult: 1
  1030. decay_mult: 0
  1031. }
  1032. scale_param {
  1033. bias_term: true
  1034. }
  1035. }
  1036. layer {
  1037. name: "relu4_3/dwise"
  1038. type: "ReLU"
  1039. bottom: "conv4_3/dwise/bn"
  1040. top: "conv4_3/dwise/bn"
  1041. }
  1042. layer {
  1043. name: "conv4_3/linear"
  1044. type: "Convolution"
  1045. bottom: "conv4_3/dwise/bn"
  1046. top: "conv4_3/linear"
  1047. param {
  1048. lr_mult: 1
  1049. decay_mult: 1
  1050. }
  1051. convolution_param {
  1052. num_output: 64
  1053. bias_term: false
  1054. kernel_size: 1
  1055. weight_filler {
  1056. type: "msra"
  1057. }
  1058. }
  1059. }
  1060. layer {
  1061. name: "conv4_3/linear/bn"
  1062. type: "BatchNorm"
  1063. bottom: "conv4_3/linear"
  1064. top: "conv4_3/linear/bn"
  1065.  
  1066. }
  1067. layer {
  1068. name: "conv4_3/linear/scale"
  1069. type: "Scale"
  1070. bottom: "conv4_3/linear/bn"
  1071. top: "conv4_3/linear/bn"
  1072. param {
  1073. lr_mult: 1
  1074. decay_mult: 0
  1075. }
  1076. param {
  1077. lr_mult: 1
  1078. decay_mult: 0
  1079. }
  1080. scale_param {
  1081. bias_term: true
  1082. }
  1083. }
  1084. layer {
  1085. name: "conv4_4/expand"
  1086. type: "Convolution"
  1087. bottom: "conv4_3/linear/bn"
  1088. top: "conv4_4/expand"
  1089. param {
  1090. lr_mult: 1
  1091. decay_mult: 1
  1092. }
  1093. convolution_param {
  1094. num_output: 384
  1095. bias_term: false
  1096. kernel_size: 1
  1097. weight_filler {
  1098. type: "msra"
  1099. }
  1100. }
  1101. }
  1102. layer {
  1103. name: "conv4_4/expand/bn"
  1104. type: "BatchNorm"
  1105. bottom: "conv4_4/expand"
  1106. top: "conv4_4/expand/bn"
  1107.  
  1108. }
  1109. layer {
  1110. name: "conv4_4/expand/scale"
  1111. type: "Scale"
  1112. bottom: "conv4_4/expand/bn"
  1113. top: "conv4_4/expand/bn"
  1114. param {
  1115. lr_mult: 1
  1116. decay_mult: 0
  1117. }
  1118. param {
  1119. lr_mult: 1
  1120. decay_mult: 0
  1121. }
  1122. scale_param {
  1123. bias_term: true
  1124. }
  1125. }
  1126. layer {
  1127. name: "relu4_4/expand"
  1128. type: "ReLU"
  1129. bottom: "conv4_4/expand/bn"
  1130. top: "conv4_4/expand/bn"
  1131. }
  1132. layer {
  1133. name: "conv4_4/dwise"
  1134. type: "ConvolutionDepthwise"
  1135. bottom: "conv4_4/expand/bn"
  1136. top: "conv4_4/dwise"
  1137. param {
  1138. lr_mult: 1
  1139. decay_mult: 1
  1140. }
  1141. convolution_param {
  1142. num_output: 384
  1143. bias_term: false
  1144. pad: 1
  1145. kernel_size: 3
  1146. group: 384
  1147. weight_filler {
  1148. type: "msra"
  1149. }
  1150. engine: CAFFE
  1151. }
  1152. }
  1153. layer {
  1154. name: "conv4_4/dwise/bn"
  1155. type: "BatchNorm"
  1156. bottom: "conv4_4/dwise"
  1157. top: "conv4_4/dwise/bn"
  1158.  
  1159. }
  1160. layer {
  1161. name: "conv4_4/dwise/scale"
  1162. type: "Scale"
  1163. bottom: "conv4_4/dwise/bn"
  1164. top: "conv4_4/dwise/bn"
  1165. param {
  1166. lr_mult: 1
  1167. decay_mult: 0
  1168. }
  1169. param {
  1170. lr_mult: 1
  1171. decay_mult: 0
  1172. }
  1173. scale_param {
  1174. bias_term: true
  1175. }
  1176. }
  1177. layer {
  1178. name: "relu4_4/dwise"
  1179. type: "ReLU"
  1180. bottom: "conv4_4/dwise/bn"
  1181. top: "conv4_4/dwise/bn"
  1182. }
  1183. layer {
  1184. name: "conv4_4/linear"
  1185. type: "Convolution"
  1186. bottom: "conv4_4/dwise/bn"
  1187. top: "conv4_4/linear"
  1188. param {
  1189. lr_mult: 1
  1190. decay_mult: 1
  1191. }
  1192. convolution_param {
  1193. num_output: 64
  1194. bias_term: false
  1195. kernel_size: 1
  1196. weight_filler {
  1197. type: "msra"
  1198. }
  1199. }
  1200. }
  1201. layer {
  1202. name: "conv4_4/linear/bn"
  1203. type: "BatchNorm"
  1204. bottom: "conv4_4/linear"
  1205. top: "conv4_4/linear/bn"
  1206.  
  1207. }
  1208. layer {
  1209. name: "conv4_4/linear/scale"
  1210. type: "Scale"
  1211. bottom: "conv4_4/linear/bn"
  1212. top: "conv4_4/linear/bn"
  1213. param {
  1214. lr_mult: 1
  1215. decay_mult: 0
  1216. }
  1217. param {
  1218. lr_mult: 1
  1219. decay_mult: 0
  1220. }
  1221. scale_param {
  1222. bias_term: true
  1223. }
  1224. }
  1225. layer {
  1226. name: "block_4_4"
  1227. type: "Eltwise"
  1228. bottom: "conv4_3/linear/bn"
  1229. bottom: "conv4_4/linear/bn"
  1230. top: "block_4_4"
  1231. }
  1232. layer {
  1233. name: "conv4_5/expand"
  1234. type: "Convolution"
  1235. bottom: "block_4_4"
  1236. top: "conv4_5/expand"
  1237. param {
  1238. lr_mult: 1
  1239. decay_mult: 1
  1240. }
  1241. convolution_param {
  1242. num_output: 384
  1243. bias_term: false
  1244. kernel_size: 1
  1245. weight_filler {
  1246. type: "msra"
  1247. }
  1248. }
  1249. }
  1250. layer {
  1251. name: "conv4_5/expand/bn"
  1252. type: "BatchNorm"
  1253. bottom: "conv4_5/expand"
  1254. top: "conv4_5/expand/bn"
  1255.  
  1256. }
  1257. layer {
  1258. name: "conv4_5/expand/scale"
  1259. type: "Scale"
  1260. bottom: "conv4_5/expand/bn"
  1261. top: "conv4_5/expand/bn"
  1262. param {
  1263. lr_mult: 1
  1264. decay_mult: 0
  1265. }
  1266. param {
  1267. lr_mult: 1
  1268. decay_mult: 0
  1269. }
  1270. scale_param {
  1271. bias_term: true
  1272. }
  1273. }
  1274. layer {
  1275. name: "relu4_5/expand"
  1276. type: "ReLU"
  1277. bottom: "conv4_5/expand/bn"
  1278. top: "conv4_5/expand/bn"
  1279. }
  1280. layer {
  1281. name: "conv4_5/dwise"
  1282. type: "ConvolutionDepthwise"
  1283. bottom: "conv4_5/expand/bn"
  1284. top: "conv4_5/dwise"
  1285. param {
  1286. lr_mult: 1
  1287. decay_mult: 1
  1288. }
  1289. convolution_param {
  1290. num_output: 384
  1291. bias_term: false
  1292. pad: 1
  1293. kernel_size: 3
  1294. group: 384
  1295. weight_filler {
  1296. type: "msra"
  1297. }
  1298. engine: CAFFE
  1299. }
  1300. }
  1301. layer {
  1302. name: "conv4_5/dwise/bn"
  1303. type: "BatchNorm"
  1304. bottom: "conv4_5/dwise"
  1305. top: "conv4_5/dwise/bn"
  1306.  
  1307. }
  1308. layer {
  1309. name: "conv4_5/dwise/scale"
  1310. type: "Scale"
  1311. bottom: "conv4_5/dwise/bn"
  1312. top: "conv4_5/dwise/bn"
  1313. param {
  1314. lr_mult: 1
  1315. decay_mult: 0
  1316. }
  1317. param {
  1318. lr_mult: 1
  1319. decay_mult: 0
  1320. }
  1321. scale_param {
  1322. bias_term: true
  1323. }
  1324. }
  1325. layer {
  1326. name: "relu4_5/dwise"
  1327. type: "ReLU"
  1328. bottom: "conv4_5/dwise/bn"
  1329. top: "conv4_5/dwise/bn"
  1330. }
  1331. layer {
  1332. name: "conv4_5/linear"
  1333. type: "Convolution"
  1334. bottom: "conv4_5/dwise/bn"
  1335. top: "conv4_5/linear"
  1336. param {
  1337. lr_mult: 1
  1338. decay_mult: 1
  1339. }
  1340. convolution_param {
  1341. num_output: 64
  1342. bias_term: false
  1343. kernel_size: 1
  1344. weight_filler {
  1345. type: "msra"
  1346. }
  1347. }
  1348. }
  1349. layer {
  1350. name: "conv4_5/linear/bn"
  1351. type: "BatchNorm"
  1352. bottom: "conv4_5/linear"
  1353. top: "conv4_5/linear/bn"
  1354.  
  1355. }
  1356. layer {
  1357. name: "conv4_5/linear/scale"
  1358. type: "Scale"
  1359. bottom: "conv4_5/linear/bn"
  1360. top: "conv4_5/linear/bn"
  1361. param {
  1362. lr_mult: 1
  1363. decay_mult: 0
  1364. }
  1365. param {
  1366. lr_mult: 1
  1367. decay_mult: 0
  1368. }
  1369. scale_param {
  1370. bias_term: true
  1371. }
  1372. }
  1373. layer {
  1374. name: "block_4_5"
  1375. type: "Eltwise"
  1376. bottom: "block_4_4"
  1377. bottom: "conv4_5/linear/bn"
  1378. top: "block_4_5"
  1379. }
  1380. layer {
  1381. name: "conv4_6/expand"
  1382. type: "Convolution"
  1383. bottom: "block_4_5"
  1384. top: "conv4_6/expand"
  1385. param {
  1386. lr_mult: 1
  1387. decay_mult: 1
  1388. }
  1389. convolution_param {
  1390. num_output: 384
  1391. bias_term: false
  1392. kernel_size: 1
  1393. weight_filler {
  1394. type: "msra"
  1395. }
  1396. }
  1397. }
  1398. layer {
  1399. name: "conv4_6/expand/bn"
  1400. type: "BatchNorm"
  1401. bottom: "conv4_6/expand"
  1402. top: "conv4_6/expand/bn"
  1403.  
  1404. }
  1405. layer {
  1406. name: "conv4_6/expand/scale"
  1407. type: "Scale"
  1408. bottom: "conv4_6/expand/bn"
  1409. top: "conv4_6/expand/bn"
  1410. param {
  1411. lr_mult: 1
  1412. decay_mult: 0
  1413. }
  1414. param {
  1415. lr_mult: 1
  1416. decay_mult: 0
  1417. }
  1418. scale_param {
  1419. bias_term: true
  1420. }
  1421. }
  1422. layer {
  1423. name: "relu4_6/expand"
  1424. type: "ReLU"
  1425. bottom: "conv4_6/expand/bn"
  1426. top: "conv4_6/expand/bn"
  1427. }
  1428. layer {
  1429. name: "conv4_6/dwise"
  1430. type: "ConvolutionDepthwise"
  1431. bottom: "conv4_6/expand/bn"
  1432. top: "conv4_6/dwise"
  1433. param {
  1434. lr_mult: 1
  1435. decay_mult: 1
  1436. }
  1437. convolution_param {
  1438. num_output: 384
  1439. bias_term: false
  1440. pad: 1
  1441. kernel_size: 3
  1442. group: 384
  1443. weight_filler {
  1444. type: "msra"
  1445. }
  1446. engine: CAFFE
  1447. }
  1448. }
  1449. layer {
  1450. name: "conv4_6/dwise/bn"
  1451. type: "BatchNorm"
  1452. bottom: "conv4_6/dwise"
  1453. top: "conv4_6/dwise/bn"
  1454.  
  1455. }
  1456. layer {
  1457. name: "conv4_6/dwise/scale"
  1458. type: "Scale"
  1459. bottom: "conv4_6/dwise/bn"
  1460. top: "conv4_6/dwise/bn"
  1461. param {
  1462. lr_mult: 1
  1463. decay_mult: 0
  1464. }
  1465. param {
  1466. lr_mult: 1
  1467. decay_mult: 0
  1468. }
  1469. scale_param {
  1470. bias_term: true
  1471. }
  1472. }
  1473. layer {
  1474. name: "relu4_6/dwise"
  1475. type: "ReLU"
  1476. bottom: "conv4_6/dwise/bn"
  1477. top: "conv4_6/dwise/bn"
  1478. }
  1479. layer {
  1480. name: "conv4_6/linear"
  1481. type: "Convolution"
  1482. bottom: "conv4_6/dwise/bn"
  1483. top: "conv4_6/linear"
  1484. param {
  1485. lr_mult: 1
  1486. decay_mult: 1
  1487. }
  1488. convolution_param {
  1489. num_output: 64
  1490. bias_term: false
  1491. kernel_size: 1
  1492. weight_filler {
  1493. type: "msra"
  1494. }
  1495. }
  1496. }
  1497. layer {
  1498. name: "conv4_6/linear/bn"
  1499. type: "BatchNorm"
  1500. bottom: "conv4_6/linear"
  1501. top: "conv4_6/linear/bn"
  1502.  
  1503. }
  1504. layer {
  1505. name: "conv4_6/linear/scale"
  1506. type: "Scale"
  1507. bottom: "conv4_6/linear/bn"
  1508. top: "conv4_6/linear/bn"
  1509. param {
  1510. lr_mult: 1
  1511. decay_mult: 0
  1512. }
  1513. param {
  1514. lr_mult: 1
  1515. decay_mult: 0
  1516. }
  1517. scale_param {
  1518. bias_term: true
  1519. }
  1520. }
  1521. layer {
  1522. name: "block_4_6"
  1523. type: "Eltwise"
  1524. bottom: "block_4_5"
  1525. bottom: "conv4_6/linear/bn"
  1526. top: "block_4_6"
  1527. }
  1528. layer {
  1529. name: "conv4_7/expand"
  1530. type: "Convolution"
  1531. bottom: "block_4_6"
  1532. top: "conv4_7/expand"
  1533. param {
  1534. lr_mult: 1
  1535. decay_mult: 1
  1536. }
  1537. convolution_param {
  1538. num_output: 384
  1539. bias_term: false
  1540. kernel_size: 1
  1541. weight_filler {
  1542. type: "msra"
  1543. }
  1544. }
  1545. }
  1546. layer {
  1547. name: "conv4_7/expand/bn"
  1548. type: "BatchNorm"
  1549. bottom: "conv4_7/expand"
  1550. top: "conv4_7/expand/bn"
  1551.  
  1552. }
  1553. layer {
  1554. name: "conv4_7/expand/scale"
  1555. type: "Scale"
  1556. bottom: "conv4_7/expand/bn"
  1557. top: "conv4_7/expand/bn"
  1558. param {
  1559. lr_mult: 1
  1560. decay_mult: 0
  1561. }
  1562. param {
  1563. lr_mult: 1
  1564. decay_mult: 0
  1565. }
  1566. scale_param {
  1567. bias_term: true
  1568. }
  1569. }
  1570. layer {
  1571. name: "relu4_7/expand"
  1572. type: "ReLU"
  1573. bottom: "conv4_7/expand/bn"
  1574. top: "conv4_7/expand/bn"
  1575. }
  1576. layer {
  1577. name: "conv4_7/dwise"
  1578. type: "ConvolutionDepthwise"
  1579. bottom: "conv4_7/expand/bn"
  1580. top: "conv4_7/dwise"
  1581. param {
  1582. lr_mult: 1
  1583. decay_mult: 1
  1584. }
  1585. convolution_param {
  1586. num_output: 384
  1587. bias_term: false
  1588. pad: 1
  1589. kernel_size: 3
  1590. group: 384
  1591. stride: 2
  1592. weight_filler {
  1593. type: "msra"
  1594. }
  1595. engine: CAFFE
  1596. }
  1597. }
  1598. layer {
  1599. name: "conv4_7/dwise/bn"
  1600. type: "BatchNorm"
  1601. bottom: "conv4_7/dwise"
  1602. top: "conv4_7/dwise/bn"
  1603.  
  1604. }
  1605. layer {
  1606. name: "conv4_7/dwise/scale"
  1607. type: "Scale"
  1608. bottom: "conv4_7/dwise/bn"
  1609. top: "conv4_7/dwise/bn"
  1610. param {
  1611. lr_mult: 1
  1612. decay_mult: 0
  1613. }
  1614. param {
  1615. lr_mult: 1
  1616. decay_mult: 0
  1617. }
  1618. scale_param {
  1619. bias_term: true
  1620. }
  1621. }
  1622. layer {
  1623. name: "relu4_7/dwise"
  1624. type: "ReLU"
  1625. bottom: "conv4_7/dwise/bn"
  1626. top: "conv4_7/dwise/bn"
  1627. }
  1628. layer {
  1629. name: "conv4_7/linear"
  1630. type: "Convolution"
  1631. bottom: "conv4_7/dwise/bn"
  1632. top: "conv4_7/linear"
  1633. param {
  1634. lr_mult: 1
  1635. decay_mult: 1
  1636. }
  1637. convolution_param {
  1638. num_output: 96
  1639. bias_term: false
  1640. kernel_size: 1
  1641. weight_filler {
  1642. type: "msra"
  1643. }
  1644. }
  1645. }
  1646. layer {
  1647. name: "conv4_7/linear/bn"
  1648. type: "BatchNorm"
  1649. bottom: "conv4_7/linear"
  1650. top: "conv4_7/linear/bn"
  1651.  
  1652. }
  1653. layer {
  1654. name: "conv4_7/linear/scale"
  1655. type: "Scale"
  1656. bottom: "conv4_7/linear/bn"
  1657. top: "conv4_7/linear/bn"
  1658. param {
  1659. lr_mult: 1
  1660. decay_mult: 0
  1661. }
  1662. param {
  1663. lr_mult: 1
  1664. decay_mult: 0
  1665. }
  1666. scale_param {
  1667. bias_term: true
  1668. }
  1669. }
  1670. layer {
  1671. name: "conv5_1/expand"
  1672. type: "Convolution"
  1673. bottom: "conv4_7/linear/bn"
  1674. top: "conv5_1/expand"
  1675. param {
  1676. lr_mult: 1
  1677. decay_mult: 1
  1678. }
  1679. convolution_param {
  1680. num_output: 576
  1681. bias_term: false
  1682. kernel_size: 1
  1683. weight_filler {
  1684. type: "msra"
  1685. }
  1686. }
  1687. }
  1688. layer {
  1689. name: "conv5_1/expand/bn"
  1690. type: "BatchNorm"
  1691. bottom: "conv5_1/expand"
  1692. top: "conv5_1/expand/bn"
  1693.  
  1694. }
  1695. layer {
  1696. name: "conv5_1/expand/scale"
  1697. type: "Scale"
  1698. bottom: "conv5_1/expand/bn"
  1699. top: "conv5_1/expand/bn"
  1700. param {
  1701. lr_mult: 1
  1702. decay_mult: 0
  1703. }
  1704. param {
  1705. lr_mult: 1
  1706. decay_mult: 0
  1707. }
  1708. scale_param {
  1709. bias_term: true
  1710. }
  1711. }
  1712. layer {
  1713. name: "relu5_1/expand"
  1714. type: "ReLU"
  1715. bottom: "conv5_1/expand/bn"
  1716. top: "conv5_1/expand/bn"
  1717. }
  1718. layer {
  1719. name: "conv5_1/dwise"
  1720. type: "ConvolutionDepthwise"
  1721. bottom: "conv5_1/expand/bn"
  1722. top: "conv5_1/dwise"
  1723. param {
  1724. lr_mult: 1
  1725. decay_mult: 1
  1726. }
  1727. convolution_param {
  1728. num_output: 576
  1729. bias_term: false
  1730. pad: 1
  1731. kernel_size: 3
  1732. group: 576
  1733. weight_filler {
  1734. type: "msra"
  1735. }
  1736. engine: CAFFE
  1737. }
  1738. }
  1739. layer {
  1740. name: "conv5_1/dwise/bn"
  1741. type: "BatchNorm"
  1742. bottom: "conv5_1/dwise"
  1743. top: "conv5_1/dwise/bn"
  1744.  
  1745. }
  1746. layer {
  1747. name: "conv5_1/dwise/scale"
  1748. type: "Scale"
  1749. bottom: "conv5_1/dwise/bn"
  1750. top: "conv5_1/dwise/bn"
  1751. param {
  1752. lr_mult: 1
  1753. decay_mult: 0
  1754. }
  1755. param {
  1756. lr_mult: 1
  1757. decay_mult: 0
  1758. }
  1759. scale_param {
  1760. bias_term: true
  1761. }
  1762. }
  1763. layer {
  1764. name: "relu5_1/dwise"
  1765. type: "ReLU"
  1766. bottom: "conv5_1/dwise/bn"
  1767. top: "conv5_1/dwise/bn"
  1768. }
  1769. layer {
  1770. name: "conv5_1/linear"
  1771. type: "Convolution"
  1772. bottom: "conv5_1/dwise/bn"
  1773. top: "conv5_1/linear"
  1774. param {
  1775. lr_mult: 1
  1776. decay_mult: 1
  1777. }
  1778. convolution_param {
  1779. num_output: 96
  1780. bias_term: false
  1781. kernel_size: 1
  1782. weight_filler {
  1783. type: "msra"
  1784. }
  1785. }
  1786. }
  1787. layer {
  1788. name: "conv5_1/linear/bn"
  1789. type: "BatchNorm"
  1790. bottom: "conv5_1/linear"
  1791. top: "conv5_1/linear/bn"
  1792.  
  1793. }
  1794. layer {
  1795. name: "conv5_1/linear/scale"
  1796. type: "Scale"
  1797. bottom: "conv5_1/linear/bn"
  1798. top: "conv5_1/linear/bn"
  1799. param {
  1800. lr_mult: 1
  1801. decay_mult: 0
  1802. }
  1803. param {
  1804. lr_mult: 1
  1805. decay_mult: 0
  1806. }
  1807. scale_param {
  1808. bias_term: true
  1809. }
  1810. }
  1811. layer {
  1812. name: "block_5_1"
  1813. type: "Eltwise"
  1814. bottom: "conv4_7/linear/bn"
  1815. bottom: "conv5_1/linear/bn"
  1816. top: "block_5_1"
  1817. }
  1818. layer {
  1819. name: "conv5_2/expand"
  1820. type: "Convolution"
  1821. bottom: "block_5_1"
  1822. top: "conv5_2/expand"
  1823. param {
  1824. lr_mult: 1
  1825. decay_mult: 1
  1826. }
  1827. convolution_param {
  1828. num_output: 576
  1829. bias_term: false
  1830. kernel_size: 1
  1831. weight_filler {
  1832. type: "msra"
  1833. }
  1834. }
  1835. }
  1836. layer {
  1837. name: "conv5_2/expand/bn"
  1838. type: "BatchNorm"
  1839. bottom: "conv5_2/expand"
  1840. top: "conv5_2/expand/bn"
  1841.  
  1842. }
  1843. layer {
  1844. name: "conv5_2/expand/scale"
  1845. type: "Scale"
  1846. bottom: "conv5_2/expand/bn"
  1847. top: "conv5_2/expand/bn"
  1848. param {
  1849. lr_mult: 1
  1850. decay_mult: 0
  1851. }
  1852. param {
  1853. lr_mult: 1
  1854. decay_mult: 0
  1855. }
  1856. scale_param {
  1857. bias_term: true
  1858. }
  1859. }
  1860. layer {
  1861. name: "relu5_2/expand"
  1862. type: "ReLU"
  1863. bottom: "conv5_2/expand/bn"
  1864. top: "conv5_2/expand/bn"
  1865. }
  1866. layer {
  1867. name: "conv5_2/dwise"
  1868. type: "ConvolutionDepthwise"
  1869. bottom: "conv5_2/expand/bn"
  1870. top: "conv5_2/dwise"
  1871. param {
  1872. lr_mult: 1
  1873. decay_mult: 1
  1874. }
  1875. convolution_param {
  1876. num_output: 576
  1877. bias_term: false
  1878. pad: 1
  1879. kernel_size: 3
  1880. group: 576
  1881. weight_filler {
  1882. type: "msra"
  1883. }
  1884. engine: CAFFE
  1885. }
  1886. }
  1887. layer {
  1888. name: "conv5_2/dwise/bn"
  1889. type: "BatchNorm"
  1890. bottom: "conv5_2/dwise"
  1891. top: "conv5_2/dwise/bn"
  1892.  
  1893. }
  1894. layer {
  1895. name: "conv5_2/dwise/scale"
  1896. type: "Scale"
  1897. bottom: "conv5_2/dwise/bn"
  1898. top: "conv5_2/dwise/bn"
  1899. param {
  1900. lr_mult: 1
  1901. decay_mult: 0
  1902. }
  1903. param {
  1904. lr_mult: 1
  1905. decay_mult: 0
  1906. }
  1907. scale_param {
  1908. bias_term: true
  1909. }
  1910. }
  1911. layer {
  1912. name: "relu5_2/dwise"
  1913. type: "ReLU"
  1914. bottom: "conv5_2/dwise/bn"
  1915. top: "conv5_2/dwise/bn"
  1916. }
  1917. layer {
  1918. name: "conv5_2/linear"
  1919. type: "Convolution"
  1920. bottom: "conv5_2/dwise/bn"
  1921. top: "conv5_2/linear"
  1922. param {
  1923. lr_mult: 1
  1924. decay_mult: 1
  1925. }
  1926. convolution_param {
  1927. num_output: 96
  1928. bias_term: false
  1929. kernel_size: 1
  1930. weight_filler {
  1931. type: "msra"
  1932. }
  1933. }
  1934. }
  1935. layer {
  1936. name: "conv5_2/linear/bn"
  1937. type: "BatchNorm"
  1938. bottom: "conv5_2/linear"
  1939. top: "conv5_2/linear/bn"
  1940.  
  1941. }
  1942. layer {
  1943. name: "conv5_2/linear/scale"
  1944. type: "Scale"
  1945. bottom: "conv5_2/linear/bn"
  1946. top: "conv5_2/linear/bn"
  1947. param {
  1948. lr_mult: 1
  1949. decay_mult: 0
  1950. }
  1951. param {
  1952. lr_mult: 1
  1953. decay_mult: 0
  1954. }
  1955. scale_param {
  1956. bias_term: true
  1957. }
  1958. }
  1959. layer {
  1960. name: "block_5_2"
  1961. type: "Eltwise"
  1962. bottom: "block_5_1"
  1963. bottom: "conv5_2/linear/bn"
  1964. top: "block_5_2"
  1965. }
  1966. layer {
  1967. name: "conv5_3/expand"
  1968. type: "Convolution"
  1969. bottom: "block_5_2"
  1970. top: "conv5_3/expand"
  1971. param {
  1972. lr_mult: 1
  1973. decay_mult: 1
  1974. }
  1975. convolution_param {
  1976. num_output: 576
  1977. bias_term: false
  1978. kernel_size: 1
  1979. weight_filler {
  1980. type: "msra"
  1981. }
  1982. }
  1983. }
  1984. layer {
  1985. name: "conv5_3/expand/bn"
  1986. type: "BatchNorm"
  1987. bottom: "conv5_3/expand"
  1988. top: "conv5_3/expand/bn"
  1989.  
  1990. }
  1991. layer {
  1992. name: "conv5_3/expand/scale"
  1993. type: "Scale"
  1994. bottom: "conv5_3/expand/bn"
  1995. top: "conv5_3/expand/bn"
  1996. param {
  1997. lr_mult: 1
  1998. decay_mult: 0
  1999. }
  2000. param {
  2001. lr_mult: 1
  2002. decay_mult: 0
  2003. }
  2004. scale_param {
  2005. bias_term: true
  2006. }
  2007. }
  2008. layer {
  2009. name: "relu5_3/expand"
  2010. type: "ReLU"
  2011. bottom: "conv5_3/expand/bn"
  2012. top: "conv5_3/expand/bn"
  2013. }
  2014. layer {
  2015. name: "conv5_3/dwise"
  2016. type: "ConvolutionDepthwise"
  2017. bottom: "conv5_3/expand/bn"
  2018. top: "conv5_3/dwise"
  2019. param {
  2020. lr_mult: 1
  2021. decay_mult: 1
  2022. }
  2023. convolution_param {
  2024. num_output: 576
  2025. bias_term: false
  2026. pad: 1
  2027. kernel_size: 3
  2028. group: 576
  2029. stride: 2
  2030. weight_filler {
  2031. type: "msra"
  2032. }
  2033. engine: CAFFE
  2034. }
  2035. }
  2036. layer {
  2037. name: "conv5_3/dwise/bn"
  2038. type: "BatchNorm"
  2039. bottom: "conv5_3/dwise"
  2040. top: "conv5_3/dwise/bn"
  2041.  
  2042. }
  2043. layer {
  2044. name: "conv5_3/dwise/scale"
  2045. type: "Scale"
  2046. bottom: "conv5_3/dwise/bn"
  2047. top: "conv5_3/dwise/bn"
  2048. param {
  2049. lr_mult: 1
  2050. decay_mult: 0
  2051. }
  2052. param {
  2053. lr_mult: 1
  2054. decay_mult: 0
  2055. }
  2056. scale_param {
  2057. bias_term: true
  2058. }
  2059. }
  2060. layer {
  2061. name: "relu5_3/dwise"
  2062. type: "ReLU"
  2063. bottom: "conv5_3/dwise/bn"
  2064. top: "conv5_3/dwise/bn"
  2065. }
  2066. layer {
  2067. name: "conv5_3/linear"
  2068. type: "Convolution"
  2069. bottom: "conv5_3/dwise/bn"
  2070. top: "conv5_3/linear"
  2071. param {
  2072. lr_mult: 1
  2073. decay_mult: 1
  2074. }
  2075. convolution_param {
  2076. num_output: 160
  2077. bias_term: false
  2078. kernel_size: 1
  2079. weight_filler {
  2080. type: "msra"
  2081. }
  2082. }
  2083. }
  2084. layer {
  2085. name: "conv5_3/linear/bn"
  2086. type: "BatchNorm"
  2087. bottom: "conv5_3/linear"
  2088. top: "conv5_3/linear/bn"
  2089.  
  2090. }
  2091. layer {
  2092. name: "conv5_3/linear/scale"
  2093. type: "Scale"
  2094. bottom: "conv5_3/linear/bn"
  2095. top: "conv5_3/linear/bn"
  2096. param {
  2097. lr_mult: 1
  2098. decay_mult: 0
  2099. }
  2100. param {
  2101. lr_mult: 1
  2102. decay_mult: 0
  2103. }
  2104. scale_param {
  2105. bias_term: true
  2106. }
  2107. }
  2108. layer {
  2109. name: "conv6_1/expand"
  2110. type: "Convolution"
  2111. bottom: "conv5_3/linear/bn"
  2112. top: "conv6_1/expand"
  2113. param {
  2114. lr_mult: 1
  2115. decay_mult: 1
  2116. }
  2117. convolution_param {
  2118. num_output: 960
  2119. bias_term: false
  2120. kernel_size: 1
  2121. weight_filler {
  2122. type: "msra"
  2123. }
  2124. }
  2125. }
  2126. layer {
  2127. name: "conv6_1/expand/bn"
  2128. type: "BatchNorm"
  2129. bottom: "conv6_1/expand"
  2130. top: "conv6_1/expand/bn"
  2131.  
  2132. }
  2133. layer {
  2134. name: "conv6_1/expand/scale"
  2135. type: "Scale"
  2136. bottom: "conv6_1/expand/bn"
  2137. top: "conv6_1/expand/bn"
  2138. param {
  2139. lr_mult: 1
  2140. decay_mult: 0
  2141. }
  2142. param {
  2143. lr_mult: 1
  2144. decay_mult: 0
  2145. }
  2146. scale_param {
  2147. bias_term: true
  2148. }
  2149. }
  2150. layer {
  2151. name: "relu6_1/expand"
  2152. type: "ReLU"
  2153. bottom: "conv6_1/expand/bn"
  2154. top: "conv6_1/expand/bn"
  2155. }
  2156. layer {
  2157. name: "conv6_1/dwise"
  2158. type: "ConvolutionDepthwise"
  2159. bottom: "conv6_1/expand/bn"
  2160. top: "conv6_1/dwise"
  2161. param {
  2162. lr_mult: 1
  2163. decay_mult: 1
  2164. }
  2165. convolution_param {
  2166. num_output: 960
  2167. bias_term: false
  2168. pad: 1
  2169. kernel_size: 3
  2170. group: 960
  2171. weight_filler {
  2172. type: "msra"
  2173. }
  2174. engine: CAFFE
  2175. }
  2176. }
  2177. layer {
  2178. name: "conv6_1/dwise/bn"
  2179. type: "BatchNorm"
  2180. bottom: "conv6_1/dwise"
  2181. top: "conv6_1/dwise/bn"
  2182.  
  2183. }
  2184. layer {
  2185. name: "conv6_1/dwise/scale"
  2186. type: "Scale"
  2187. bottom: "conv6_1/dwise/bn"
  2188. top: "conv6_1/dwise/bn"
  2189. param {
  2190. lr_mult: 1
  2191. decay_mult: 0
  2192. }
  2193. param {
  2194. lr_mult: 1
  2195. decay_mult: 0
  2196. }
  2197. scale_param {
  2198. bias_term: true
  2199. }
  2200. }
  2201. layer {
  2202. name: "relu6_1/dwise"
  2203. type: "ReLU"
  2204. bottom: "conv6_1/dwise/bn"
  2205. top: "conv6_1/dwise/bn"
  2206. }
  2207. layer {
  2208. name: "conv6_1/linear"
  2209. type: "Convolution"
  2210. bottom: "conv6_1/dwise/bn"
  2211. top: "conv6_1/linear"
  2212. param {
  2213. lr_mult: 1
  2214. decay_mult: 1
  2215. }
  2216. convolution_param {
  2217. num_output: 160
  2218. bias_term: false
  2219. kernel_size: 1
  2220. weight_filler {
  2221. type: "msra"
  2222. }
  2223. }
  2224. }
  2225. layer {
  2226. name: "conv6_1/linear/bn"
  2227. type: "BatchNorm"
  2228. bottom: "conv6_1/linear"
  2229. top: "conv6_1/linear/bn"
  2230.  
  2231. }
  2232. layer {
  2233. name: "conv6_1/linear/scale"
  2234. type: "Scale"
  2235. bottom: "conv6_1/linear/bn"
  2236. top: "conv6_1/linear/bn"
  2237. param {
  2238. lr_mult: 1
  2239. decay_mult: 0
  2240. }
  2241. param {
  2242. lr_mult: 1
  2243. decay_mult: 0
  2244. }
  2245. scale_param {
  2246. bias_term: true
  2247. }
  2248. }
  2249. layer {
  2250. name: "block_6_1"
  2251. type: "Eltwise"
  2252. bottom: "conv5_3/linear/bn"
  2253. bottom: "conv6_1/linear/bn"
  2254. top: "block_6_1"
  2255. }
  2256. layer {
  2257. name: "conv6_2/expand"
  2258. type: "Convolution"
  2259. bottom: "block_6_1"
  2260. top: "conv6_2/expand"
  2261. param {
  2262. lr_mult: 1
  2263. decay_mult: 1
  2264. }
  2265. convolution_param {
  2266. num_output: 960
  2267. bias_term: false
  2268. kernel_size: 1
  2269. weight_filler {
  2270. type: "msra"
  2271. }
  2272. }
  2273. }
  2274. layer {
  2275. name: "conv6_2/expand/bn"
  2276. type: "BatchNorm"
  2277. bottom: "conv6_2/expand"
  2278. top: "conv6_2/expand/bn"
  2279.  
  2280. }
  2281. layer {
  2282. name: "conv6_2/expand/scale"
  2283. type: "Scale"
  2284. bottom: "conv6_2/expand/bn"
  2285. top: "conv6_2/expand/bn"
  2286. param {
  2287. lr_mult: 1
  2288. decay_mult: 0
  2289. }
  2290. param {
  2291. lr_mult: 1
  2292. decay_mult: 0
  2293. }
  2294. scale_param {
  2295. bias_term: true
  2296. }
  2297. }
  2298. layer {
  2299. name: "relu6_2/expand"
  2300. type: "ReLU"
  2301. bottom: "conv6_2/expand/bn"
  2302. top: "conv6_2/expand/bn"
  2303. }
  2304. layer {
  2305. name: "conv6_2/dwise"
  2306. type: "ConvolutionDepthwise"
  2307. bottom: "conv6_2/expand/bn"
  2308. top: "conv6_2/dwise"
  2309. param {
  2310. lr_mult: 1
  2311. decay_mult: 1
  2312. }
  2313. convolution_param {
  2314. num_output: 960
  2315. bias_term: false
  2316. pad: 1
  2317. kernel_size: 3
  2318. group: 960
  2319. weight_filler {
  2320. type: "msra"
  2321. }
  2322. engine: CAFFE
  2323. }
  2324. }
  2325. layer {
  2326. name: "conv6_2/dwise/bn"
  2327. type: "BatchNorm"
  2328. bottom: "conv6_2/dwise"
  2329. top: "conv6_2/dwise/bn"
  2330.  
  2331. }
  2332. layer {
  2333. name: "conv6_2/dwise/scale"
  2334. type: "Scale"
  2335. bottom: "conv6_2/dwise/bn"
  2336. top: "conv6_2/dwise/bn"
  2337. param {
  2338. lr_mult: 1
  2339. decay_mult: 0
  2340. }
  2341. param {
  2342. lr_mult: 1
  2343. decay_mult: 0
  2344. }
  2345. scale_param {
  2346. bias_term: true
  2347. }
  2348. }
  2349. layer {
  2350. name: "relu6_2/dwise"
  2351. type: "ReLU"
  2352. bottom: "conv6_2/dwise/bn"
  2353. top: "conv6_2/dwise/bn"
  2354. }
  2355. layer {
  2356. name: "conv6_2/linear"
  2357. type: "Convolution"
  2358. bottom: "conv6_2/dwise/bn"
  2359. top: "conv6_2/linear"
  2360. param {
  2361. lr_mult: 1
  2362. decay_mult: 1
  2363. }
  2364. convolution_param {
  2365. num_output: 160
  2366. bias_term: false
  2367. kernel_size: 1
  2368. weight_filler {
  2369. type: "msra"
  2370. }
  2371. }
  2372. }
  2373. layer {
  2374. name: "conv6_2/linear/bn"
  2375. type: "BatchNorm"
  2376. bottom: "conv6_2/linear"
  2377. top: "conv6_2/linear/bn"
  2378.  
  2379. }
  2380. layer {
  2381. name: "conv6_2/linear/scale"
  2382. type: "Scale"
  2383. bottom: "conv6_2/linear/bn"
  2384. top: "conv6_2/linear/bn"
  2385. param {
  2386. lr_mult: 1
  2387. decay_mult: 0
  2388. }
  2389. param {
  2390. lr_mult: 1
  2391. decay_mult: 0
  2392. }
  2393. scale_param {
  2394. bias_term: true
  2395. }
  2396. }
  2397. layer {
  2398. name: "block_6_2"
  2399. type: "Eltwise"
  2400. bottom: "block_6_1"
  2401. bottom: "conv6_2/linear/bn"
  2402. top: "block_6_2"
  2403. }
########### Modification for SSD below: stride 2 in conv6_3's depthwise conv.

  2406. layer {
  2407. name: "conv6_3/expand"
  2408. type: "Convolution"
  2409. bottom: "block_6_2"
  2410. top: "conv6_3/expand"
  2411. param {
  2412. lr_mult: 1
  2413. decay_mult: 1
  2414. }
  2415. convolution_param {
  2416. num_output: 960
  2417. bias_term: false
  2418. kernel_size: 1
  2419. weight_filler {
  2420. type: "msra"
  2421. }
  2422. }
  2423. }
  2424. layer {
  2425. name: "conv6_3/expand/bn"
  2426. type: "BatchNorm"
  2427. bottom: "conv6_3/expand"
  2428. top: "conv6_3/expand/bn"
  2429.  
  2430. }
  2431. layer {
  2432. name: "conv6_3/expand/scale"
  2433. type: "Scale"
  2434. bottom: "conv6_3/expand/bn"
  2435. top: "conv6_3/expand/bn"
  2436. param {
  2437. lr_mult: 1
  2438. decay_mult: 0
  2439. }
  2440. param {
  2441. lr_mult: 1
  2442. decay_mult: 0
  2443. }
  2444. scale_param {
  2445. bias_term: true
  2446. }
  2447. }
  2448. layer {
  2449. name: "relu6_3/expand"
  2450. type: "ReLU"
  2451. bottom: "conv6_3/expand/bn"
  2452. top: "conv6_3/expand/bn"
  2453. }
  2454. layer {
  2455. name: "conv6_3/dwise"
  2456. type: "ConvolutionDepthwise"
  2457. bottom: "conv6_3/expand/bn"
  2458. top: "conv6_3/dwise"
  2459. param {
  2460. lr_mult: 1
  2461. decay_mult: 1
  2462. }
  2463. convolution_param {
  2464. num_output: 960
  2465. bias_term: false
  2466. pad: 1
  2467. kernel_size: 3
  2468. stride: 2
  2469. group: 960
  2470. weight_filler {
  2471. type: "msra"
  2472. }
  2473. engine: CAFFE
  2474. }
  2475. }
  2476. layer {
  2477. name: "conv6_3/dwise/bn"
  2478. type: "BatchNorm"
  2479. bottom: "conv6_3/dwise"
  2480. top: "conv6_3/dwise/bn"
  2481.  
  2482. }
  2483. layer {
  2484. name: "conv6_3/dwise/scale"
  2485. type: "Scale"
  2486. bottom: "conv6_3/dwise/bn"
  2487. top: "conv6_3/dwise/bn"
  2488. param {
  2489. lr_mult: 1
  2490. decay_mult: 0
  2491. }
  2492. param {
  2493. lr_mult: 1
  2494. decay_mult: 0
  2495. }
  2496. scale_param {
  2497. bias_term: true
  2498. }
  2499. }
  2500. layer {
  2501. name: "relu6_3/dwise"
  2502. type: "ReLU"
  2503. bottom: "conv6_3/dwise/bn"
  2504. top: "conv6_3/dwise/bn"
  2505. }
  2506. layer {
  2507. name: "conv6_3/linear"
  2508. type: "Convolution"
  2509. bottom: "conv6_3/dwise/bn"
  2510. top: "conv6_3/linear"
  2511. param {
  2512. lr_mult: 1
  2513. decay_mult: 1
  2514. }
  2515. convolution_param {
  2516. num_output: 320
  2517. bias_term: false
  2518. kernel_size: 1
  2519. weight_filler {
  2520. type: "msra"
  2521. }
  2522. }
  2523. }
  2524. layer {
  2525. name: "conv6_3/linear/bn"
  2526. type: "BatchNorm"
  2527. bottom: "conv6_3/linear"
  2528. top: "conv6_3/linear/bn"
  2529.  
  2530. }
  2531. layer {
  2532. name: "conv6_3/linear/scale"
  2533. type: "Scale"
  2534. bottom: "conv6_3/linear/bn"
  2535. top: "conv6_3/linear/bn"
  2536. param {
  2537. lr_mult: 1
  2538. decay_mult: 0
  2539. }
  2540. param {
  2541. lr_mult: 1
  2542. decay_mult: 0
  2543. }
  2544. scale_param {
  2545. bias_term: true
  2546. }
  2547. }
##### SSD changes from here to the end: add blocks 6_4 and 6_5, modelled on the inverted-residual bottleneck.
  2549. layer {
  2550. name: "conv6_4/expand"
  2551. type: "Convolution"
  2552. bottom: "conv6_3/linear/bn"
  2553. top: "conv6_4/expand"
  2554. param {
  2555. lr_mult: 1
  2556. decay_mult: 1
  2557. }
  2558. convolution_param {
  2559. num_output: 1920
  2560. bias_term: false
  2561. kernel_size: 1
  2562. weight_filler {
  2563. type: "msra"
  2564. }
  2565. }
  2566. }
  2567. layer {
  2568. name: "conv6_4/expand/bn"
  2569. type: "BatchNorm"
  2570. bottom: "conv6_4/expand"
  2571. top: "conv6_4/expand/bn"
  2572.  
  2573. }
  2574. layer {
  2575. name: "conv6_4/expand/scale"
  2576. type: "Scale"
  2577. bottom: "conv6_4/expand/bn"
  2578. top: "conv6_4/expand/bn"
  2579. param {
  2580. lr_mult: 1
  2581. decay_mult: 0
  2582. }
  2583. param {
  2584. lr_mult: 1
  2585. decay_mult: 0
  2586. }
  2587. scale_param {
  2588. bias_term: true
  2589. }
  2590. }
  2591. layer {
  2592. name: "relu6_4/expand"
  2593. type: "ReLU"
  2594. bottom: "conv6_4/expand/bn"
  2595. top: "conv6_4/expand/bn"
  2596. }
  2597. layer {
  2598. name: "conv6_4/dwise"
  2599. type: "ConvolutionDepthwise"
  2600. bottom: "conv6_4/expand/bn"
  2601. top: "conv6_4/dwise"
  2602. param {
  2603. lr_mult: 1
  2604. decay_mult: 1
  2605. }
  2606. convolution_param {
  2607. num_output: 1920
  2608. bias_term: false
  2609. pad: 1
  2610. kernel_size: 3
  2611. group: 1920
  2612. weight_filler {
  2613. type: "msra"
  2614. }
  2615. engine: CAFFE
  2616. }
  2617. }
  2618. layer {
  2619. name: "conv6_4/dwise/bn"
  2620. type: "BatchNorm"
  2621. bottom: "conv6_4/dwise"
  2622. top: "conv6_4/dwise/bn"
  2623.  
  2624. }
  2625. layer {
  2626. name: "conv6_4/dwise/scale"
  2627. type: "Scale"
  2628. bottom: "conv6_4/dwise/bn"
  2629. top: "conv6_4/dwise/bn"
  2630. param {
  2631. lr_mult: 1
  2632. decay_mult: 0
  2633. }
  2634. param {
  2635. lr_mult: 1
  2636. decay_mult: 0
  2637. }
  2638. scale_param {
  2639. bias_term: true
  2640. }
  2641. }
  2642. layer {
  2643. name: "relu6_4/dwise"
  2644. type: "ReLU"
  2645. bottom: "conv6_4/dwise/bn"
  2646. top: "conv6_4/dwise/bn"
  2647. }
  2648. layer {
  2649. name: "conv6_4/linear"
  2650. type: "Convolution"
  2651. bottom: "conv6_4/dwise/bn"
  2652. top: "conv6_4/linear"
  2653. param {
  2654. lr_mult: 1
  2655. decay_mult: 1
  2656. }
  2657. convolution_param {
  2658. num_output: 320
  2659. bias_term: false
  2660. kernel_size: 1
  2661. weight_filler {
  2662. type: "msra"
  2663. }
  2664. }
  2665. }
  2666. layer {
  2667. name: "conv6_4/linear/bn"
  2668. type: "BatchNorm"
  2669. bottom: "conv6_4/linear"
  2670. top: "conv6_4/linear/bn"
  2671.  
  2672. }
  2673. layer {
  2674. name: "conv6_4/linear/scale"
  2675. type: "Scale"
  2676. bottom: "conv6_4/linear/bn"
  2677. top: "conv6_4/linear/bn"
  2678. param {
  2679. lr_mult: 1
  2680. decay_mult: 0
  2681. }
  2682. param {
  2683. lr_mult: 1
  2684. decay_mult: 0
  2685. }
  2686. scale_param {
  2687. bias_term: true
  2688. }
  2689. }
  2690. layer {
  2691. name: "block_6_4"
  2692. type: "Eltwise"
  2693. bottom: "conv6_3/linear/bn"
  2694. bottom: "conv6_4/linear/bn"
  2695. top: "block_6_4"
  2696. }
  2697. layer {
  2698. name: "conv6_5/expand"
  2699. type: "Convolution"
  2700. bottom: "block_6_4"
  2701. top: "conv6_5/expand"
  2702. param {
  2703. lr_mult: 1
  2704. decay_mult: 1
  2705. }
  2706. convolution_param {
  2707. num_output: 1920
  2708. bias_term: false
  2709. kernel_size: 1
  2710. weight_filler {
  2711. type: "msra"
  2712. }
  2713. }
  2714. }
  2715. layer {
  2716. name: "conv6_5/expand/bn"
  2717. type: "BatchNorm"
  2718. bottom: "conv6_5/expand"
  2719. top: "conv6_5/expand/bn"
  2720.  
  2721. }
  2722. layer {
  2723. name: "conv6_5/expand/scale"
  2724. type: "Scale"
  2725. bottom: "conv6_5/expand/bn"
  2726. top: "conv6_5/expand/bn"
  2727. param {
  2728. lr_mult: 1
  2729. decay_mult: 0
  2730. }
  2731. param {
  2732. lr_mult: 1
  2733. decay_mult: 0
  2734. }
  2735. scale_param {
  2736. bias_term: true
  2737. }
  2738. }
  2739. layer {
  2740. name: "relu6_5/expand"
  2741. type: "ReLU"
  2742. bottom: "conv6_5/expand/bn"
  2743. top: "conv6_5/expand/bn"
  2744. }
  2745. layer {
  2746. name: "conv6_5/dwise"
  2747. type: "ConvolutionDepthwise"
  2748. bottom: "conv6_5/expand/bn"
  2749. top: "conv6_5/dwise"
  2750. param {
  2751. lr_mult: 1
  2752. decay_mult: 1
  2753. }
  2754. convolution_param {
  2755. num_output: 1920
  2756. bias_term: false
  2757. pad: 1
  2758. kernel_size: 3
  2759. group: 1920
  2760. weight_filler {
  2761. type: "msra"
  2762. }
  2763. engine: CAFFE
  2764. }
  2765. }
  2766. layer {
  2767. name: "conv6_5/dwise/bn"
  2768. type: "BatchNorm"
  2769. bottom: "conv6_5/dwise"
  2770. top: "conv6_5/dwise/bn"
  2771.  
  2772. }
  2773. layer {
  2774. name: "conv6_5/dwise/scale"
  2775. type: "Scale"
  2776. bottom: "conv6_5/dwise/bn"
  2777. top: "conv6_5/dwise/bn"
  2778. param {
  2779. lr_mult: 1
  2780. decay_mult: 0
  2781. }
  2782. param {
  2783. lr_mult: 1
  2784. decay_mult: 0
  2785. }
  2786. scale_param {
  2787. bias_term: true
  2788. }
  2789. }
  2790. layer {
  2791. name: "relu6_5/dwise"
  2792. type: "ReLU"
  2793. bottom: "conv6_5/dwise/bn"
  2794. top: "conv6_5/dwise/bn"
  2795. }
  2796. layer {
  2797. name: "conv6_5/linear"
  2798. type: "Convolution"
  2799. bottom: "conv6_5/dwise/bn"
  2800. top: "conv6_5/linear"
  2801. param {
  2802. lr_mult: 1
  2803. decay_mult: 1
  2804. }
  2805. convolution_param {
  2806. num_output: 320
  2807. bias_term: false
  2808. kernel_size: 1
  2809. weight_filler {
  2810. type: "msra"
  2811. }
  2812. }
  2813. }
  2814. layer {
  2815. name: "conv6_5/linear/bn"
  2816. type: "BatchNorm"
  2817. bottom: "conv6_5/linear"
  2818. top: "conv6_5/linear/bn"
  2819.  
  2820. }
  2821. layer {
  2822. name: "conv6_5/linear/scale"
  2823. type: "Scale"
  2824. bottom: "conv6_5/linear/bn"
  2825. top: "conv6_5/linear/bn"
  2826. param {
  2827. lr_mult: 1
  2828. decay_mult: 0
  2829. }
  2830. param {
  2831. lr_mult: 1
  2832. decay_mult: 0
  2833. }
  2834. scale_param {
  2835. bias_term: true
  2836. }
  2837. }
  2838. layer {
  2839. name: "block_6_5"
  2840. type: "Eltwise"
  2841. bottom: "block_6_4"
  2842. bottom: "conv6_5/linear/bn"
  2843. top: "block_6_5"
  2844. }


##### ... as well as bottleneck blocks 7_1, 7_2 and 7_3 (SSD extras).



  2851. layer {
  2852. name: "conv7_1/expand"
  2853. type: "Convolution"
  2854. bottom: "block_6_5"
  2855. top: "conv7_1/expand"
  2856. param {
  2857. lr_mult: 1
  2858. decay_mult: 1
  2859. }
  2860. convolution_param {
  2861. num_output: 1920
  2862. bias_term: false
  2863. kernel_size: 1
  2864. weight_filler {
  2865. type: "msra"
  2866. }
  2867. }
  2868. }
  2869.  
  2870.  
  2871. layer {
  2872. name: "conv7_1/expand/bn"
  2873. type: "BatchNorm"
  2874. bottom: "conv7_1/expand"
  2875. top: "conv7_1/expand/bn"
  2876.  
  2877. }
  2878. layer {
  2879. name: "conv7_1/expand/scale"
  2880. type: "Scale"
  2881. bottom: "conv7_1/expand/bn"
  2882. top: "conv7_1/expand/bn"
  2883. param {
  2884. lr_mult: 1
  2885. decay_mult: 0
  2886. }
  2887. param {
  2888. lr_mult: 1
  2889. decay_mult: 0
  2890. }
  2891. scale_param {
  2892. bias_term: true
  2893. }
  2894. }
  2895. layer {
  2896. name: "relu7_1/expand"
  2897. type: "ReLU"
  2898. bottom: "conv7_1/expand/bn"
  2899. top: "conv7_1/expand/bn"
  2900. }
  2901. layer {
  2902. name: "conv7_1/dwise"
  2903. type: "ConvolutionDepthwise"
  2904. bottom: "conv7_1/expand/bn"
  2905. top: "conv7_1/dwise"
  2906. param {
  2907. lr_mult: 1
  2908. decay_mult: 1
  2909. }
  2910. convolution_param {
  2911. num_output: 1920
  2912. bias_term: false
  2913. pad: 1
  2914. kernel_size: 3
  2915. stride: 2
  2916. group: 1920
  2917. weight_filler {
  2918. type: "msra"
  2919. }
  2920. engine: CAFFE
  2921. }
  2922. }
  2923. layer {
  2924. name: "conv7_1/dwise/bn"
  2925. type: "BatchNorm"
  2926. bottom: "conv7_1/dwise"
  2927. top: "conv7_1/dwise/bn"
  2928.  
  2929. }
  2930. layer {
  2931. name: "conv7_1/dwise/scale"
  2932. type: "Scale"
  2933. bottom: "conv7_1/dwise/bn"
  2934. top: "conv7_1/dwise/bn"
  2935. param {
  2936. lr_mult: 1
  2937. decay_mult: 0
  2938. }
  2939. param {
  2940. lr_mult: 1
  2941. decay_mult: 0
  2942. }
  2943. scale_param {
  2944. bias_term: true
  2945. }
  2946. }
  2947. layer {
  2948. name: "relu7_1/dwise"
  2949. type: "ReLU"
  2950. bottom: "conv7_1/dwise/bn"
  2951. top: "conv7_1/dwise/bn"
  2952. }
  2953. layer {
  2954. name: "conv7_1/linear"
  2955. type: "Convolution"
  2956. bottom: "conv7_1/dwise/bn"
  2957. top: "conv7_1/linear"
  2958. param {
  2959. lr_mult: 1
  2960. decay_mult: 1
  2961. }
  2962. convolution_param {
  2963. num_output: 640
  2964. bias_term: false
  2965. kernel_size: 1
  2966. weight_filler {
  2967. type: "msra"
  2968. }
  2969. }
  2970. }
  2971. layer {
  2972. name: "conv7_1/linear/bn"
  2973. type: "BatchNorm"
  2974. bottom: "conv7_1/linear"
  2975. top: "conv7_1/linear/bn"
  2976.  
  2977. }
  2978. layer {
  2979. name: "conv7_1/linear/scale"
  2980. type: "Scale"
  2981. bottom: "conv7_1/linear/bn"
  2982. top: "conv7_1/linear/bn"
  2983. param {
  2984. lr_mult: 1
  2985. decay_mult: 0
  2986. }
  2987. param {
  2988. lr_mult: 1
  2989. decay_mult: 0
  2990. }
  2991. scale_param {
  2992. bias_term: true
  2993. }
  2994. }
  # --- Inverted-residual bottleneck 7_2 (stride 1, with residual add) ---
  # 1x1 expansion: 640 -> 3840 channels (expansion factor 6), no bias.
  2995. layer {
  2996. name: "conv7_2/expand"
  2997. type: "Convolution"
  2998. bottom: "conv7_1/linear/bn"
  2999. top: "conv7_2/expand"
  3000. param {
  3001. lr_mult: 1
  3002. decay_mult: 1
  3003. }
  3004. convolution_param {
  3005. num_output: 3840
  3006. bias_term: false
  3007. kernel_size: 1
  3008. weight_filler {
  3009. type: "msra"
  3010. }
  3011. }
  3012. }
  # BatchNorm + Scale + ReLU on the expanded features.
  3013. layer {
  3014. name: "conv7_2/expand/bn"
  3015. type: "BatchNorm"
  3016. bottom: "conv7_2/expand"
  3017. top: "conv7_2/expand/bn"
  3018.  
  3019. }
  3020. layer {
  3021. name: "conv7_2/expand/scale"
  3022. type: "Scale"
  3023. bottom: "conv7_2/expand/bn"
  3024. top: "conv7_2/expand/bn"
  3025. param {
  3026. lr_mult: 1
  3027. decay_mult: 0
  3028. }
  3029. param {
  3030. lr_mult: 1
  3031. decay_mult: 0
  3032. }
  3033. scale_param {
  3034. bias_term: true
  3035. }
  3036. }
  3037. layer {
  3038. name: "relu7_2/expand"
  3039. type: "ReLU"
  3040. bottom: "conv7_2/expand/bn"
  3041. top: "conv7_2/expand/bn"
  3042. }
  # 3x3 depthwise conv (group == num_output == 3840, one filter per channel),
  # pad 1 / stride 1 so the spatial size is preserved for the residual add.
  # engine: CAFFE forces the CPU/naive engine for the depthwise layer.
  3043. layer {
  3044. name: "conv7_2/dwise"
  3045. type: "ConvolutionDepthwise"
  3046. bottom: "conv7_2/expand/bn"
  3047. top: "conv7_2/dwise"
  3048. param {
  3049. lr_mult: 1
  3050. decay_mult: 1
  3051. }
  3052. convolution_param {
  3053. num_output: 3840
  3054. bias_term: false
  3055. pad: 1
  3056. kernel_size: 3
  3057. group: 3840
  3058. weight_filler {
  3059. type: "msra"
  3060. }
  3061. engine: CAFFE
  3062. }
  3063. }
  3064. layer {
  3065. name: "conv7_2/dwise/bn"
  3066. type: "BatchNorm"
  3067. bottom: "conv7_2/dwise"
  3068. top: "conv7_2/dwise/bn"
  3069.  
  3070. }
  3071. layer {
  3072. name: "conv7_2/dwise/scale"
  3073. type: "Scale"
  3074. bottom: "conv7_2/dwise/bn"
  3075. top: "conv7_2/dwise/bn"
  3076. param {
  3077. lr_mult: 1
  3078. decay_mult: 0
  3079. }
  3080. param {
  3081. lr_mult: 1
  3082. decay_mult: 0
  3083. }
  3084. scale_param {
  3085. bias_term: true
  3086. }
  3087. }
  3088. layer {
  3089. name: "relu7_2/dwise"
  3090. type: "ReLU"
  3091. bottom: "conv7_2/dwise/bn"
  3092. top: "conv7_2/dwise/bn"
  3093. }
  # 1x1 linear projection back to 640 channels (no ReLU afterwards).
  3094. layer {
  3095. name: "conv7_2/linear"
  3096. type: "Convolution"
  3097. bottom: "conv7_2/dwise/bn"
  3098. top: "conv7_2/linear"
  3099. param {
  3100. lr_mult: 1
  3101. decay_mult: 1
  3102. }
  3103. convolution_param {
  3104. num_output: 640
  3105. bias_term: false
  3106. kernel_size: 1
  3107. weight_filler {
  3108. type: "msra"
  3109. }
  3110. }
  3111. }
  3112. layer {
  3113. name: "conv7_2/linear/bn"
  3114. type: "BatchNorm"
  3115. bottom: "conv7_2/linear"
  3116. top: "conv7_2/linear/bn"
  3117. }
  3118. layer {
  3119. name: "conv7_2/linear/scale"
  3120. type: "Scale"
  3121. bottom: "conv7_2/linear/bn"
  3122. top: "conv7_2/linear/bn"
  3123. param {
  3124. lr_mult: 1
  3125. decay_mult: 0
  3126. }
  3127. param {
  3128. lr_mult: 1
  3129. decay_mult: 0
  3130. }
  3131. scale_param {
  3132. bias_term: true
  3133. }
  3134. }
  # Residual connection: element-wise combine of the block input
  # (conv7_1/linear/bn) and the block output (conv7_2/linear/bn).
  # No eltwise_param given, so Caffe's default Eltwise operation (SUM) applies.
  3135. layer {
  3136. name: "block_7_2"
  3137. type: "Eltwise"
  3138. bottom: "conv7_1/linear/bn"
  3139. bottom: "conv7_2/linear/bn"
  3140. top: "block_7_2"
  3141. }
  # NOTE(review): conv7_3/expand and its BatchNorm were left ACTIVE while every
  # downstream conv7_3 layer (scale, relu, dwise, linear, block_7_3) is
  # commented out below, so their top blob "conv7_3/expand/bn" is never
  # consumed anywhere in this file.  Caffe still executes layers with
  # unconsumed tops, wasting a 640->3840-channel 1x1 convolution plus a BN on
  # every forward pass.  Disabled here to match the rest of the 7_3 block;
  # the last live blob of the network remains "block_7_2".
  # (If some external tool extracts "conv7_3/expand/bn" by blob name,
  # re-enable these two layers — TODO confirm before deploying.)
  3142. # layer {
  3143. # name: "conv7_3/expand"
  3144. # type: "Convolution"
  3145. # bottom: "block_7_2"
  3146. # top: "conv7_3/expand"
  3147. # param {
  3148. # lr_mult: 1
  3149. # decay_mult: 1
  3150. # }
  3151. # convolution_param {
  3152. # num_output: 3840
  3153. # bias_term: false
  3154. # kernel_size: 1
  3155. # weight_filler {
  3156. # type: "msra"
  3157. # }
  3158. # }
  3159. # }
  3160. # layer {
  3161. # name: "conv7_3/expand/bn"
  3162. # type: "BatchNorm"
  3163. # bottom: "conv7_3/expand"
  3164. # top: "conv7_3/expand/bn"
  3165.  
  3166. # }
  3167. # layer {
  3168. # name: "conv7_3/expand/scale"
  3169. # type: "Scale"
  3170. # bottom: "conv7_3/expand/bn"
  3171. # top: "conv7_3/expand/bn"
  3172. # param {
  3173. # lr_mult: 1
  3174. # decay_mult: 0
  3175. # }
  3176. # param {
  3177. # lr_mult: 1
  3178. # decay_mult: 0
  3179. # }
  3180. # scale_param {
  3181. # bias_term: true
  3182. # }
  3183. # }
  3184. # layer {
  3185. # name: "relu7_3/expand"
  3186. # type: "ReLU"
  3187. # bottom: "conv7_3/expand/bn"
  3188. # top: "conv7_3/expand/bn"
  3189. # }
  3190. # layer {
  3191. # name: "conv7_3/dwise"
  3192. # type: "ConvolutionDepthwise"
  3193. # bottom: "conv7_3/expand/bn"
  3194. # top: "conv7_3/dwise"
  3195. # param {
  3196. # lr_mult: 1
  3197. # decay_mult: 1
  3198. # }
  3199. # convolution_param {
  3200. # num_output: 3840
  3201. # bias_term: false
  3202. # pad: 1
  3203. # kernel_size: 3
  3204. # group: 3840
  3205. # weight_filler {
  3206. # type: "msra"
  3207. # }
  3208. # engine: CAFFE
  3209. # }
  3210. # }
  3211. # layer {
  3212. # name: "conv7_3/dwise/bn"
  3213. # type: "BatchNorm"
  3214. # bottom: "conv7_3/dwise"
  3215. # top: "conv7_3/dwise/bn"
  3216.  
  3217. # }
  3218. # layer {
  3219. # name: "conv7_3/dwise/scale"
  3220. # type: "Scale"
  3221. # bottom: "conv7_3/dwise/bn"
  3222. # top: "conv7_3/dwise/bn"
  3223. # param {
  3224. # lr_mult: 1
  3225. # decay_mult: 0
  3226. # }
  3227. # param {
  3228. # lr_mult: 1
  3229. # decay_mult: 0
  3230. # }
  3231. # scale_param {
  3232. # bias_term: true
  3233. # }
  3234. # }
  3235. # layer {
  3236. # name: "relu7_3/dwise"
  3237. # type: "ReLU"
  3238. # bottom: "conv7_3/dwise/bn"
  3239. # top: "conv7_3/dwise/bn"
  3240. # }
  3241. # layer {
  3242. # name: "conv7_3/linear"
  3243. # type: "Convolution"
  3244. # bottom: "conv7_3/dwise/bn"
  3245. # top: "conv7_3/linear"
  3246. # param {
  3247. # lr_mult: 1
  3248. # decay_mult: 1
  3249. # }
  3250. # convolution_param {
  3251. # num_output: 640
  3252. # bias_term: false
  3253. # kernel_size: 1
  3254. # weight_filler {
  3255. # type: "msra"
  3256. # }
  3257. # }
  3258. # }
  3259. # layer {
  3260. # name: "conv7_3/linear/bn"
  3261. # type: "BatchNorm"
  3262. # bottom: "conv7_3/linear"
  3263. # top: "conv7_3/linear/bn"
  3264.  
  3265. # }
  3266. # layer {
  3267. # name: "conv7_3/linear/scale"
  3268. # type: "Scale"
  3269. # bottom: "conv7_3/linear/bn"
  3270. # top: "conv7_3/linear/bn"
  3271. # param {
  3272. # lr_mult: 1
  3273. # decay_mult: 0
  3274. # }
  3275. # param {
  3276. # lr_mult: 1
  3277. # decay_mult: 0
  3278. # }
  3279. # scale_param {
  3280. # bias_term: true
  3281. # }
  3282. # }
  3283. # layer {
  3284. # name: "block_7_3"
  3285. # type: "Eltwise"
  3286. # bottom: "block_7_2"
  3287. # bottom: "conv7_3/linear/bn"
  3288. # top: "block_7_3"
  3289. # }
  3290.  
  3291.  
  3292.  
  3293. # ##### From block 8_1 on, the feature map is 1x1 pixel, so the depthwise convs below use kernel_size: 1 with no padding
  3294. # layer {
  3295. # name: "conv8_1/expand"
  3296. # type: "Convolution"
  3297. # bottom: "block_7_3"
  3298. # top: "conv8_1/expand"
  3299. # param {
  3300. # lr_mult: 1
  3301. # decay_mult: 1
  3302. # }
  3303. # convolution_param {
  3304. # num_output: 3840
  3305. # bias_term: false
  3306. # kernel_size: 1
  3307. # weight_filler {
  3308. # type: "msra"
  3309. # }
  3310. # }
  3311. # }
  3312. # layer {
  3313. # name: "conv8_1/expand/bn"
  3314. # type: "BatchNorm"
  3315. # bottom: "conv8_1/expand"
  3316. # top: "conv8_1/expand/bn"
  3317.  
  3318. # }
  3319. # layer {
  3320. # name: "conv8_1/expand/scale"
  3321. # type: "Scale"
  3322. # bottom: "conv8_1/expand/bn"
  3323. # top: "conv8_1/expand/bn"
  3324. # param {
  3325. # lr_mult: 1
  3326. # decay_mult: 0
  3327. # }
  3328. # param {
  3329. # lr_mult: 1
  3330. # decay_mult: 0
  3331. # }
  3332. # scale_param {
  3333. # bias_term: true
  3334. # }
  3335. # }
  3336. # layer {
  3337. # name: "relu8_1/expand"
  3338. # type: "ReLU"
  3339. # bottom: "conv8_1/expand/bn"
  3340. # top: "conv8_1/expand/bn"
  3341. # }
  3342. # layer {
  3343. # name: "conv8_1/dwise"
  3344. # type: "ConvolutionDepthwise"
  3345. # bottom: "conv8_1/expand/bn"
  3346. # top: "conv8_1/dwise"
  3347. # param {
  3348. # lr_mult: 1
  3349. # decay_mult: 1
  3350. # }
  3351. # convolution_param {
  3352. # num_output: 3840
  3353. # bias_term: false
  3354. # kernel_size: 1
  3355. # group: 3840
  3356. # weight_filler {
  3357. # type: "msra"
  3358. # }
  3359. # engine: CAFFE
  3360. # }
  3361. # }
  3362. # layer {
  3363. # name: "conv8_1/dwise/bn"
  3364. # type: "BatchNorm"
  3365. # bottom: "conv8_1/dwise"
  3366. # top: "conv8_1/dwise/bn"
  3367.  
  3368. # }
  3369. # layer {
  3370. # name: "conv8_1/dwise/scale"
  3371. # type: "Scale"
  3372. # bottom: "conv8_1/dwise/bn"
  3373. # top: "conv8_1/dwise/bn"
  3374. # param {
  3375. # lr_mult: 1
  3376. # decay_mult: 0
  3377. # }
  3378. # param {
  3379. # lr_mult: 1
  3380. # decay_mult: 0
  3381. # }
  3382. # scale_param {
  3383. # bias_term: true
  3384. # }
  3385. # }
  3386. # layer {
  3387. # name: "relu8_1/dwise"
  3388. # type: "ReLU"
  3389. # bottom: "conv8_1/dwise/bn"
  3390. # top: "conv8_1/dwise/bn"
  3391. # }
  3392. # layer {
  3393. # name: "conv8_1/linear"
  3394. # type: "Convolution"
  3395. # bottom: "conv8_1/dwise/bn"
  3396. # top: "conv8_1/linear"
  3397. # param {
  3398. # lr_mult: 1
  3399. # decay_mult: 1
  3400. # }
  3401. # convolution_param {
  3402. # num_output: 1280
  3403. # bias_term: false
  3404. # kernel_size: 1
  3405. # weight_filler {
  3406. # type: "msra"
  3407. # }
  3408. # }
  3409. # }
  3410. # layer {
  3411. # name: "conv8_1/linear/bn"
  3412. # type: "BatchNorm"
  3413. # bottom: "conv8_1/linear"
  3414. # top: "conv8_1/linear/bn"
  3415.  
  3416. # }
  3417. # layer {
  3418. # name: "conv8_1/linear/scale"
  3419. # type: "Scale"
  3420. # bottom: "conv8_1/linear/bn"
  3421. # top: "conv8_1/linear/bn"
  3422. # param {
  3423. # lr_mult: 1
  3424. # decay_mult: 0
  3425. # }
  3426. # param {
  3427. # lr_mult: 1
  3428. # decay_mult: 0
  3429. # }
  3430. # scale_param {
  3431. # bias_term: true
  3432. # }
  3433. # }
  3434. # layer {
  3435. # name: "conv8_2/expand"
  3436. # type: "Convolution"
  3437. # bottom: "conv8_1/linear/bn"
  3438. # top: "conv8_2/expand"
  3439. # param {
  3440. # lr_mult: 1
  3441. # decay_mult: 1
  3442. # }
  3443. # convolution_param {
  3444. # num_output: 7680
  3445. # bias_term: false
  3446. # kernel_size: 1
  3447. # weight_filler {
  3448. # type: "msra"
  3449. # }
  3450. # }
  3451. # }
  3452. # layer {
  3453. # name: "conv8_2/expand/bn"
  3454. # type: "BatchNorm"
  3455. # bottom: "conv8_2/expand"
  3456. # top: "conv8_2/expand/bn"
  3457.  
  3458. # }
  3459. # layer {
  3460. # name: "conv8_2/expand/scale"
  3461. # type: "Scale"
  3462. # bottom: "conv8_2/expand/bn"
  3463. # top: "conv8_2/expand/bn"
  3464. # param {
  3465. # lr_mult: 1
  3466. # decay_mult: 0
  3467. # }
  3468. # param {
  3469. # lr_mult: 1
  3470. # decay_mult: 0
  3471. # }
  3472. # scale_param {
  3473. # bias_term: true
  3474. # }
  3475. # }
  3476. # layer {
  3477. # name: "relu8_2/expand"
  3478. # type: "ReLU"
  3479. # bottom: "conv8_2/expand/bn"
  3480. # top: "conv8_2/expand/bn"
  3481. # }
  3482. # layer {
  3483. # name: "conv8_2/dwise"
  3484. # type: "ConvolutionDepthwise"
  3485. # bottom: "conv8_2/expand/bn"
  3486. # top: "conv8_2/dwise"
  3487. # param {
  3488. # lr_mult: 1
  3489. # decay_mult: 1
  3490. # }
  3491. # convolution_param {
  3492. # num_output: 7680
  3493. # bias_term: false
  3494. # kernel_size: 1
  3495. # group: 7680
  3496. # weight_filler {
  3497. # type: "msra"
  3498. # }
  3499. # engine: CAFFE
  3500. # }
  3501. # }
  3502. # layer {
  3503. # name: "conv8_2/dwise/bn"
  3504. # type: "BatchNorm"
  3505. # bottom: "conv8_2/dwise"
  3506. # top: "conv8_2/dwise/bn"
  3507.  
  3508. # }
  3509. # layer {
  3510. # name: "conv8_2/dwise/scale"
  3511. # type: "Scale"
  3512. # bottom: "conv8_2/dwise/bn"
  3513. # top: "conv8_2/dwise/bn"
  3514. # param {
  3515. # lr_mult: 1
  3516. # decay_mult: 0
  3517. # }
  3518. # param {
  3519. # lr_mult: 1
  3520. # decay_mult: 0
  3521. # }
  3522. # scale_param {
  3523. # bias_term: true
  3524. # }
  3525. # }
  3526. # layer {
  3527. # name: "relu8_2/dwise"
  3528. # type: "ReLU"
  3529. # bottom: "conv8_2/dwise/bn"
  3530. # top: "conv8_2/dwise/bn"
  3531. # }
  3532. # layer {
  3533. # name: "conv8_2/linear"
  3534. # type: "Convolution"
  3535. # bottom: "conv8_2/dwise/bn"
  3536. # top: "conv8_2/linear"
  3537. # param {
  3538. # lr_mult: 1
  3539. # decay_mult: 1
  3540. # }
  3541. # convolution_param {
  3542. # num_output: 1280
  3543. # bias_term: false
  3544. # kernel_size: 1
  3545. # weight_filler {
  3546. # type: "msra"
  3547. # }
  3548. # }
  3549. # }
  3550. # layer {
  3551. # name: "conv8_2/linear/bn"
  3552. # type: "BatchNorm"
  3553. # bottom: "conv8_2/linear"
  3554. # top: "conv8_2/linear/bn"
  3555.  
  3556. # }
  3557. # layer {
  3558. # name: "conv8_2/linear/scale"
  3559. # type: "Scale"
  3560. # bottom: "conv8_2/linear/bn"
  3561. # top: "conv8_2/linear/bn"
  3562. # param {
  3563. # lr_mult: 1
  3564. # decay_mult: 0
  3565. # }
  3566. # param {
  3567. # lr_mult: 1
  3568. # decay_mult: 0
  3569. # }
  3570. # scale_param {
  3571. # bias_term: true
  3572. # }
  3573. # }
  3574. # layer {
  3575. # name: "block_8_2"
  3576. # type: "Eltwise"
  3577. # bottom: "conv8_1/linear/bn"
  3578. # bottom: "conv8_2/linear/bn"
  3579. # top: "block_8_2"
  3580. # }
# (Pastebin page footer — "Add Comment" / sign-in prompt — removed: it is not part of the prototxt and would break parsing.)