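# MobileNet-SSD training prototxt (TRAIN phase only): 640x640 input, 4 classes
# (background + 3), reading a VOC-style LMDB. The dataset paths below
# (Bosch/LISA/LARA) suggest a traffic-light detection task, but the network itself
# is the standard MobileNet backbone with SSD detection heads.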
  1. name: "MobileNet-SSD"
  2. layer {
  3. name: "data"
  4. type: "AnnotatedData"
  5. top: "data"
  6. top: "label"
  7. include {
  8. phase: TRAIN
  9. }
  10. transform_param {
  11. scale: 0.007843
  12. mirror: false
  13. mean_value: 94
  14. mean_value: 97
  15. mean_value: 109
  16. resize_param {
  17. prob: 1.0
  18. resize_mode: WARP
  19. height: 640
  20. width: 640
  21. interp_mode: LINEAR
  22. interp_mode: AREA
  23. interp_mode: NEAREST
  24. interp_mode: CUBIC
  25. interp_mode: LANCZOS4
  26. }
  27. emit_constraint {
  28. emit_type: CENTER
  29. }
  30. distort_param {
  31. brightness_prob: 0.5
  32. brightness_delta: 32.0
  33. contrast_prob: 0.5
  34. contrast_lower: 0.5
  35. contrast_upper: 1.5
  36. hue_prob: 0.5
  37. hue_delta: 18.0
  38. saturation_prob: 0.5
  39. saturation_lower: 0.5
  40. saturation_upper: 1.5
  41. random_order_prob: 0.0
  42. }
  43. }
  44. data_param {
  45. source: "/data/BoschLisaLaraDat_1113/VOC0712/VOC0712_trainval_lmdb/"
  46. batch_size: 4
  47. backend: LMDB
  48. prefetch: 20
  49. }
  50. annotated_data_param {
  51. batch_sampler {
  52. max_sample: 1
  53. max_trials: 1
  54. }
  55. batch_sampler {
  56. sampler {
  57. min_scale: 0.3
  58. max_scale: 1.0
  59. min_aspect_ratio: 1.0
  60. max_aspect_ratio: 1.0
  61. }
  62. sample_constraint {
  63. min_jaccard_overlap: 0.5
  64. }
  65. max_sample: 1
  66. max_trials: 50
  67. }
  68. batch_sampler {
  69. sampler {
  70. min_scale: 0.3
  71. max_scale: 1.0
  72. min_aspect_ratio: 1.0
  73. max_aspect_ratio: 1.0
  74. }
  75. sample_constraint {
  76. min_jaccard_overlap: 0.7
  77. }
  78. max_sample: 1
  79. max_trials: 50
  80. }
  81. batch_sampler {
  82. sampler {
  83. min_scale: 0.3
  84. max_scale: 1.0
  85. min_aspect_ratio: 1.0
  86. max_aspect_ratio: 1.0
  87. }
  88. sample_constraint {
  89. min_jaccard_overlap: 0.9
  90. }
  91. max_sample: 1
  92. max_trials: 50
  93. }
  94. batch_sampler {
  95. sampler {
  96. min_scale: 0.3
  97. max_scale: 1.0
  98. min_aspect_ratio: 1.0
  99. max_aspect_ratio: 1.0
  100. }
  101. sample_constraint {
  102. max_jaccard_overlap: 1.0
  103. }
  104. max_sample: 1
  105. max_trials: 50
  106. }
  107. label_map_file: "/data/BoschLisaLaraDat_1113/VOC0712/labelmap_voc.prototxt"
  108. }
  109. }
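# The AnnotatedData layer above handles all training-time augmentation: photometric
# distortion (brightness/contrast/hue/saturation), randomly sampled crops constrained
# to keep a minimum Jaccard overlap of 0.5/0.7/0.9 with a ground-truth box, and a warp
# resize to 640x640. Pixels are mean-subtracted (94/97/109 per channel) and scaled by
# 0.007843 (~1/127.5). The layers that follow are the MobileNet backbone, each stage
# built as Convolution -> BatchNorm -> Scale -> ReLU.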
  110. layer {
  111. name: "conv0"
  112. type: "Convolution"
  113. bottom: "data"
  114. top: "conv0"
  115. param {
  116. lr_mult: 0.1
  117. decay_mult: 0.1
  118. }
  119. convolution_param {
  120. num_output: 32
  121. bias_term: false
  122. pad: 1
  123. kernel_size: 3
  124. stride: 2
  125. weight_filler {
  126. type: "msra"
  127. }
  128. }
  129. }
  130. layer {
  131. name: "conv0/bn"
  132. type: "BatchNorm"
  133. bottom: "conv0"
  134. top: "conv0"
  135. param {
  136. lr_mult: 0
  137. decay_mult: 0
  138. }
  139. param {
  140. lr_mult: 0
  141. decay_mult: 0
  142. }
  143. param {
  144. lr_mult: 0
  145. decay_mult: 0
  146. }
  147. }
  148. layer {
  149. name: "conv0/scale"
  150. type: "Scale"
  151. bottom: "conv0"
  152. top: "conv0"
  153. param {
  154. lr_mult: 0.1
  155. decay_mult: 0.0
  156. }
  157. param {
  158. lr_mult: 0.2
  159. decay_mult: 0.0
  160. }
  161. scale_param {
  162. filler {
  163. value: 1
  164. }
  165. bias_term: true
  166. bias_filler {
  167. value: 0
  168. }
  169. }
  170. }
  171. layer {
  172. name: "conv0/relu"
  173. type: "ReLU"
  174. bottom: "conv0"
  175. top: "conv0"
  176. }
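# Note on the Conv/BN/Scale/ReLU unit above: the three BatchNorm parameters (running
# mean, running variance, moving-average factor) are frozen with lr_mult 0, while the
# Scale layer supplies the learnable gamma/beta. The reduced lr_mult of 0.1 on the
# backbone weights suggests this net is meant to be fine-tuned from pretrained
# MobileNet weights rather than trained from scratch.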
  177. layer {
  178. name: "conv1/dw"
  179. type: "Convolution"
  180. bottom: "conv0"
  181. top: "conv1/dw"
  182. param {
  183. lr_mult: 0.1
  184. decay_mult: 0.1
  185. }
  186. convolution_param {
  187. num_output: 32
  188. bias_term: false
  189. pad: 1
  190. kernel_size: 3
  191. group: 32
  192. engine: CAFFE
  193. weight_filler {
  194. type: "msra"
  195. }
  196. }
  197. }
  198. layer {
  199. name: "conv1/dw/bn"
  200. type: "BatchNorm"
  201. bottom: "conv1/dw"
  202. top: "conv1/dw"
  203. param {
  204. lr_mult: 0
  205. decay_mult: 0
  206. }
  207. param {
  208. lr_mult: 0
  209. decay_mult: 0
  210. }
  211. param {
  212. lr_mult: 0
  213. decay_mult: 0
  214. }
  215. }
  216. layer {
  217. name: "conv1/dw/scale"
  218. type: "Scale"
  219. bottom: "conv1/dw"
  220. top: "conv1/dw"
  221. param {
  222. lr_mult: 0.1
  223. decay_mult: 0.0
  224. }
  225. param {
  226. lr_mult: 0.2
  227. decay_mult: 0.0
  228. }
  229. scale_param {
  230. filler {
  231. value: 1
  232. }
  233. bias_term: true
  234. bias_filler {
  235. value: 0
  236. }
  237. }
  238. }
  239. layer {
  240. name: "conv1/dw/relu"
  241. type: "ReLU"
  242. bottom: "conv1/dw"
  243. top: "conv1/dw"
  244. }
  245. layer {
  246. name: "conv1"
  247. type: "Convolution"
  248. bottom: "conv1/dw"
  249. top: "conv1"
  250. param {
  251. lr_mult: 0.1
  252. decay_mult: 0.1
  253. }
  254. convolution_param {
  255. num_output: 64
  256. bias_term: false
  257. kernel_size: 1
  258. weight_filler {
  259. type: "msra"
  260. }
  261. }
  262. }
  263. layer {
  264. name: "conv1/bn"
  265. type: "BatchNorm"
  266. bottom: "conv1"
  267. top: "conv1"
  268. param {
  269. lr_mult: 0
  270. decay_mult: 0
  271. }
  272. param {
  273. lr_mult: 0
  274. decay_mult: 0
  275. }
  276. param {
  277. lr_mult: 0
  278. decay_mult: 0
  279. }
  280. }
  281. layer {
  282. name: "conv1/scale"
  283. type: "Scale"
  284. bottom: "conv1"
  285. top: "conv1"
  286. param {
  287. lr_mult: 0.1
  288. decay_mult: 0.0
  289. }
  290. param {
  291. lr_mult: 0.2
  292. decay_mult: 0.0
  293. }
  294. scale_param {
  295. filler {
  296. value: 1
  297. }
  298. bias_term: true
  299. bias_filler {
  300. value: 0
  301. }
  302. }
  303. }
  304. layer {
  305. name: "conv1/relu"
  306. type: "ReLU"
  307. bottom: "conv1"
  308. top: "conv1"
  309. }
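# conv1/dw + conv1 above form one depthwise-separable unit: a 3x3 depthwise convolution
# (group equal to the channel count; engine: CAFFE is commonly chosen here because
# cuDNN's grouped convolution tends to be slow in the depthwise case) followed by a
# 1x1 pointwise convolution that mixes channels. The same unit repeats through conv13,
# with stride-2 depthwise layers at conv2, conv4, conv6 and conv12 halving resolution.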
  310. layer {
  311. name: "conv2/dw"
  312. type: "Convolution"
  313. bottom: "conv1"
  314. top: "conv2/dw"
  315. param {
  316. lr_mult: 0.1
  317. decay_mult: 0.1
  318. }
  319. convolution_param {
  320. num_output: 64
  321. bias_term: false
  322. pad: 1
  323. kernel_size: 3
  324. stride: 2
  325. group: 64
  326. engine: CAFFE
  327. weight_filler {
  328. type: "msra"
  329. }
  330. }
  331. }
  332. layer {
  333. name: "conv2/dw/bn"
  334. type: "BatchNorm"
  335. bottom: "conv2/dw"
  336. top: "conv2/dw"
  337. param {
  338. lr_mult: 0
  339. decay_mult: 0
  340. }
  341. param {
  342. lr_mult: 0
  343. decay_mult: 0
  344. }
  345. param {
  346. lr_mult: 0
  347. decay_mult: 0
  348. }
  349. }
  350. layer {
  351. name: "conv2/dw/scale"
  352. type: "Scale"
  353. bottom: "conv2/dw"
  354. top: "conv2/dw"
  355. param {
  356. lr_mult: 0.1
  357. decay_mult: 0.0
  358. }
  359. param {
  360. lr_mult: 0.2
  361. decay_mult: 0.0
  362. }
  363. scale_param {
  364. filler {
  365. value: 1
  366. }
  367. bias_term: true
  368. bias_filler {
  369. value: 0
  370. }
  371. }
  372. }
  373. layer {
  374. name: "conv2/dw/relu"
  375. type: "ReLU"
  376. bottom: "conv2/dw"
  377. top: "conv2/dw"
  378. }
  379. layer {
  380. name: "conv2"
  381. type: "Convolution"
  382. bottom: "conv2/dw"
  383. top: "conv2"
  384. param {
  385. lr_mult: 0.1
  386. decay_mult: 0.1
  387. }
  388. convolution_param {
  389. num_output: 128
  390. bias_term: false
  391. kernel_size: 1
  392. weight_filler {
  393. type: "msra"
  394. }
  395. }
  396. }
  397. layer {
  398. name: "conv2/bn"
  399. type: "BatchNorm"
  400. bottom: "conv2"
  401. top: "conv2"
  402. param {
  403. lr_mult: 0
  404. decay_mult: 0
  405. }
  406. param {
  407. lr_mult: 0
  408. decay_mult: 0
  409. }
  410. param {
  411. lr_mult: 0
  412. decay_mult: 0
  413. }
  414. }
  415. layer {
  416. name: "conv2/scale"
  417. type: "Scale"
  418. bottom: "conv2"
  419. top: "conv2"
  420. param {
  421. lr_mult: 0.1
  422. decay_mult: 0.0
  423. }
  424. param {
  425. lr_mult: 0.2
  426. decay_mult: 0.0
  427. }
  428. scale_param {
  429. filler {
  430. value: 1
  431. }
  432. bias_term: true
  433. bias_filler {
  434. value: 0
  435. }
  436. }
  437. }
  438. layer {
  439. name: "conv2/relu"
  440. type: "ReLU"
  441. bottom: "conv2"
  442. top: "conv2"
  443. }
  444. layer {
  445. name: "conv3/dw"
  446. type: "Convolution"
  447. bottom: "conv2"
  448. top: "conv3/dw"
  449. param {
  450. lr_mult: 0.1
  451. decay_mult: 0.1
  452. }
  453. convolution_param {
  454. num_output: 128
  455. bias_term: false
  456. pad: 1
  457. kernel_size: 3
  458. group: 128
  459. engine: CAFFE
  460. weight_filler {
  461. type: "msra"
  462. }
  463. }
  464. }
  465. layer {
  466. name: "conv3/dw/bn"
  467. type: "BatchNorm"
  468. bottom: "conv3/dw"
  469. top: "conv3/dw"
  470. param {
  471. lr_mult: 0
  472. decay_mult: 0
  473. }
  474. param {
  475. lr_mult: 0
  476. decay_mult: 0
  477. }
  478. param {
  479. lr_mult: 0
  480. decay_mult: 0
  481. }
  482. }
  483. layer {
  484. name: "conv3/dw/scale"
  485. type: "Scale"
  486. bottom: "conv3/dw"
  487. top: "conv3/dw"
  488. param {
  489. lr_mult: 0.1
  490. decay_mult: 0.0
  491. }
  492. param {
  493. lr_mult: 0.2
  494. decay_mult: 0.0
  495. }
  496. scale_param {
  497. filler {
  498. value: 1
  499. }
  500. bias_term: true
  501. bias_filler {
  502. value: 0
  503. }
  504. }
  505. }
  506. layer {
  507. name: "conv3/dw/relu"
  508. type: "ReLU"
  509. bottom: "conv3/dw"
  510. top: "conv3/dw"
  511. }
  512. layer {
  513. name: "conv3"
  514. type: "Convolution"
  515. bottom: "conv3/dw"
  516. top: "conv3"
  517. param {
  518. lr_mult: 0.1
  519. decay_mult: 0.1
  520. }
  521. convolution_param {
  522. num_output: 128
  523. bias_term: false
  524. kernel_size: 1
  525. weight_filler {
  526. type: "msra"
  527. }
  528. }
  529. }
  530. layer {
  531. name: "conv3/bn"
  532. type: "BatchNorm"
  533. bottom: "conv3"
  534. top: "conv3"
  535. param {
  536. lr_mult: 0
  537. decay_mult: 0
  538. }
  539. param {
  540. lr_mult: 0
  541. decay_mult: 0
  542. }
  543. param {
  544. lr_mult: 0
  545. decay_mult: 0
  546. }
  547. }
  548. layer {
  549. name: "conv3/scale"
  550. type: "Scale"
  551. bottom: "conv3"
  552. top: "conv3"
  553. param {
  554. lr_mult: 0.1
  555. decay_mult: 0.0
  556. }
  557. param {
  558. lr_mult: 0.2
  559. decay_mult: 0.0
  560. }
  561. scale_param {
  562. filler {
  563. value: 1
  564. }
  565. bias_term: true
  566. bias_filler {
  567. value: 0
  568. }
  569. }
  570. }
  571. layer {
  572. name: "conv3/relu"
  573. type: "ReLU"
  574. bottom: "conv3"
  575. top: "conv3"
  576. }
  577. layer {
  578. name: "conv4/dw"
  579. type: "Convolution"
  580. bottom: "conv3"
  581. top: "conv4/dw"
  582. param {
  583. lr_mult: 0.1
  584. decay_mult: 0.1
  585. }
  586. convolution_param {
  587. num_output: 128
  588. bias_term: false
  589. pad: 1
  590. kernel_size: 3
  591. stride: 2
  592. group: 128
  593. engine: CAFFE
  594. weight_filler {
  595. type: "msra"
  596. }
  597. }
  598. }
  599. layer {
  600. name: "conv4/dw/bn"
  601. type: "BatchNorm"
  602. bottom: "conv4/dw"
  603. top: "conv4/dw"
  604. param {
  605. lr_mult: 0
  606. decay_mult: 0
  607. }
  608. param {
  609. lr_mult: 0
  610. decay_mult: 0
  611. }
  612. param {
  613. lr_mult: 0
  614. decay_mult: 0
  615. }
  616. }
  617. layer {
  618. name: "conv4/dw/scale"
  619. type: "Scale"
  620. bottom: "conv4/dw"
  621. top: "conv4/dw"
  622. param {
  623. lr_mult: 0.1
  624. decay_mult: 0.0
  625. }
  626. param {
  627. lr_mult: 0.2
  628. decay_mult: 0.0
  629. }
  630. scale_param {
  631. filler {
  632. value: 1
  633. }
  634. bias_term: true
  635. bias_filler {
  636. value: 0
  637. }
  638. }
  639. }
  640. layer {
  641. name: "conv4/dw/relu"
  642. type: "ReLU"
  643. bottom: "conv4/dw"
  644. top: "conv4/dw"
  645. }
  646. layer {
  647. name: "conv4"
  648. type: "Convolution"
  649. bottom: "conv4/dw"
  650. top: "conv4"
  651. param {
  652. lr_mult: 0.1
  653. decay_mult: 0.1
  654. }
  655. convolution_param {
  656. num_output: 256
  657. bias_term: false
  658. kernel_size: 1
  659. weight_filler {
  660. type: "msra"
  661. }
  662. }
  663. }
  664. layer {
  665. name: "conv4/bn"
  666. type: "BatchNorm"
  667. bottom: "conv4"
  668. top: "conv4"
  669. param {
  670. lr_mult: 0
  671. decay_mult: 0
  672. }
  673. param {
  674. lr_mult: 0
  675. decay_mult: 0
  676. }
  677. param {
  678. lr_mult: 0
  679. decay_mult: 0
  680. }
  681. }
  682. layer {
  683. name: "conv4/scale"
  684. type: "Scale"
  685. bottom: "conv4"
  686. top: "conv4"
  687. param {
  688. lr_mult: 0.1
  689. decay_mult: 0.0
  690. }
  691. param {
  692. lr_mult: 0.2
  693. decay_mult: 0.0
  694. }
  695. scale_param {
  696. filler {
  697. value: 1
  698. }
  699. bias_term: true
  700. bias_filler {
  701. value: 0
  702. }
  703. }
  704. }
  705. layer {
  706. name: "conv4/relu"
  707. type: "ReLU"
  708. bottom: "conv4"
  709. top: "conv4"
  710. }
  711. layer {
  712. name: "conv5/dw"
  713. type: "Convolution"
  714. bottom: "conv4"
  715. top: "conv5/dw"
  716. param {
  717. lr_mult: 0.1
  718. decay_mult: 0.1
  719. }
  720. convolution_param {
  721. num_output: 256
  722. bias_term: false
  723. pad: 1
  724. kernel_size: 3
  725. group: 256
  726. engine: CAFFE
  727. weight_filler {
  728. type: "msra"
  729. }
  730. }
  731. }
  732. layer {
  733. name: "conv5/dw/bn"
  734. type: "BatchNorm"
  735. bottom: "conv5/dw"
  736. top: "conv5/dw"
  737. param {
  738. lr_mult: 0
  739. decay_mult: 0
  740. }
  741. param {
  742. lr_mult: 0
  743. decay_mult: 0
  744. }
  745. param {
  746. lr_mult: 0
  747. decay_mult: 0
  748. }
  749. }
  750. layer {
  751. name: "conv5/dw/scale"
  752. type: "Scale"
  753. bottom: "conv5/dw"
  754. top: "conv5/dw"
  755. param {
  756. lr_mult: 0.1
  757. decay_mult: 0.0
  758. }
  759. param {
  760. lr_mult: 0.2
  761. decay_mult: 0.0
  762. }
  763. scale_param {
  764. filler {
  765. value: 1
  766. }
  767. bias_term: true
  768. bias_filler {
  769. value: 0
  770. }
  771. }
  772. }
  773. layer {
  774. name: "conv5/dw/relu"
  775. type: "ReLU"
  776. bottom: "conv5/dw"
  777. top: "conv5/dw"
  778. }
  779. layer {
  780. name: "conv5"
  781. type: "Convolution"
  782. bottom: "conv5/dw"
  783. top: "conv5"
  784. param {
  785. lr_mult: 0.1
  786. decay_mult: 0.1
  787. }
  788. convolution_param {
  789. num_output: 256
  790. bias_term: false
  791. kernel_size: 1
  792. weight_filler {
  793. type: "msra"
  794. }
  795. }
  796. }
  797. layer {
  798. name: "conv5/bn"
  799. type: "BatchNorm"
  800. bottom: "conv5"
  801. top: "conv5"
  802. param {
  803. lr_mult: 0
  804. decay_mult: 0
  805. }
  806. param {
  807. lr_mult: 0
  808. decay_mult: 0
  809. }
  810. param {
  811. lr_mult: 0
  812. decay_mult: 0
  813. }
  814. }
  815. layer {
  816. name: "conv5/scale"
  817. type: "Scale"
  818. bottom: "conv5"
  819. top: "conv5"
  820. param {
  821. lr_mult: 0.1
  822. decay_mult: 0.0
  823. }
  824. param {
  825. lr_mult: 0.2
  826. decay_mult: 0.0
  827. }
  828. scale_param {
  829. filler {
  830. value: 1
  831. }
  832. bias_term: true
  833. bias_filler {
  834. value: 0
  835. }
  836. }
  837. }
  838. layer {
  839. name: "conv5/relu"
  840. type: "ReLU"
  841. bottom: "conv5"
  842. top: "conv5"
  843. }
  844. layer {
  845. name: "conv6/dw"
  846. type: "Convolution"
  847. bottom: "conv5"
  848. top: "conv6/dw"
  849. param {
  850. lr_mult: 0.1
  851. decay_mult: 0.1
  852. }
  853. convolution_param {
  854. num_output: 256
  855. bias_term: false
  856. pad: 1
  857. kernel_size: 3
  858. stride: 2
  859. group: 256
  860. engine: CAFFE
  861. weight_filler {
  862. type: "msra"
  863. }
  864. }
  865. }
  866. layer {
  867. name: "conv6/dw/bn"
  868. type: "BatchNorm"
  869. bottom: "conv6/dw"
  870. top: "conv6/dw"
  871. param {
  872. lr_mult: 0
  873. decay_mult: 0
  874. }
  875. param {
  876. lr_mult: 0
  877. decay_mult: 0
  878. }
  879. param {
  880. lr_mult: 0
  881. decay_mult: 0
  882. }
  883. }
  884. layer {
  885. name: "conv6/dw/scale"
  886. type: "Scale"
  887. bottom: "conv6/dw"
  888. top: "conv6/dw"
  889. param {
  890. lr_mult: 0.1
  891. decay_mult: 0.0
  892. }
  893. param {
  894. lr_mult: 0.2
  895. decay_mult: 0.0
  896. }
  897. scale_param {
  898. filler {
  899. value: 1
  900. }
  901. bias_term: true
  902. bias_filler {
  903. value: 0
  904. }
  905. }
  906. }
  907. layer {
  908. name: "conv6/dw/relu"
  909. type: "ReLU"
  910. bottom: "conv6/dw"
  911. top: "conv6/dw"
  912. }
  913. layer {
  914. name: "conv6"
  915. type: "Convolution"
  916. bottom: "conv6/dw"
  917. top: "conv6"
  918. param {
  919. lr_mult: 0.1
  920. decay_mult: 0.1
  921. }
  922. convolution_param {
  923. num_output: 512
  924. bias_term: false
  925. kernel_size: 1
  926. weight_filler {
  927. type: "msra"
  928. }
  929. }
  930. }
  931. layer {
  932. name: "conv6/bn"
  933. type: "BatchNorm"
  934. bottom: "conv6"
  935. top: "conv6"
  936. param {
  937. lr_mult: 0
  938. decay_mult: 0
  939. }
  940. param {
  941. lr_mult: 0
  942. decay_mult: 0
  943. }
  944. param {
  945. lr_mult: 0
  946. decay_mult: 0
  947. }
  948. }
  949. layer {
  950. name: "conv6/scale"
  951. type: "Scale"
  952. bottom: "conv6"
  953. top: "conv6"
  954. param {
  955. lr_mult: 0.1
  956. decay_mult: 0.0
  957. }
  958. param {
  959. lr_mult: 0.2
  960. decay_mult: 0.0
  961. }
  962. scale_param {
  963. filler {
  964. value: 1
  965. }
  966. bias_term: true
  967. bias_filler {
  968. value: 0
  969. }
  970. }
  971. }
  972. layer {
  973. name: "conv6/relu"
  974. type: "ReLU"
  975. bottom: "conv6"
  976. top: "conv6"
  977. }
  978. layer {
  979. name: "conv7/dw"
  980. type: "Convolution"
  981. bottom: "conv6"
  982. top: "conv7/dw"
  983. param {
  984. lr_mult: 0.1
  985. decay_mult: 0.1
  986. }
  987. convolution_param {
  988. num_output: 512
  989. bias_term: false
  990. pad: 1
  991. kernel_size: 3
  992. group: 512
  993. engine: CAFFE
  994. weight_filler {
  995. type: "msra"
  996. }
  997. }
  998. }
  999. layer {
  1000. name: "conv7/dw/bn"
  1001. type: "BatchNorm"
  1002. bottom: "conv7/dw"
  1003. top: "conv7/dw"
  1004. param {
  1005. lr_mult: 0
  1006. decay_mult: 0
  1007. }
  1008. param {
  1009. lr_mult: 0
  1010. decay_mult: 0
  1011. }
  1012. param {
  1013. lr_mult: 0
  1014. decay_mult: 0
  1015. }
  1016. }
  1017. layer {
  1018. name: "conv7/dw/scale"
  1019. type: "Scale"
  1020. bottom: "conv7/dw"
  1021. top: "conv7/dw"
  1022. param {
  1023. lr_mult: 0.1
  1024. decay_mult: 0.0
  1025. }
  1026. param {
  1027. lr_mult: 0.2
  1028. decay_mult: 0.0
  1029. }
  1030. scale_param {
  1031. filler {
  1032. value: 1
  1033. }
  1034. bias_term: true
  1035. bias_filler {
  1036. value: 0
  1037. }
  1038. }
  1039. }
  1040. layer {
  1041. name: "conv7/dw/relu"
  1042. type: "ReLU"
  1043. bottom: "conv7/dw"
  1044. top: "conv7/dw"
  1045. }
  1046. layer {
  1047. name: "conv7"
  1048. type: "Convolution"
  1049. bottom: "conv7/dw"
  1050. top: "conv7"
  1051. param {
  1052. lr_mult: 0.1
  1053. decay_mult: 0.1
  1054. }
  1055. convolution_param {
  1056. num_output: 512
  1057. bias_term: false
  1058. kernel_size: 1
  1059. weight_filler {
  1060. type: "msra"
  1061. }
  1062. }
  1063. }
  1064. layer {
  1065. name: "conv7/bn"
  1066. type: "BatchNorm"
  1067. bottom: "conv7"
  1068. top: "conv7"
  1069. param {
  1070. lr_mult: 0
  1071. decay_mult: 0
  1072. }
  1073. param {
  1074. lr_mult: 0
  1075. decay_mult: 0
  1076. }
  1077. param {
  1078. lr_mult: 0
  1079. decay_mult: 0
  1080. }
  1081. }
  1082. layer {
  1083. name: "conv7/scale"
  1084. type: "Scale"
  1085. bottom: "conv7"
  1086. top: "conv7"
  1087. param {
  1088. lr_mult: 0.1
  1089. decay_mult: 0.0
  1090. }
  1091. param {
  1092. lr_mult: 0.2
  1093. decay_mult: 0.0
  1094. }
  1095. scale_param {
  1096. filler {
  1097. value: 1
  1098. }
  1099. bias_term: true
  1100. bias_filler {
  1101. value: 0
  1102. }
  1103. }
  1104. }
  1105. layer {
  1106. name: "conv7/relu"
  1107. type: "ReLU"
  1108. bottom: "conv7"
  1109. top: "conv7"
  1110. }
  1111. layer {
  1112. name: "conv8/dw"
  1113. type: "Convolution"
  1114. bottom: "conv7"
  1115. top: "conv8/dw"
  1116. param {
  1117. lr_mult: 0.1
  1118. decay_mult: 0.1
  1119. }
  1120. convolution_param {
  1121. num_output: 512
  1122. bias_term: false
  1123. pad: 1
  1124. kernel_size: 3
  1125. group: 512
  1126. engine: CAFFE
  1127. weight_filler {
  1128. type: "msra"
  1129. }
  1130. }
  1131. }
  1132. layer {
  1133. name: "conv8/dw/bn"
  1134. type: "BatchNorm"
  1135. bottom: "conv8/dw"
  1136. top: "conv8/dw"
  1137. param {
  1138. lr_mult: 0
  1139. decay_mult: 0
  1140. }
  1141. param {
  1142. lr_mult: 0
  1143. decay_mult: 0
  1144. }
  1145. param {
  1146. lr_mult: 0
  1147. decay_mult: 0
  1148. }
  1149. }
  1150. layer {
  1151. name: "conv8/dw/scale"
  1152. type: "Scale"
  1153. bottom: "conv8/dw"
  1154. top: "conv8/dw"
  1155. param {
  1156. lr_mult: 0.1
  1157. decay_mult: 0.0
  1158. }
  1159. param {
  1160. lr_mult: 0.2
  1161. decay_mult: 0.0
  1162. }
  1163. scale_param {
  1164. filler {
  1165. value: 1
  1166. }
  1167. bias_term: true
  1168. bias_filler {
  1169. value: 0
  1170. }
  1171. }
  1172. }
  1173. layer {
  1174. name: "conv8/dw/relu"
  1175. type: "ReLU"
  1176. bottom: "conv8/dw"
  1177. top: "conv8/dw"
  1178. }
  1179. layer {
  1180. name: "conv8"
  1181. type: "Convolution"
  1182. bottom: "conv8/dw"
  1183. top: "conv8"
  1184. param {
  1185. lr_mult: 0.1
  1186. decay_mult: 0.1
  1187. }
  1188. convolution_param {
  1189. num_output: 512
  1190. bias_term: false
  1191. kernel_size: 1
  1192. weight_filler {
  1193. type: "msra"
  1194. }
  1195. }
  1196. }
  1197. layer {
  1198. name: "conv8/bn"
  1199. type: "BatchNorm"
  1200. bottom: "conv8"
  1201. top: "conv8"
  1202. param {
  1203. lr_mult: 0
  1204. decay_mult: 0
  1205. }
  1206. param {
  1207. lr_mult: 0
  1208. decay_mult: 0
  1209. }
  1210. param {
  1211. lr_mult: 0
  1212. decay_mult: 0
  1213. }
  1214. }
  1215. layer {
  1216. name: "conv8/scale"
  1217. type: "Scale"
  1218. bottom: "conv8"
  1219. top: "conv8"
  1220. param {
  1221. lr_mult: 0.1
  1222. decay_mult: 0.0
  1223. }
  1224. param {
  1225. lr_mult: 0.2
  1226. decay_mult: 0.0
  1227. }
  1228. scale_param {
  1229. filler {
  1230. value: 1
  1231. }
  1232. bias_term: true
  1233. bias_filler {
  1234. value: 0
  1235. }
  1236. }
  1237. }
  1238. layer {
  1239. name: "conv8/relu"
  1240. type: "ReLU"
  1241. bottom: "conv8"
  1242. top: "conv8"
  1243. }
  1244. layer {
  1245. name: "conv9/dw"
  1246. type: "Convolution"
  1247. bottom: "conv8"
  1248. top: "conv9/dw"
  1249. param {
  1250. lr_mult: 0.1
  1251. decay_mult: 0.1
  1252. }
  1253. convolution_param {
  1254. num_output: 512
  1255. bias_term: false
  1256. pad: 1
  1257. kernel_size: 3
  1258. group: 512
  1259. engine: CAFFE
  1260. weight_filler {
  1261. type: "msra"
  1262. }
  1263. }
  1264. }
  1265. layer {
  1266. name: "conv9/dw/bn"
  1267. type: "BatchNorm"
  1268. bottom: "conv9/dw"
  1269. top: "conv9/dw"
  1270. param {
  1271. lr_mult: 0
  1272. decay_mult: 0
  1273. }
  1274. param {
  1275. lr_mult: 0
  1276. decay_mult: 0
  1277. }
  1278. param {
  1279. lr_mult: 0
  1280. decay_mult: 0
  1281. }
  1282. }
  1283. layer {
  1284. name: "conv9/dw/scale"
  1285. type: "Scale"
  1286. bottom: "conv9/dw"
  1287. top: "conv9/dw"
  1288. param {
  1289. lr_mult: 0.1
  1290. decay_mult: 0.0
  1291. }
  1292. param {
  1293. lr_mult: 0.2
  1294. decay_mult: 0.0
  1295. }
  1296. scale_param {
  1297. filler {
  1298. value: 1
  1299. }
  1300. bias_term: true
  1301. bias_filler {
  1302. value: 0
  1303. }
  1304. }
  1305. }
  1306. layer {
  1307. name: "conv9/dw/relu"
  1308. type: "ReLU"
  1309. bottom: "conv9/dw"
  1310. top: "conv9/dw"
  1311. }
  1312. layer {
  1313. name: "conv9"
  1314. type: "Convolution"
  1315. bottom: "conv9/dw"
  1316. top: "conv9"
  1317. param {
  1318. lr_mult: 0.1
  1319. decay_mult: 0.1
  1320. }
  1321. convolution_param {
  1322. num_output: 512
  1323. bias_term: false
  1324. kernel_size: 1
  1325. weight_filler {
  1326. type: "msra"
  1327. }
  1328. }
  1329. }
  1330. layer {
  1331. name: "conv9/bn"
  1332. type: "BatchNorm"
  1333. bottom: "conv9"
  1334. top: "conv9"
  1335. param {
  1336. lr_mult: 0
  1337. decay_mult: 0
  1338. }
  1339. param {
  1340. lr_mult: 0
  1341. decay_mult: 0
  1342. }
  1343. param {
  1344. lr_mult: 0
  1345. decay_mult: 0
  1346. }
  1347. }
  1348. layer {
  1349. name: "conv9/scale"
  1350. type: "Scale"
  1351. bottom: "conv9"
  1352. top: "conv9"
  1353. param {
  1354. lr_mult: 0.1
  1355. decay_mult: 0.0
  1356. }
  1357. param {
  1358. lr_mult: 0.2
  1359. decay_mult: 0.0
  1360. }
  1361. scale_param {
  1362. filler {
  1363. value: 1
  1364. }
  1365. bias_term: true
  1366. bias_filler {
  1367. value: 0
  1368. }
  1369. }
  1370. }
  1371. layer {
  1372. name: "conv9/relu"
  1373. type: "ReLU"
  1374. bottom: "conv9"
  1375. top: "conv9"
  1376. }
  1377. layer {
  1378. name: "conv10/dw"
  1379. type: "Convolution"
  1380. bottom: "conv9"
  1381. top: "conv10/dw"
  1382. param {
  1383. lr_mult: 0.1
  1384. decay_mult: 0.1
  1385. }
  1386. convolution_param {
  1387. num_output: 512
  1388. bias_term: false
  1389. pad: 1
  1390. kernel_size: 3
  1391. group: 512
  1392. engine: CAFFE
  1393. weight_filler {
  1394. type: "msra"
  1395. }
  1396. }
  1397. }
  1398. layer {
  1399. name: "conv10/dw/bn"
  1400. type: "BatchNorm"
  1401. bottom: "conv10/dw"
  1402. top: "conv10/dw"
  1403. param {
  1404. lr_mult: 0
  1405. decay_mult: 0
  1406. }
  1407. param {
  1408. lr_mult: 0
  1409. decay_mult: 0
  1410. }
  1411. param {
  1412. lr_mult: 0
  1413. decay_mult: 0
  1414. }
  1415. }
  1416. layer {
  1417. name: "conv10/dw/scale"
  1418. type: "Scale"
  1419. bottom: "conv10/dw"
  1420. top: "conv10/dw"
  1421. param {
  1422. lr_mult: 0.1
  1423. decay_mult: 0.0
  1424. }
  1425. param {
  1426. lr_mult: 0.2
  1427. decay_mult: 0.0
  1428. }
  1429. scale_param {
  1430. filler {
  1431. value: 1
  1432. }
  1433. bias_term: true
  1434. bias_filler {
  1435. value: 0
  1436. }
  1437. }
  1438. }
  1439. layer {
  1440. name: "conv10/dw/relu"
  1441. type: "ReLU"
  1442. bottom: "conv10/dw"
  1443. top: "conv10/dw"
  1444. }
  1445. layer {
  1446. name: "conv10"
  1447. type: "Convolution"
  1448. bottom: "conv10/dw"
  1449. top: "conv10"
  1450. param {
  1451. lr_mult: 0.1
  1452. decay_mult: 0.1
  1453. }
  1454. convolution_param {
  1455. num_output: 512
  1456. bias_term: false
  1457. kernel_size: 1
  1458. weight_filler {
  1459. type: "msra"
  1460. }
  1461. }
  1462. }
  1463. layer {
  1464. name: "conv10/bn"
  1465. type: "BatchNorm"
  1466. bottom: "conv10"
  1467. top: "conv10"
  1468. param {
  1469. lr_mult: 0
  1470. decay_mult: 0
  1471. }
  1472. param {
  1473. lr_mult: 0
  1474. decay_mult: 0
  1475. }
  1476. param {
  1477. lr_mult: 0
  1478. decay_mult: 0
  1479. }
  1480. }
  1481. layer {
  1482. name: "conv10/scale"
  1483. type: "Scale"
  1484. bottom: "conv10"
  1485. top: "conv10"
  1486. param {
  1487. lr_mult: 0.1
  1488. decay_mult: 0.0
  1489. }
  1490. param {
  1491. lr_mult: 0.2
  1492. decay_mult: 0.0
  1493. }
  1494. scale_param {
  1495. filler {
  1496. value: 1
  1497. }
  1498. bias_term: true
  1499. bias_filler {
  1500. value: 0
  1501. }
  1502. }
  1503. }
  1504. layer {
  1505. name: "conv10/relu"
  1506. type: "ReLU"
  1507. bottom: "conv10"
  1508. top: "conv10"
  1509. }
  1510. layer {
  1511. name: "conv11/dw"
  1512. type: "Convolution"
  1513. bottom: "conv10"
  1514. top: "conv11/dw"
  1515. param {
  1516. lr_mult: 0.1
  1517. decay_mult: 0.1
  1518. }
  1519. convolution_param {
  1520. num_output: 512
  1521. bias_term: false
  1522. pad: 1
  1523. kernel_size: 3
  1524. group: 512
  1525. engine: CAFFE
  1526. weight_filler {
  1527. type: "msra"
  1528. }
  1529. }
  1530. }
  1531. layer {
  1532. name: "conv11/dw/bn"
  1533. type: "BatchNorm"
  1534. bottom: "conv11/dw"
  1535. top: "conv11/dw"
  1536. param {
  1537. lr_mult: 0
  1538. decay_mult: 0
  1539. }
  1540. param {
  1541. lr_mult: 0
  1542. decay_mult: 0
  1543. }
  1544. param {
  1545. lr_mult: 0
  1546. decay_mult: 0
  1547. }
  1548. }
  1549. layer {
  1550. name: "conv11/dw/scale"
  1551. type: "Scale"
  1552. bottom: "conv11/dw"
  1553. top: "conv11/dw"
  1554. param {
  1555. lr_mult: 0.1
  1556. decay_mult: 0.0
  1557. }
  1558. param {
  1559. lr_mult: 0.2
  1560. decay_mult: 0.0
  1561. }
  1562. scale_param {
  1563. filler {
  1564. value: 1
  1565. }
  1566. bias_term: true
  1567. bias_filler {
  1568. value: 0
  1569. }
  1570. }
  1571. }
  1572. layer {
  1573. name: "conv11/dw/relu"
  1574. type: "ReLU"
  1575. bottom: "conv11/dw"
  1576. top: "conv11/dw"
  1577. }
  1578. layer {
  1579. name: "conv11"
  1580. type: "Convolution"
  1581. bottom: "conv11/dw"
  1582. top: "conv11"
  1583. param {
  1584. lr_mult: 0.1
  1585. decay_mult: 0.1
  1586. }
  1587. convolution_param {
  1588. num_output: 512
  1589. bias_term: false
  1590. kernel_size: 1
  1591. weight_filler {
  1592. type: "msra"
  1593. }
  1594. }
  1595. }
  1596. layer {
  1597. name: "conv11/bn"
  1598. type: "BatchNorm"
  1599. bottom: "conv11"
  1600. top: "conv11"
  1601. param {
  1602. lr_mult: 0
  1603. decay_mult: 0
  1604. }
  1605. param {
  1606. lr_mult: 0
  1607. decay_mult: 0
  1608. }
  1609. param {
  1610. lr_mult: 0
  1611. decay_mult: 0
  1612. }
  1613. }
  1614. layer {
  1615. name: "conv11/scale"
  1616. type: "Scale"
  1617. bottom: "conv11"
  1618. top: "conv11"
  1619. param {
  1620. lr_mult: 0.1
  1621. decay_mult: 0.0
  1622. }
  1623. param {
  1624. lr_mult: 0.2
  1625. decay_mult: 0.0
  1626. }
  1627. scale_param {
  1628. filler {
  1629. value: 1
  1630. }
  1631. bias_term: true
  1632. bias_filler {
  1633. value: 0
  1634. }
  1635. }
  1636. }
  1637. layer {
  1638. name: "conv11/relu"
  1639. type: "ReLU"
  1640. bottom: "conv11"
  1641. top: "conv11"
  1642. }
  1643. layer {
  1644. name: "conv12/dw"
  1645. type: "Convolution"
  1646. bottom: "conv11"
  1647. top: "conv12/dw"
  1648. param {
  1649. lr_mult: 0.1
  1650. decay_mult: 0.1
  1651. }
  1652. convolution_param {
  1653. num_output: 512
  1654. bias_term: false
  1655. pad: 1
  1656. kernel_size: 3
  1657. stride: 2
  1658. group: 512
  1659. engine: CAFFE
  1660. weight_filler {
  1661. type: "msra"
  1662. }
  1663. }
  1664. }
  1665. layer {
  1666. name: "conv12/dw/bn"
  1667. type: "BatchNorm"
  1668. bottom: "conv12/dw"
  1669. top: "conv12/dw"
  1670. param {
  1671. lr_mult: 0
  1672. decay_mult: 0
  1673. }
  1674. param {
  1675. lr_mult: 0
  1676. decay_mult: 0
  1677. }
  1678. param {
  1679. lr_mult: 0
  1680. decay_mult: 0
  1681. }
  1682. }
  1683. layer {
  1684. name: "conv12/dw/scale"
  1685. type: "Scale"
  1686. bottom: "conv12/dw"
  1687. top: "conv12/dw"
  1688. param {
  1689. lr_mult: 0.1
  1690. decay_mult: 0.0
  1691. }
  1692. param {
  1693. lr_mult: 0.2
  1694. decay_mult: 0.0
  1695. }
  1696. scale_param {
  1697. filler {
  1698. value: 1
  1699. }
  1700. bias_term: true
  1701. bias_filler {
  1702. value: 0
  1703. }
  1704. }
  1705. }
  1706. layer {
  1707. name: "conv12/dw/relu"
  1708. type: "ReLU"
  1709. bottom: "conv12/dw"
  1710. top: "conv12/dw"
  1711. }
  1712. layer {
  1713. name: "conv12"
  1714. type: "Convolution"
  1715. bottom: "conv12/dw"
  1716. top: "conv12"
  1717. param {
  1718. lr_mult: 0.1
  1719. decay_mult: 0.1
  1720. }
  1721. convolution_param {
  1722. num_output: 1024
  1723. bias_term: false
  1724. kernel_size: 1
  1725. weight_filler {
  1726. type: "msra"
  1727. }
  1728. }
  1729. }
  1730. layer {
  1731. name: "conv12/bn"
  1732. type: "BatchNorm"
  1733. bottom: "conv12"
  1734. top: "conv12"
  1735. param {
  1736. lr_mult: 0
  1737. decay_mult: 0
  1738. }
  1739. param {
  1740. lr_mult: 0
  1741. decay_mult: 0
  1742. }
  1743. param {
  1744. lr_mult: 0
  1745. decay_mult: 0
  1746. }
  1747. }
  1748. layer {
  1749. name: "conv12/scale"
  1750. type: "Scale"
  1751. bottom: "conv12"
  1752. top: "conv12"
  1753. param {
  1754. lr_mult: 0.1
  1755. decay_mult: 0.0
  1756. }
  1757. param {
  1758. lr_mult: 0.2
  1759. decay_mult: 0.0
  1760. }
  1761. scale_param {
  1762. filler {
  1763. value: 1
  1764. }
  1765. bias_term: true
  1766. bias_filler {
  1767. value: 0
  1768. }
  1769. }
  1770. }
  1771. layer {
  1772. name: "conv12/relu"
  1773. type: "ReLU"
  1774. bottom: "conv12"
  1775. top: "conv12"
  1776. }
  1777. layer {
  1778. name: "conv13/dw"
  1779. type: "Convolution"
  1780. bottom: "conv12"
  1781. top: "conv13/dw"
  1782. param {
  1783. lr_mult: 0.1
  1784. decay_mult: 0.1
  1785. }
  1786. convolution_param {
  1787. num_output: 1024
  1788. bias_term: false
  1789. pad: 1
  1790. kernel_size: 3
  1791. group: 1024
  1792. engine: CAFFE
  1793. weight_filler {
  1794. type: "msra"
  1795. }
  1796. }
  1797. }
  1798. layer {
  1799. name: "conv13/dw/bn"
  1800. type: "BatchNorm"
  1801. bottom: "conv13/dw"
  1802. top: "conv13/dw"
  1803. param {
  1804. lr_mult: 0
  1805. decay_mult: 0
  1806. }
  1807. param {
  1808. lr_mult: 0
  1809. decay_mult: 0
  1810. }
  1811. param {
  1812. lr_mult: 0
  1813. decay_mult: 0
  1814. }
  1815. }
  1816. layer {
  1817. name: "conv13/dw/scale"
  1818. type: "Scale"
  1819. bottom: "conv13/dw"
  1820. top: "conv13/dw"
  1821. param {
  1822. lr_mult: 0.1
  1823. decay_mult: 0.0
  1824. }
  1825. param {
  1826. lr_mult: 0.2
  1827. decay_mult: 0.0
  1828. }
  1829. scale_param {
  1830. filler {
  1831. value: 1
  1832. }
  1833. bias_term: true
  1834. bias_filler {
  1835. value: 0
  1836. }
  1837. }
  1838. }
  1839. layer {
  1840. name: "conv13/dw/relu"
  1841. type: "ReLU"
  1842. bottom: "conv13/dw"
  1843. top: "conv13/dw"
  1844. }
  1845. layer {
  1846. name: "conv13"
  1847. type: "Convolution"
  1848. bottom: "conv13/dw"
  1849. top: "conv13"
  1850. param {
  1851. lr_mult: 0.1
  1852. decay_mult: 0.1
  1853. }
  1854. convolution_param {
  1855. num_output: 1024
  1856. bias_term: false
  1857. kernel_size: 1
  1858. weight_filler {
  1859. type: "msra"
  1860. }
  1861. }
  1862. }
  1863. layer {
  1864. name: "conv13/bn"
  1865. type: "BatchNorm"
  1866. bottom: "conv13"
  1867. top: "conv13"
  1868. param {
  1869. lr_mult: 0
  1870. decay_mult: 0
  1871. }
  1872. param {
  1873. lr_mult: 0
  1874. decay_mult: 0
  1875. }
  1876. param {
  1877. lr_mult: 0
  1878. decay_mult: 0
  1879. }
  1880. }
  1881. layer {
  1882. name: "conv13/scale"
  1883. type: "Scale"
  1884. bottom: "conv13"
  1885. top: "conv13"
  1886. param {
  1887. lr_mult: 0.1
  1888. decay_mult: 0.0
  1889. }
  1890. param {
  1891. lr_mult: 0.2
  1892. decay_mult: 0.0
  1893. }
  1894. scale_param {
  1895. filler {
  1896. value: 1
  1897. }
  1898. bias_term: true
  1899. bias_filler {
  1900. value: 0
  1901. }
  1902. }
  1903. }
  1904. layer {
  1905. name: "conv13/relu"
  1906. type: "ReLU"
  1907. bottom: "conv13"
  1908. top: "conv13"
  1909. }
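# conv11 (stride 16, roughly 40x40 for the 640x640 input) and conv13 (stride 32,
# roughly 20x20) are the first two SSD source layers. The conv14_* and conv15_* layers
# below are the extra SSD feature layers: a 1x1 channel-reduction conv followed by a
# 3x3 stride-2 conv, giving two further source layers at stride 64 and stride 128
# (about 10x10 and 5x5 maps).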
  1910. layer {
  1911. name: "conv14_1"
  1912. type: "Convolution"
  1913. bottom: "conv13"
  1914. top: "conv14_1"
  1915. param {
  1916. lr_mult: 0.1
  1917. decay_mult: 0.1
  1918. }
  1919. convolution_param {
  1920. num_output: 256
  1921. bias_term: false
  1922. kernel_size: 1
  1923. weight_filler {
  1924. type: "msra"
  1925. }
  1926. }
  1927. }
  1928. layer {
  1929. name: "conv14_1/bn"
  1930. type: "BatchNorm"
  1931. bottom: "conv14_1"
  1932. top: "conv14_1"
  1933. param {
  1934. lr_mult: 0
  1935. decay_mult: 0
  1936. }
  1937. param {
  1938. lr_mult: 0
  1939. decay_mult: 0
  1940. }
  1941. param {
  1942. lr_mult: 0
  1943. decay_mult: 0
  1944. }
  1945. }
  1946. layer {
  1947. name: "conv14_1/scale"
  1948. type: "Scale"
  1949. bottom: "conv14_1"
  1950. top: "conv14_1"
  1951. param {
  1952. lr_mult: 0.1
  1953. decay_mult: 0.0
  1954. }
  1955. param {
  1956. lr_mult: 0.2
  1957. decay_mult: 0.0
  1958. }
  1959. scale_param {
  1960. filler {
  1961. value: 1
  1962. }
  1963. bias_term: true
  1964. bias_filler {
  1965. value: 0
  1966. }
  1967. }
  1968. }
  1969. layer {
  1970. name: "conv14_1/relu"
  1971. type: "ReLU"
  1972. bottom: "conv14_1"
  1973. top: "conv14_1"
  1974. }
  1975. layer {
  1976. name: "conv14_2"
  1977. type: "Convolution"
  1978. bottom: "conv14_1"
  1979. top: "conv14_2"
  1980. param {
  1981. lr_mult: 0.1
  1982. decay_mult: 0.1
  1983. }
  1984. convolution_param {
  1985. num_output: 512
  1986. bias_term: false
  1987. pad: 1
  1988. kernel_size: 3
  1989. stride: 2
  1990. weight_filler {
  1991. type: "msra"
  1992. }
  1993. }
  1994. }
  1995. layer {
  1996. name: "conv14_2/bn"
  1997. type: "BatchNorm"
  1998. bottom: "conv14_2"
  1999. top: "conv14_2"
  2000. param {
  2001. lr_mult: 0
  2002. decay_mult: 0
  2003. }
  2004. param {
  2005. lr_mult: 0
  2006. decay_mult: 0
  2007. }
  2008. param {
  2009. lr_mult: 0
  2010. decay_mult: 0
  2011. }
  2012. }
  2013. layer {
  2014. name: "conv14_2/scale"
  2015. type: "Scale"
  2016. bottom: "conv14_2"
  2017. top: "conv14_2"
  2018. param {
  2019. lr_mult: 0.1
  2020. decay_mult: 0.0
  2021. }
  2022. param {
  2023. lr_mult: 0.2
  2024. decay_mult: 0.0
  2025. }
  2026. scale_param {
  2027. filler {
  2028. value: 1
  2029. }
  2030. bias_term: true
  2031. bias_filler {
  2032. value: 0
  2033. }
  2034. }
  2035. }
  2036. layer {
  2037. name: "conv14_2/relu"
  2038. type: "ReLU"
  2039. bottom: "conv14_2"
  2040. top: "conv14_2"
  2041. }
  2042. layer {
  2043. name: "conv15_1"
  2044. type: "Convolution"
  2045. bottom: "conv14_2"
  2046. top: "conv15_1"
  2047. param {
  2048. lr_mult: 0.1
  2049. decay_mult: 0.1
  2050. }
  2051. convolution_param {
  2052. num_output: 128
  2053. bias_term: false
  2054. kernel_size: 1
  2055. weight_filler {
  2056. type: "msra"
  2057. }
  2058. }
  2059. }
  2060. layer {
  2061. name: "conv15_1/bn"
  2062. type: "BatchNorm"
  2063. bottom: "conv15_1"
  2064. top: "conv15_1"
  2065. param {
  2066. lr_mult: 0
  2067. decay_mult: 0
  2068. }
  2069. param {
  2070. lr_mult: 0
  2071. decay_mult: 0
  2072. }
  2073. param {
  2074. lr_mult: 0
  2075. decay_mult: 0
  2076. }
  2077. }
  2078. layer {
  2079. name: "conv15_1/scale"
  2080. type: "Scale"
  2081. bottom: "conv15_1"
  2082. top: "conv15_1"
  2083. param {
  2084. lr_mult: 0.1
  2085. decay_mult: 0.0
  2086. }
  2087. param {
  2088. lr_mult: 0.2
  2089. decay_mult: 0.0
  2090. }
  2091. scale_param {
  2092. filler {
  2093. value: 1
  2094. }
  2095. bias_term: true
  2096. bias_filler {
  2097. value: 0
  2098. }
  2099. }
  2100. }
  2101. layer {
  2102. name: "conv15_1/relu"
  2103. type: "ReLU"
  2104. bottom: "conv15_1"
  2105. top: "conv15_1"
  2106. }
  2107. layer {
  2108. name: "conv15_2"
  2109. type: "Convolution"
  2110. bottom: "conv15_1"
  2111. top: "conv15_2"
  2112. param {
  2113. lr_mult: 0.1
  2114. decay_mult: 0.1
  2115. }
  2116. convolution_param {
  2117. num_output: 256
  2118. bias_term: false
  2119. pad: 1
  2120. kernel_size: 3
  2121. stride: 2
  2122. weight_filler {
  2123. type: "msra"
  2124. }
  2125. }
  2126. }
  2127. layer {
  2128. name: "conv15_2/bn"
  2129. type: "BatchNorm"
  2130. bottom: "conv15_2"
  2131. top: "conv15_2"
  2132. param {
  2133. lr_mult: 0
  2134. decay_mult: 0
  2135. }
  2136. param {
  2137. lr_mult: 0
  2138. decay_mult: 0
  2139. }
  2140. param {
  2141. lr_mult: 0
  2142. decay_mult: 0
  2143. }
  2144. }
  2145. layer {
  2146. name: "conv15_2/scale"
  2147. type: "Scale"
  2148. bottom: "conv15_2"
  2149. top: "conv15_2"
  2150. param {
  2151. lr_mult: 0.1
  2152. decay_mult: 0.0
  2153. }
  2154. param {
  2155. lr_mult: 0.2
  2156. decay_mult: 0.0
  2157. }
  2158. scale_param {
  2159. filler {
  2160. value: 1
  2161. }
  2162. bias_term: true
  2163. bias_filler {
  2164. value: 0
  2165. }
  2166. }
  2167. }
  2168. layer {
  2169. name: "conv15_2/relu"
  2170. type: "ReLU"
  2171. bottom: "conv15_2"
  2172. top: "conv15_2"
  2173. }
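# Detection heads: each source layer (conv11, conv13, conv14_2, conv15_2) gets a 1x1
# localization conv, a 1x1 confidence conv, a Permute to NHWC plus Flatten so per-prior
# predictions can be concatenated across scales, and a PriorBox layer that generates
# the anchors. The "_0929" / "_new_0929" layer names differ from their tops, presumably
# so these prediction weights are re-initialized rather than copied when fine-tuning
# from an earlier snapshot.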
  2174. layer {
  2175. name: "conv11_mbox_loc_0929"
  2176. type: "Convolution"
  2177. bottom: "conv11"
  2178. top: "conv11_mbox_loc"
  2179. param {
  2180. lr_mult: 0.1
  2181. decay_mult: 0.1
  2182. }
  2183. param {
  2184. lr_mult: 0.2
  2185. decay_mult: 0.0
  2186. }
  2187. convolution_param {
  2188. num_output: 12
  2189. kernel_size: 1
  2190. weight_filler {
  2191. type: "msra"
  2192. }
  2193. bias_filler {
  2194. type: "constant"
  2195. value: 0.0
  2196. }
  2197. }
  2198. }
  2199. layer {
  2200. name: "conv11_mbox_loc_perm"
  2201. type: "Permute"
  2202. bottom: "conv11_mbox_loc"
  2203. top: "conv11_mbox_loc_perm"
  2204. permute_param {
  2205. order: 0
  2206. order: 2
  2207. order: 3
  2208. order: 1
  2209. }
  2210. }
  2211. layer {
  2212. name: "conv11_mbox_loc_flat"
  2213. type: "Flatten"
  2214. bottom: "conv11_mbox_loc_perm"
  2215. top: "conv11_mbox_loc_flat"
  2216. flatten_param {
  2217. axis: 1
  2218. }
  2219. }
  2220. layer {
  2221. name: "conv11_mbox_conf_new_0929"
  2222. type: "Convolution"
  2223. bottom: "conv11"
  2224. top: "conv11_mbox_conf"
  2225. param {
  2226. lr_mult: 1.0
  2227. decay_mult: 1.0
  2228. }
  2229. param {
  2230. lr_mult: 2.0
  2231. decay_mult: 0.0
  2232. }
  2233. convolution_param {
  2234. num_output: 12
  2235. kernel_size: 1
  2236. weight_filler {
  2237. type: "msra"
  2238. }
  2239. bias_filler {
  2240. type: "constant"
  2241. value: 0.0
  2242. }
  2243. }
  2244. }
  2245. layer {
  2246. name: "conv11_mbox_conf_perm"
  2247. type: "Permute"
  2248. bottom: "conv11_mbox_conf"
  2249. top: "conv11_mbox_conf_perm"
  2250. permute_param {
  2251. order: 0
  2252. order: 2
  2253. order: 3
  2254. order: 1
  2255. }
  2256. }
  2257. layer {
  2258. name: "conv11_mbox_conf_flat"
  2259. type: "Flatten"
  2260. bottom: "conv11_mbox_conf_perm"
  2261. top: "conv11_mbox_conf_flat"
  2262. flatten_param {
  2263. axis: 1
  2264. }
  2265. }
  2266. layer {
  2267. name: "conv11_mbox_priorbox"
  2268. type: "PriorBox"
  2269. bottom: "conv11"
  2270. bottom: "data"
  2271. top: "conv11_mbox_priorbox"
  2272. prior_box_param {
  2273. min_size: 6.0
  2274. aspect_ratio: 4.0
  2275. flip: true
  2276. clip: false
  2277. variance: 0.1
  2278. variance: 0.1
  2279. variance: 0.2
  2280. variance: 0.2
  2281. offset: 0.5
  2282. }
  2283. }
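# Channel-count check for the head above, assuming the usual Caffe-SSD prior-counting
# rule (one prior at min_size for aspect ratio 1, one per extra aspect_ratio plus its
# reciprocal when flip is true, and one more at sqrt(min*max) when max_size is set):
# conv11 lists only min_size and aspect_ratio 4.0 with flip, so 3 priors per location
# -> 3*4 = 12 loc channels and 3 * 4 classes = 12 conf channels. The conv13, conv14_2
# and conv15_2 heads below also set max_size, so they use 4 priors per location
# -> 16 channels each.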
  2284. layer {
  2285. name: "conv13_mbox_loc_0929"
  2286. type: "Convolution"
  2287. bottom: "conv13"
  2288. top: "conv13_mbox_loc"
  2289. param {
  2290. lr_mult: 0.1
  2291. decay_mult: 0.1
  2292. }
  2293. param {
  2294. lr_mult: 0.2
  2295. decay_mult: 0.0
  2296. }
  2297. convolution_param {
  2298. num_output: 16
  2299. kernel_size: 1
  2300. weight_filler {
  2301. type: "msra"
  2302. }
  2303. bias_filler {
  2304. type: "constant"
  2305. value: 0.0
  2306. }
  2307. }
  2308. }
  2309. layer {
  2310. name: "conv13_mbox_loc_perm"
  2311. type: "Permute"
  2312. bottom: "conv13_mbox_loc"
  2313. top: "conv13_mbox_loc_perm"
  2314. permute_param {
  2315. order: 0
  2316. order: 2
  2317. order: 3
  2318. order: 1
  2319. }
  2320. }
  2321. layer {
  2322. name: "conv13_mbox_loc_flat"
  2323. type: "Flatten"
  2324. bottom: "conv13_mbox_loc_perm"
  2325. top: "conv13_mbox_loc_flat"
  2326. flatten_param {
  2327. axis: 1
  2328. }
  2329. }
  2330. layer {
  2331. name: "conv13_mbox_conf_new_0929"
  2332. type: "Convolution"
  2333. bottom: "conv13"
  2334. top: "conv13_mbox_conf"
  2335. param {
  2336. lr_mult: 1.0
  2337. decay_mult: 1.0
  2338. }
  2339. param {
  2340. lr_mult: 2.0
  2341. decay_mult: 0.0
  2342. }
  2343. convolution_param {
  2344. num_output: 16
  2345. kernel_size: 1
  2346. weight_filler {
  2347. type: "msra"
  2348. }
  2349. bias_filler {
  2350. type: "constant"
  2351. value: 0.0
  2352. }
  2353. }
  2354. }
  2355. layer {
  2356. name: "conv13_mbox_conf_perm"
  2357. type: "Permute"
  2358. bottom: "conv13_mbox_conf"
  2359. top: "conv13_mbox_conf_perm"
  2360. permute_param {
  2361. order: 0
  2362. order: 2
  2363. order: 3
  2364. order: 1
  2365. }
  2366. }
  2367. layer {
  2368. name: "conv13_mbox_conf_flat"
  2369. type: "Flatten"
  2370. bottom: "conv13_mbox_conf_perm"
  2371. top: "conv13_mbox_conf_flat"
  2372. flatten_param {
  2373. axis: 1
  2374. }
  2375. }
  2376. layer {
  2377. name: "conv13_mbox_priorbox"
  2378. type: "PriorBox"
  2379. bottom: "conv13"
  2380. bottom: "data"
  2381. top: "conv13_mbox_priorbox"
  2382. prior_box_param {
  2383. min_size: 30.0
  2384. max_size: 55.0
  2385. aspect_ratio: 4.0
  2386. flip: true
  2387. clip: false
  2388. variance: 0.1
  2389. variance: 0.1
  2390. variance: 0.2
  2391. variance: 0.2
  2392. offset: 0.5
  2393. }
  2394. }
  2395. layer {
  2396. name: "conv14_2_mbox_loc_0929"
  2397. type: "Convolution"
  2398. bottom: "conv14_2"
  2399. top: "conv14_2_mbox_loc"
  2400. param {
  2401. lr_mult: 0.1
  2402. decay_mult: 0.1
  2403. }
  2404. param {
  2405. lr_mult: 0.2
  2406. decay_mult: 0.0
  2407. }
  2408. convolution_param {
  2409. num_output: 16
  2410. kernel_size: 1
  2411. weight_filler {
  2412. type: "msra"
  2413. }
  2414. bias_filler {
  2415. type: "constant"
  2416. value: 0.0
  2417. }
  2418. }
  2419. }
  2420. layer {
  2421. name: "conv14_2_mbox_loc_perm"
  2422. type: "Permute"
  2423. bottom: "conv14_2_mbox_loc"
  2424. top: "conv14_2_mbox_loc_perm"
  2425. permute_param {
  2426. order: 0
  2427. order: 2
  2428. order: 3
  2429. order: 1
  2430. }
  2431. }
  2432. layer {
  2433. name: "conv14_2_mbox_loc_flat"
  2434. type: "Flatten"
  2435. bottom: "conv14_2_mbox_loc_perm"
  2436. top: "conv14_2_mbox_loc_flat"
  2437. flatten_param {
  2438. axis: 1
  2439. }
  2440. }
  2441. layer {
  2442. name: "conv14_2_mbox_conf_new_0929"
  2443. type: "Convolution"
  2444. bottom: "conv14_2"
  2445. top: "conv14_2_mbox_conf"
  2446. param {
  2447. lr_mult: 1.0
  2448. decay_mult: 1.0
  2449. }
  2450. param {
  2451. lr_mult: 2.0
  2452. decay_mult: 0.0
  2453. }
  2454. convolution_param {
  2455. num_output: 16
  2456. kernel_size: 1
  2457. weight_filler {
  2458. type: "msra"
  2459. }
  2460. bias_filler {
  2461. type: "constant"
  2462. value: 0.0
  2463. }
  2464. }
  2465. }
  2466. layer {
  2467. name: "conv14_2_mbox_conf_perm"
  2468. type: "Permute"
  2469. bottom: "conv14_2_mbox_conf"
  2470. top: "conv14_2_mbox_conf_perm"
  2471. permute_param {
  2472. order: 0
  2473. order: 2
  2474. order: 3
  2475. order: 1
  2476. }
  2477. }
  2478. layer {
  2479. name: "conv14_2_mbox_conf_flat"
  2480. type: "Flatten"
  2481. bottom: "conv14_2_mbox_conf_perm"
  2482. top: "conv14_2_mbox_conf_flat"
  2483. flatten_param {
  2484. axis: 1
  2485. }
  2486. }
  2487. layer {
  2488. name: "conv14_2_mbox_priorbox"
  2489. type: "PriorBox"
  2490. bottom: "conv14_2"
  2491. bottom: "data"
  2492. top: "conv14_2_mbox_priorbox"
  2493. prior_box_param {
  2494. min_size: 55.0
  2495. max_size: 79.0
  2496. aspect_ratio: 4.0
  2497. flip: true
  2498. clip: false
  2499. variance: 0.1
  2500. variance: 0.1
  2501. variance: 0.2
  2502. variance: 0.2
  2503. offset: 0.5
  2504. }
  2505. }
  2506. layer {
  2507. name: "conv15_2_mbox_loc_0929"
  2508. type: "Convolution"
  2509. bottom: "conv15_2"
  2510. top: "conv15_2_mbox_loc"
  2511. param {
  2512. lr_mult: 0.1
  2513. decay_mult: 0.1
  2514. }
  2515. param {
  2516. lr_mult: 0.2
  2517. decay_mult: 0.0
  2518. }
  2519. convolution_param {
  2520. num_output: 16
  2521. kernel_size: 1
  2522. weight_filler {
  2523. type: "msra"
  2524. }
  2525. bias_filler {
  2526. type: "constant"
  2527. value: 0.0
  2528. }
  2529. }
  2530. }
  2531. layer {
  2532. name: "conv15_2_mbox_loc_perm"
  2533. type: "Permute"
  2534. bottom: "conv15_2_mbox_loc"
  2535. top: "conv15_2_mbox_loc_perm"
  2536. permute_param {
  2537. order: 0
  2538. order: 2
  2539. order: 3
  2540. order: 1
  2541. }
  2542. }
  2543. layer {
  2544. name: "conv15_2_mbox_loc_flat"
  2545. type: "Flatten"
  2546. bottom: "conv15_2_mbox_loc_perm"
  2547. top: "conv15_2_mbox_loc_flat"
  2548. flatten_param {
  2549. axis: 1
  2550. }
  2551. }
  2552. layer {
  2553. name: "conv15_2_mbox_conf_new_0929"
  2554. type: "Convolution"
  2555. bottom: "conv15_2"
  2556. top: "conv15_2_mbox_conf"
  2557. param {
  2558. lr_mult: 1.0
  2559. decay_mult: 1.0
  2560. }
  2561. param {
  2562. lr_mult: 2.0
  2563. decay_mult: 0.0
  2564. }
  2565. convolution_param {
  2566. num_output: 16
  2567. kernel_size: 1
  2568. weight_filler {
  2569. type: "msra"
  2570. }
  2571. bias_filler {
  2572. type: "constant"
  2573. value: 0.0
  2574. }
  2575. }
  2576. }
  2577. layer {
  2578. name: "conv15_2_mbox_conf_perm"
  2579. type: "Permute"
  2580. bottom: "conv15_2_mbox_conf"
  2581. top: "conv15_2_mbox_conf_perm"
  2582. permute_param {
  2583. order: 0
  2584. order: 2
  2585. order: 3
  2586. order: 1
  2587. }
  2588. }
  2589. layer {
  2590. name: "conv15_2_mbox_conf_flat"
  2591. type: "Flatten"
  2592. bottom: "conv15_2_mbox_conf_perm"
  2593. top: "conv15_2_mbox_conf_flat"
  2594. flatten_param {
  2595. axis: 1
  2596. }
  2597. }
  2598. layer {
  2599. name: "conv15_2_mbox_priorbox"
  2600. type: "PriorBox"
  2601. bottom: "conv15_2"
  2602. bottom: "data"
  2603. top: "conv15_2_mbox_priorbox"
  2604. prior_box_param {
  2605. min_size: 79.0
  2606. max_size: 103.0
  2607. aspect_ratio: 4.0
  2608. flip: true
  2609. clip: false
  2610. variance: 0.1
  2611. variance: 0.1
  2612. variance: 0.2
  2613. variance: 0.2
  2614. offset: 0.5
  2615. }
  2616. }
  2617. layer {
  2618. name: "mbox_loc"
  2619. type: "Concat"
  2620. bottom: "conv11_mbox_loc_flat"
  2621. bottom: "conv13_mbox_loc_flat"
  2622. bottom: "conv14_2_mbox_loc_flat"
  2623. bottom: "conv15_2_mbox_loc_flat"
  2624. top: "mbox_loc"
  2625. concat_param {
  2626. axis: 1
  2627. }
  2628. }
  2629. layer {
  2630. name: "mbox_conf"
  2631. type: "Concat"
  2632. bottom: "conv11_mbox_conf_flat"
  2633. bottom: "conv13_mbox_conf_flat"
  2634. bottom: "conv14_2_mbox_conf_flat"
  2635. bottom: "conv15_2_mbox_conf_flat"
  2636. top: "mbox_conf"
  2637. concat_param {
  2638. axis: 1
  2639. }
  2640. }
  2641. layer {
  2642. name: "mbox_priorbox"
  2643. type: "Concat"
  2644. bottom: "conv11_mbox_priorbox"
  2645. bottom: "conv13_mbox_priorbox"
  2646. bottom: "conv14_2_mbox_priorbox"
  2647. bottom: "conv15_2_mbox_priorbox"
  2648. top: "mbox_priorbox"
  2649. concat_param {
  2650. axis: 2
  2651. }
  2652. }
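# With the prior counts above and the feature-map sizes implied by the strides
# (40x40, 20x20, 10x10, 5x5 for the 640x640 input), the concatenated mbox blobs cover
# about 40*40*3 + 20*20*4 + 10*10*4 + 5*5*4 = 6900 priors per image. The MultiBoxLoss
# below matches priors to ground truth at 0.5 overlap, uses SmoothL1 for localization
# and Softmax over the 4 classes (background label 0) for confidence, and mines hard
# negatives at a 3:1 negative:positive ratio.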
  2653. layer {
  2654. name: "mbox_loss"
  2655. type: "MultiBoxLoss"
  2656. bottom: "mbox_loc"
  2657. bottom: "mbox_conf"
  2658. bottom: "mbox_priorbox"
  2659. bottom: "label"
  2660. top: "mbox_loss"
  2661. include {
  2662. phase: TRAIN
  2663. }
  2664. propagate_down: true
  2665. propagate_down: true
  2666. propagate_down: false
  2667. propagate_down: false
  2668. loss_param {
  2669. normalization: VALID
  2670. }
  2671. multibox_loss_param {
  2672. loc_loss_type: SMOOTH_L1
  2673. conf_loss_type: SOFTMAX
  2674. loc_weight: 1.0
  2675. num_classes: 4
  2676. share_location: true
  2677. match_type: PER_PREDICTION
  2678. overlap_threshold: 0.5
  2679. use_prior_for_matching: true
  2680. background_label_id: 0
  2681. use_difficult_gt: true
  2682. neg_pos_ratio: 3.0
  2683. neg_overlap: 0.5
  2684. code_type: CENTER_SIZE
  2685. ignore_cross_boundary_bbox: false
  2686. mining_type: MAX_NEGATIVE
  2687. }
  2688. }
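# A minimal way to use this file (a sketch -- the solver and weights file names are
# assumptions, not part of this paste): point a solver at it via net: "train.prototxt"
# and launch training with the Caffe command-line tool from the SSD branch, e.g.
#   caffe train --solver=solver.prototxt --weights=mobilenet_pretrained.caffemodel --gpu 0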