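# MobileNet-SSD training definition for the Caffe SSD branch.
# Input: annotated LMDB data warped to 640x640, scaled by 0.007843 with per-channel
# mean subtraction, plus SSD-style photometric distortion and batch sampling.
# The detector is trained for 4 classes (background = class 0) via MultiBoxLoss.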
  1. name: "MobileNet-SSD"
  2. layer {
  3. name: "data"
  4. type: "AnnotatedData"
  5. top: "data"
  6. top: "label"
  7. include {
  8. phase: TRAIN
  9. }
  10. transform_param {
  11. scale: 0.007843
  12. mirror: false
  13. mean_value: 94
  14. mean_value: 97
  15. mean_value: 109
  16. resize_param {
  17. prob: 1.0
  18. resize_mode: WARP
  19. height: 640
  20. width: 640
  21. interp_mode: LINEAR
  22. interp_mode: AREA
  23. interp_mode: NEAREST
  24. interp_mode: CUBIC
  25. interp_mode: LANCZOS4
  26. }
  27. emit_constraint {
  28. emit_type: CENTER
  29. }
  30. distort_param {
  31. brightness_prob: 0.5
  32. brightness_delta: 32.0
  33. contrast_prob: 0.5
  34. contrast_lower: 0.5
  35. contrast_upper: 1.5
  36. hue_prob: 0.5
  37. hue_delta: 18.0
  38. saturation_prob: 0.5
  39. saturation_lower: 0.5
  40. saturation_upper: 1.5
  41. random_order_prob: 0.0
  42. }
  43. }
  44. data_param {
  45. source: "/data/BoschLisaLaraDat_1113/VOC0712/VOC0712_trainval_lmdb/"
  46. batch_size: 4
  47. backend: LMDB
  48. prefetch: 20
  49. }
  50. annotated_data_param {
  51. batch_sampler {
  52. max_sample: 1
  53. max_trials: 1
  54. }
  55. batch_sampler {
  56. sampler {
  57. min_scale: 0.3
  58. max_scale: 1.0
  59. min_aspect_ratio: 1.0
  60. max_aspect_ratio: 1.0
  61. }
  62. sample_constraint {
  63. min_jaccard_overlap: 0.5
  64. }
  65. max_sample: 1
  66. max_trials: 50
  67. }
  68. batch_sampler {
  69. sampler {
  70. min_scale: 0.3
  71. max_scale: 1.0
  72. min_aspect_ratio: 1.0
  73. max_aspect_ratio: 1.0
  74. }
  75. sample_constraint {
  76. min_jaccard_overlap: 0.7
  77. }
  78. max_sample: 1
  79. max_trials: 50
  80. }
  81. batch_sampler {
  82. sampler {
  83. min_scale: 0.3
  84. max_scale: 1.0
  85. min_aspect_ratio: 1.0
  86. max_aspect_ratio: 1.0
  87. }
  88. sample_constraint {
  89. min_jaccard_overlap: 0.9
  90. }
  91. max_sample: 1
  92. max_trials: 50
  93. }
  94. batch_sampler {
  95. sampler {
  96. min_scale: 0.3
  97. max_scale: 1.0
  98. min_aspect_ratio: 1.0
  99. max_aspect_ratio: 1.0
  100. }
  101. sample_constraint {
  102. max_jaccard_overlap: 1.0
  103. }
  104. max_sample: 1
  105. max_trials: 50
  106. }
  107. label_map_file: "/data/BoschLisaLaraDat_1113/VOC0712/labelmap_voc.prototxt"
  108. }
  109. }
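# MobileNet-v1-style backbone: conv0 is a standard 3x3/s2 convolution; each following
# block pairs a depthwise 3x3 convolution (convN/dw, group == num_output, engine: CAFFE)
# with a 1x1 pointwise convolution (convN), each followed by BatchNorm + Scale + ReLU.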
  110. layer {
  111. name: "conv0"
  112. type: "Convolution"
  113. bottom: "data"
  114. top: "conv0"
  115. param {
  116. lr_mult: 0.1
  117. decay_mult: 0.1
  118. }
  119. convolution_param {
  120. num_output: 32
  121. bias_term: false
  122. pad: 1
  123. kernel_size: 3
  124. stride: 2
  125. weight_filler {
  126. type: "msra"
  127. }
  128. }
  129. }
  130. layer {
  131. name: "conv0/bn"
  132. type: "BatchNorm"
  133. bottom: "conv0"
  134. top: "conv0"
  135. param {
  136. lr_mult: 0
  137. decay_mult: 0
  138. }
  139. param {
  140. lr_mult: 0
  141. decay_mult: 0
  142. }
  143. param {
  144. lr_mult: 0
  145. decay_mult: 0
  146. }
  147. }
  148. layer {
  149. name: "conv0/scale"
  150. type: "Scale"
  151. bottom: "conv0"
  152. top: "conv0"
  153. param {
  154. lr_mult: 0.1
  155. decay_mult: 0.0
  156. }
  157. param {
  158. lr_mult: 0.2
  159. decay_mult: 0.0
  160. }
  161. scale_param {
  162. filler {
  163. value: 1
  164. }
  165. bias_term: true
  166. bias_filler {
  167. value: 0
  168. }
  169. }
  170. }
  171. layer {
  172. name: "conv0/relu"
  173. type: "ReLU"
  174. bottom: "conv0"
  175. top: "conv0"
  176. }
  177. layer {
  178. name: "conv1/dw"
  179. type: "Convolution"
  180. bottom: "conv0"
  181. top: "conv1/dw"
  182. param {
  183. lr_mult: 0.1
  184. decay_mult: 0.1
  185. }
  186. convolution_param {
  187. num_output: 32
  188. bias_term: false
  189. pad: 1
  190. kernel_size: 3
  191. group: 32
  192. engine: CAFFE
  193. weight_filler {
  194. type: "msra"
  195. }
  196. }
  197. }
  198. layer {
  199. name: "conv1/dw/bn"
  200. type: "BatchNorm"
  201. bottom: "conv1/dw"
  202. top: "conv1/dw"
  203. param {
  204. lr_mult: 0
  205. decay_mult: 0
  206. }
  207. param {
  208. lr_mult: 0
  209. decay_mult: 0
  210. }
  211. param {
  212. lr_mult: 0
  213. decay_mult: 0
  214. }
  215. }
  216. layer {
  217. name: "conv1/dw/scale"
  218. type: "Scale"
  219. bottom: "conv1/dw"
  220. top: "conv1/dw"
  221. param {
  222. lr_mult: 0.1
  223. decay_mult: 0.0
  224. }
  225. param {
  226. lr_mult: 0.2
  227. decay_mult: 0.0
  228. }
  229. scale_param {
  230. filler {
  231. value: 1
  232. }
  233. bias_term: true
  234. bias_filler {
  235. value: 0
  236. }
  237. }
  238. }
  239. layer {
  240. name: "conv1/dw/relu"
  241. type: "ReLU"
  242. bottom: "conv1/dw"
  243. top: "conv1/dw"
  244. }
  245. layer {
  246. name: "conv1"
  247. type: "Convolution"
  248. bottom: "conv1/dw"
  249. top: "conv1"
  250. param {
  251. lr_mult: 0.1
  252. decay_mult: 0.1
  253. }
  254. convolution_param {
  255. num_output: 64
  256. bias_term: false
  257. kernel_size: 1
  258. weight_filler {
  259. type: "msra"
  260. }
  261. }
  262. }
  263. layer {
  264. name: "conv1/bn"
  265. type: "BatchNorm"
  266. bottom: "conv1"
  267. top: "conv1"
  268. param {
  269. lr_mult: 0
  270. decay_mult: 0
  271. }
  272. param {
  273. lr_mult: 0
  274. decay_mult: 0
  275. }
  276. param {
  277. lr_mult: 0
  278. decay_mult: 0
  279. }
  280. }
  281. layer {
  282. name: "conv1/scale"
  283. type: "Scale"
  284. bottom: "conv1"
  285. top: "conv1"
  286. param {
  287. lr_mult: 0.1
  288. decay_mult: 0.0
  289. }
  290. param {
  291. lr_mult: 0.2
  292. decay_mult: 0.0
  293. }
  294. scale_param {
  295. filler {
  296. value: 1
  297. }
  298. bias_term: true
  299. bias_filler {
  300. value: 0
  301. }
  302. }
  303. }
  304. layer {
  305. name: "conv1/relu"
  306. type: "ReLU"
  307. bottom: "conv1"
  308. top: "conv1"
  309. }
  310. layer {
  311. name: "conv2/dw"
  312. type: "Convolution"
  313. bottom: "conv1"
  314. top: "conv2/dw"
  315. param {
  316. lr_mult: 0.1
  317. decay_mult: 0.1
  318. }
  319. convolution_param {
  320. num_output: 64
  321. bias_term: false
  322. pad: 1
  323. kernel_size: 3
  324. stride: 2
  325. group: 64
  326. engine: CAFFE
  327. weight_filler {
  328. type: "msra"
  329. }
  330. }
  331. }
  332. layer {
  333. name: "conv2/dw/bn"
  334. type: "BatchNorm"
  335. bottom: "conv2/dw"
  336. top: "conv2/dw"
  337. param {
  338. lr_mult: 0
  339. decay_mult: 0
  340. }
  341. param {
  342. lr_mult: 0
  343. decay_mult: 0
  344. }
  345. param {
  346. lr_mult: 0
  347. decay_mult: 0
  348. }
  349. }
  350. layer {
  351. name: "conv2/dw/scale"
  352. type: "Scale"
  353. bottom: "conv2/dw"
  354. top: "conv2/dw"
  355. param {
  356. lr_mult: 0.1
  357. decay_mult: 0.0
  358. }
  359. param {
  360. lr_mult: 0.2
  361. decay_mult: 0.0
  362. }
  363. scale_param {
  364. filler {
  365. value: 1
  366. }
  367. bias_term: true
  368. bias_filler {
  369. value: 0
  370. }
  371. }
  372. }
  373. layer {
  374. name: "conv2/dw/relu"
  375. type: "ReLU"
  376. bottom: "conv2/dw"
  377. top: "conv2/dw"
  378. }
  379. layer {
  380. name: "conv2"
  381. type: "Convolution"
  382. bottom: "conv2/dw"
  383. top: "conv2"
  384. param {
  385. lr_mult: 0.1
  386. decay_mult: 0.1
  387. }
  388. convolution_param {
  389. num_output: 128
  390. bias_term: false
  391. kernel_size: 1
  392. weight_filler {
  393. type: "msra"
  394. }
  395. }
  396. }
  397. layer {
  398. name: "conv2/bn"
  399. type: "BatchNorm"
  400. bottom: "conv2"
  401. top: "conv2"
  402. param {
  403. lr_mult: 0
  404. decay_mult: 0
  405. }
  406. param {
  407. lr_mult: 0
  408. decay_mult: 0
  409. }
  410. param {
  411. lr_mult: 0
  412. decay_mult: 0
  413. }
  414. }
  415. layer {
  416. name: "conv2/scale"
  417. type: "Scale"
  418. bottom: "conv2"
  419. top: "conv2"
  420. param {
  421. lr_mult: 0.1
  422. decay_mult: 0.0
  423. }
  424. param {
  425. lr_mult: 0.2
  426. decay_mult: 0.0
  427. }
  428. scale_param {
  429. filler {
  430. value: 1
  431. }
  432. bias_term: true
  433. bias_filler {
  434. value: 0
  435. }
  436. }
  437. }
  438. layer {
  439. name: "conv2/relu"
  440. type: "ReLU"
  441. bottom: "conv2"
  442. top: "conv2"
  443. }
  444. layer {
  445. name: "conv3/dw"
  446. type: "Convolution"
  447. bottom: "conv2"
  448. top: "conv3/dw"
  449. param {
  450. lr_mult: 0.1
  451. decay_mult: 0.1
  452. }
  453. convolution_param {
  454. num_output: 128
  455. bias_term: false
  456. pad: 1
  457. kernel_size: 3
  458. group: 128
  459. engine: CAFFE
  460. weight_filler {
  461. type: "msra"
  462. }
  463. }
  464. }
  465. layer {
  466. name: "conv3/dw/bn"
  467. type: "BatchNorm"
  468. bottom: "conv3/dw"
  469. top: "conv3/dw"
  470. param {
  471. lr_mult: 0
  472. decay_mult: 0
  473. }
  474. param {
  475. lr_mult: 0
  476. decay_mult: 0
  477. }
  478. param {
  479. lr_mult: 0
  480. decay_mult: 0
  481. }
  482. }
  483. layer {
  484. name: "conv3/dw/scale"
  485. type: "Scale"
  486. bottom: "conv3/dw"
  487. top: "conv3/dw"
  488. param {
  489. lr_mult: 0.1
  490. decay_mult: 0.0
  491. }
  492. param {
  493. lr_mult: 0.2
  494. decay_mult: 0.0
  495. }
  496. scale_param {
  497. filler {
  498. value: 1
  499. }
  500. bias_term: true
  501. bias_filler {
  502. value: 0
  503. }
  504. }
  505. }
  506. layer {
  507. name: "conv3/dw/relu"
  508. type: "ReLU"
  509. bottom: "conv3/dw"
  510. top: "conv3/dw"
  511. }
  512. layer {
  513. name: "conv3"
  514. type: "Convolution"
  515. bottom: "conv3/dw"
  516. top: "conv3"
  517. param {
  518. lr_mult: 0.1
  519. decay_mult: 0.1
  520. }
  521. convolution_param {
  522. num_output: 128
  523. bias_term: false
  524. kernel_size: 1
  525. weight_filler {
  526. type: "msra"
  527. }
  528. }
  529. }
  530. layer {
  531. name: "conv3/bn"
  532. type: "BatchNorm"
  533. bottom: "conv3"
  534. top: "conv3"
  535. param {
  536. lr_mult: 0
  537. decay_mult: 0
  538. }
  539. param {
  540. lr_mult: 0
  541. decay_mult: 0
  542. }
  543. param {
  544. lr_mult: 0
  545. decay_mult: 0
  546. }
  547. }
  548. layer {
  549. name: "conv3/scale"
  550. type: "Scale"
  551. bottom: "conv3"
  552. top: "conv3"
  553. param {
  554. lr_mult: 0.1
  555. decay_mult: 0.0
  556. }
  557. param {
  558. lr_mult: 0.2
  559. decay_mult: 0.0
  560. }
  561. scale_param {
  562. filler {
  563. value: 1
  564. }
  565. bias_term: true
  566. bias_filler {
  567. value: 0
  568. }
  569. }
  570. }
  571. layer {
  572. name: "conv3/relu"
  573. type: "ReLU"
  574. bottom: "conv3"
  575. top: "conv3"
  576. }
  577. layer {
  578. name: "conv4/dw"
  579. type: "Convolution"
  580. bottom: "conv3"
  581. top: "conv4/dw"
  582. param {
  583. lr_mult: 0.1
  584. decay_mult: 0.1
  585. }
  586. convolution_param {
  587. num_output: 128
  588. bias_term: false
  589. pad: 1
  590. kernel_size: 3
  591. stride: 2
  592. group: 128
  593. engine: CAFFE
  594. weight_filler {
  595. type: "msra"
  596. }
  597. }
  598. }
  599. layer {
  600. name: "conv4/dw/bn"
  601. type: "BatchNorm"
  602. bottom: "conv4/dw"
  603. top: "conv4/dw"
  604. param {
  605. lr_mult: 0
  606. decay_mult: 0
  607. }
  608. param {
  609. lr_mult: 0
  610. decay_mult: 0
  611. }
  612. param {
  613. lr_mult: 0
  614. decay_mult: 0
  615. }
  616. }
  617. layer {
  618. name: "conv4/dw/scale"
  619. type: "Scale"
  620. bottom: "conv4/dw"
  621. top: "conv4/dw"
  622. param {
  623. lr_mult: 0.1
  624. decay_mult: 0.0
  625. }
  626. param {
  627. lr_mult: 0.2
  628. decay_mult: 0.0
  629. }
  630. scale_param {
  631. filler {
  632. value: 1
  633. }
  634. bias_term: true
  635. bias_filler {
  636. value: 0
  637. }
  638. }
  639. }
  640. layer {
  641. name: "conv4/dw/relu"
  642. type: "ReLU"
  643. bottom: "conv4/dw"
  644. top: "conv4/dw"
  645. }
  646. layer {
  647. name: "conv4"
  648. type: "Convolution"
  649. bottom: "conv4/dw"
  650. top: "conv4"
  651. param {
  652. lr_mult: 0.1
  653. decay_mult: 0.1
  654. }
  655. convolution_param {
  656. num_output: 256
  657. bias_term: false
  658. kernel_size: 1
  659. weight_filler {
  660. type: "msra"
  661. }
  662. }
  663. }
  664. layer {
  665. name: "conv4/bn"
  666. type: "BatchNorm"
  667. bottom: "conv4"
  668. top: "conv4"
  669. param {
  670. lr_mult: 0
  671. decay_mult: 0
  672. }
  673. param {
  674. lr_mult: 0
  675. decay_mult: 0
  676. }
  677. param {
  678. lr_mult: 0
  679. decay_mult: 0
  680. }
  681. }
  682. layer {
  683. name: "conv4/scale"
  684. type: "Scale"
  685. bottom: "conv4"
  686. top: "conv4"
  687. param {
  688. lr_mult: 0.1
  689. decay_mult: 0.0
  690. }
  691. param {
  692. lr_mult: 0.2
  693. decay_mult: 0.0
  694. }
  695. scale_param {
  696. filler {
  697. value: 1
  698. }
  699. bias_term: true
  700. bias_filler {
  701. value: 0
  702. }
  703. }
  704. }
  705. layer {
  706. name: "conv4/relu"
  707. type: "ReLU"
  708. bottom: "conv4"
  709. top: "conv4"
  710. }
  711. layer {
  712. name: "conv5/dw"
  713. type: "Convolution"
  714. bottom: "conv4"
  715. top: "conv5/dw"
  716. param {
  717. lr_mult: 0.1
  718. decay_mult: 0.1
  719. }
  720. convolution_param {
  721. num_output: 256
  722. bias_term: false
  723. pad: 1
  724. kernel_size: 3
  725. group: 256
  726. engine: CAFFE
  727. weight_filler {
  728. type: "msra"
  729. }
  730. }
  731. }
  732. layer {
  733. name: "conv5/dw/bn"
  734. type: "BatchNorm"
  735. bottom: "conv5/dw"
  736. top: "conv5/dw"
  737. param {
  738. lr_mult: 0
  739. decay_mult: 0
  740. }
  741. param {
  742. lr_mult: 0
  743. decay_mult: 0
  744. }
  745. param {
  746. lr_mult: 0
  747. decay_mult: 0
  748. }
  749. }
  750. layer {
  751. name: "conv5/dw/scale"
  752. type: "Scale"
  753. bottom: "conv5/dw"
  754. top: "conv5/dw"
  755. param {
  756. lr_mult: 0.1
  757. decay_mult: 0.0
  758. }
  759. param {
  760. lr_mult: 0.2
  761. decay_mult: 0.0
  762. }
  763. scale_param {
  764. filler {
  765. value: 1
  766. }
  767. bias_term: true
  768. bias_filler {
  769. value: 0
  770. }
  771. }
  772. }
  773. layer {
  774. name: "conv5/dw/relu"
  775. type: "ReLU"
  776. bottom: "conv5/dw"
  777. top: "conv5/dw"
  778. }
  779. layer {
  780. name: "conv5"
  781. type: "Convolution"
  782. bottom: "conv5/dw"
  783. top: "conv5"
  784. param {
  785. lr_mult: 0.1
  786. decay_mult: 0.1
  787. }
  788. convolution_param {
  789. num_output: 256
  790. bias_term: false
  791. kernel_size: 1
  792. weight_filler {
  793. type: "msra"
  794. }
  795. }
  796. }
  797. layer {
  798. name: "conv5/bn"
  799. type: "BatchNorm"
  800. bottom: "conv5"
  801. top: "conv5"
  802. param {
  803. lr_mult: 0
  804. decay_mult: 0
  805. }
  806. param {
  807. lr_mult: 0
  808. decay_mult: 0
  809. }
  810. param {
  811. lr_mult: 0
  812. decay_mult: 0
  813. }
  814. }
  815. layer {
  816. name: "conv5/scale"
  817. type: "Scale"
  818. bottom: "conv5"
  819. top: "conv5"
  820. param {
  821. lr_mult: 0.1
  822. decay_mult: 0.0
  823. }
  824. param {
  825. lr_mult: 0.2
  826. decay_mult: 0.0
  827. }
  828. scale_param {
  829. filler {
  830. value: 1
  831. }
  832. bias_term: true
  833. bias_filler {
  834. value: 0
  835. }
  836. }
  837. }
  838. layer {
  839. name: "conv5/relu"
  840. type: "ReLU"
  841. bottom: "conv5"
  842. top: "conv5"
  843. }
  844. layer {
  845. name: "conv6/dw"
  846. type: "Convolution"
  847. bottom: "conv5"
  848. top: "conv6/dw"
  849. param {
  850. lr_mult: 0.1
  851. decay_mult: 0.1
  852. }
  853. convolution_param {
  854. num_output: 256
  855. bias_term: false
  856. pad: 1
  857. kernel_size: 3
  858. stride: 2
  859. group: 256
  860. engine: CAFFE
  861. weight_filler {
  862. type: "msra"
  863. }
  864. }
  865. }
  866. layer {
  867. name: "conv6/dw/bn"
  868. type: "BatchNorm"
  869. bottom: "conv6/dw"
  870. top: "conv6/dw"
  871. param {
  872. lr_mult: 0
  873. decay_mult: 0
  874. }
  875. param {
  876. lr_mult: 0
  877. decay_mult: 0
  878. }
  879. param {
  880. lr_mult: 0
  881. decay_mult: 0
  882. }
  883. }
  884. layer {
  885. name: "conv6/dw/scale"
  886. type: "Scale"
  887. bottom: "conv6/dw"
  888. top: "conv6/dw"
  889. param {
  890. lr_mult: 0.1
  891. decay_mult: 0.0
  892. }
  893. param {
  894. lr_mult: 0.2
  895. decay_mult: 0.0
  896. }
  897. scale_param {
  898. filler {
  899. value: 1
  900. }
  901. bias_term: true
  902. bias_filler {
  903. value: 0
  904. }
  905. }
  906. }
  907. layer {
  908. name: "conv6/dw/relu"
  909. type: "ReLU"
  910. bottom: "conv6/dw"
  911. top: "conv6/dw"
  912. }
  913. layer {
  914. name: "conv6"
  915. type: "Convolution"
  916. bottom: "conv6/dw"
  917. top: "conv6"
  918. param {
  919. lr_mult: 0.1
  920. decay_mult: 0.1
  921. }
  922. convolution_param {
  923. num_output: 512
  924. bias_term: false
  925. kernel_size: 1
  926. weight_filler {
  927. type: "msra"
  928. }
  929. }
  930. }
  931. layer {
  932. name: "conv6/bn"
  933. type: "BatchNorm"
  934. bottom: "conv6"
  935. top: "conv6"
  936. param {
  937. lr_mult: 0
  938. decay_mult: 0
  939. }
  940. param {
  941. lr_mult: 0
  942. decay_mult: 0
  943. }
  944. param {
  945. lr_mult: 0
  946. decay_mult: 0
  947. }
  948. }
  949. layer {
  950. name: "conv6/scale"
  951. type: "Scale"
  952. bottom: "conv6"
  953. top: "conv6"
  954. param {
  955. lr_mult: 0.1
  956. decay_mult: 0.0
  957. }
  958. param {
  959. lr_mult: 0.2
  960. decay_mult: 0.0
  961. }
  962. scale_param {
  963. filler {
  964. value: 1
  965. }
  966. bias_term: true
  967. bias_filler {
  968. value: 0
  969. }
  970. }
  971. }
  972. layer {
  973. name: "conv6/relu"
  974. type: "ReLU"
  975. bottom: "conv6"
  976. top: "conv6"
  977. }
  978. layer {
  979. name: "conv7/dw"
  980. type: "Convolution"
  981. bottom: "conv6"
  982. top: "conv7/dw"
  983. param {
  984. lr_mult: 0.1
  985. decay_mult: 0.1
  986. }
  987. convolution_param {
  988. num_output: 512
  989. bias_term: false
  990. pad: 1
  991. kernel_size: 3
  992. group: 512
  993. engine: CAFFE
  994. weight_filler {
  995. type: "msra"
  996. }
  997. }
  998. }
  999. layer {
  1000. name: "conv7/dw/bn"
  1001. type: "BatchNorm"
  1002. bottom: "conv7/dw"
  1003. top: "conv7/dw"
  1004. param {
  1005. lr_mult: 0
  1006. decay_mult: 0
  1007. }
  1008. param {
  1009. lr_mult: 0
  1010. decay_mult: 0
  1011. }
  1012. param {
  1013. lr_mult: 0
  1014. decay_mult: 0
  1015. }
  1016. }
  1017. layer {
  1018. name: "conv7/dw/scale"
  1019. type: "Scale"
  1020. bottom: "conv7/dw"
  1021. top: "conv7/dw"
  1022. param {
  1023. lr_mult: 0.1
  1024. decay_mult: 0.0
  1025. }
  1026. param {
  1027. lr_mult: 0.2
  1028. decay_mult: 0.0
  1029. }
  1030. scale_param {
  1031. filler {
  1032. value: 1
  1033. }
  1034. bias_term: true
  1035. bias_filler {
  1036. value: 0
  1037. }
  1038. }
  1039. }
  1040. layer {
  1041. name: "conv7/dw/relu"
  1042. type: "ReLU"
  1043. bottom: "conv7/dw"
  1044. top: "conv7/dw"
  1045. }
  1046. layer {
  1047. name: "conv7"
  1048. type: "Convolution"
  1049. bottom: "conv7/dw"
  1050. top: "conv7"
  1051. param {
  1052. lr_mult: 0.1
  1053. decay_mult: 0.1
  1054. }
  1055. convolution_param {
  1056. num_output: 512
  1057. bias_term: false
  1058. kernel_size: 1
  1059. weight_filler {
  1060. type: "msra"
  1061. }
  1062. }
  1063. }
  1064. layer {
  1065. name: "conv7/bn"
  1066. type: "BatchNorm"
  1067. bottom: "conv7"
  1068. top: "conv7"
  1069. param {
  1070. lr_mult: 0
  1071. decay_mult: 0
  1072. }
  1073. param {
  1074. lr_mult: 0
  1075. decay_mult: 0
  1076. }
  1077. param {
  1078. lr_mult: 0
  1079. decay_mult: 0
  1080. }
  1081. }
  1082. layer {
  1083. name: "conv7/scale"
  1084. type: "Scale"
  1085. bottom: "conv7"
  1086. top: "conv7"
  1087. param {
  1088. lr_mult: 0.1
  1089. decay_mult: 0.0
  1090. }
  1091. param {
  1092. lr_mult: 0.2
  1093. decay_mult: 0.0
  1094. }
  1095. scale_param {
  1096. filler {
  1097. value: 1
  1098. }
  1099. bias_term: true
  1100. bias_filler {
  1101. value: 0
  1102. }
  1103. }
  1104. }
  1105. layer {
  1106. name: "conv7/relu"
  1107. type: "ReLU"
  1108. bottom: "conv7"
  1109. top: "conv7"
  1110. }
  1111. layer {
  1112. name: "conv8/dw"
  1113. type: "Convolution"
  1114. bottom: "conv7"
  1115. top: "conv8/dw"
  1116. param {
  1117. lr_mult: 0.1
  1118. decay_mult: 0.1
  1119. }
  1120. convolution_param {
  1121. num_output: 512
  1122. bias_term: false
  1123. pad: 1
  1124. kernel_size: 3
  1125. group: 512
  1126. engine: CAFFE
  1127. weight_filler {
  1128. type: "msra"
  1129. }
  1130. }
  1131. }
  1132. layer {
  1133. name: "conv8/dw/bn"
  1134. type: "BatchNorm"
  1135. bottom: "conv8/dw"
  1136. top: "conv8/dw"
  1137. param {
  1138. lr_mult: 0
  1139. decay_mult: 0
  1140. }
  1141. param {
  1142. lr_mult: 0
  1143. decay_mult: 0
  1144. }
  1145. param {
  1146. lr_mult: 0
  1147. decay_mult: 0
  1148. }
  1149. }
  1150. layer {
  1151. name: "conv8/dw/scale"
  1152. type: "Scale"
  1153. bottom: "conv8/dw"
  1154. top: "conv8/dw"
  1155. param {
  1156. lr_mult: 0.1
  1157. decay_mult: 0.0
  1158. }
  1159. param {
  1160. lr_mult: 0.2
  1161. decay_mult: 0.0
  1162. }
  1163. scale_param {
  1164. filler {
  1165. value: 1
  1166. }
  1167. bias_term: true
  1168. bias_filler {
  1169. value: 0
  1170. }
  1171. }
  1172. }
  1173. layer {
  1174. name: "conv8/dw/relu"
  1175. type: "ReLU"
  1176. bottom: "conv8/dw"
  1177. top: "conv8/dw"
  1178. }
  1179. layer {
  1180. name: "conv8"
  1181. type: "Convolution"
  1182. bottom: "conv8/dw"
  1183. top: "conv8"
  1184. param {
  1185. lr_mult: 0.1
  1186. decay_mult: 0.1
  1187. }
  1188. convolution_param {
  1189. num_output: 512
  1190. bias_term: false
  1191. kernel_size: 1
  1192. weight_filler {
  1193. type: "msra"
  1194. }
  1195. }
  1196. }
  1197. layer {
  1198. name: "conv8/bn"
  1199. type: "BatchNorm"
  1200. bottom: "conv8"
  1201. top: "conv8"
  1202. param {
  1203. lr_mult: 0
  1204. decay_mult: 0
  1205. }
  1206. param {
  1207. lr_mult: 0
  1208. decay_mult: 0
  1209. }
  1210. param {
  1211. lr_mult: 0
  1212. decay_mult: 0
  1213. }
  1214. }
  1215. layer {
  1216. name: "conv8/scale"
  1217. type: "Scale"
  1218. bottom: "conv8"
  1219. top: "conv8"
  1220. param {
  1221. lr_mult: 0.1
  1222. decay_mult: 0.0
  1223. }
  1224. param {
  1225. lr_mult: 0.2
  1226. decay_mult: 0.0
  1227. }
  1228. scale_param {
  1229. filler {
  1230. value: 1
  1231. }
  1232. bias_term: true
  1233. bias_filler {
  1234. value: 0
  1235. }
  1236. }
  1237. }
  1238. layer {
  1239. name: "conv8/relu"
  1240. type: "ReLU"
  1241. bottom: "conv8"
  1242. top: "conv8"
  1243. }
  1244. layer {
  1245. name: "conv9/dw"
  1246. type: "Convolution"
  1247. bottom: "conv8"
  1248. top: "conv9/dw"
  1249. param {
  1250. lr_mult: 0.1
  1251. decay_mult: 0.1
  1252. }
  1253. convolution_param {
  1254. num_output: 512
  1255. bias_term: false
  1256. pad: 1
  1257. kernel_size: 3
  1258. group: 512
  1259. engine: CAFFE
  1260. weight_filler {
  1261. type: "msra"
  1262. }
  1263. }
  1264. }
  1265. layer {
  1266. name: "conv9/dw/bn"
  1267. type: "BatchNorm"
  1268. bottom: "conv9/dw"
  1269. top: "conv9/dw"
  1270. param {
  1271. lr_mult: 0
  1272. decay_mult: 0
  1273. }
  1274. param {
  1275. lr_mult: 0
  1276. decay_mult: 0
  1277. }
  1278. param {
  1279. lr_mult: 0
  1280. decay_mult: 0
  1281. }
  1282. }
  1283. layer {
  1284. name: "conv9/dw/scale"
  1285. type: "Scale"
  1286. bottom: "conv9/dw"
  1287. top: "conv9/dw"
  1288. param {
  1289. lr_mult: 0.1
  1290. decay_mult: 0.0
  1291. }
  1292. param {
  1293. lr_mult: 0.2
  1294. decay_mult: 0.0
  1295. }
  1296. scale_param {
  1297. filler {
  1298. value: 1
  1299. }
  1300. bias_term: true
  1301. bias_filler {
  1302. value: 0
  1303. }
  1304. }
  1305. }
  1306. layer {
  1307. name: "conv9/dw/relu"
  1308. type: "ReLU"
  1309. bottom: "conv9/dw"
  1310. top: "conv9/dw"
  1311. }
  1312. layer {
  1313. name: "conv9"
  1314. type: "Convolution"
  1315. bottom: "conv9/dw"
  1316. top: "conv9"
  1317. param {
  1318. lr_mult: 0.1
  1319. decay_mult: 0.1
  1320. }
  1321. convolution_param {
  1322. num_output: 512
  1323. bias_term: false
  1324. kernel_size: 1
  1325. weight_filler {
  1326. type: "msra"
  1327. }
  1328. }
  1329. }
  1330. layer {
  1331. name: "conv9/bn"
  1332. type: "BatchNorm"
  1333. bottom: "conv9"
  1334. top: "conv9"
  1335. param {
  1336. lr_mult: 0
  1337. decay_mult: 0
  1338. }
  1339. param {
  1340. lr_mult: 0
  1341. decay_mult: 0
  1342. }
  1343. param {
  1344. lr_mult: 0
  1345. decay_mult: 0
  1346. }
  1347. }
  1348. layer {
  1349. name: "conv9/scale"
  1350. type: "Scale"
  1351. bottom: "conv9"
  1352. top: "conv9"
  1353. param {
  1354. lr_mult: 0.1
  1355. decay_mult: 0.0
  1356. }
  1357. param {
  1358. lr_mult: 0.2
  1359. decay_mult: 0.0
  1360. }
  1361. scale_param {
  1362. filler {
  1363. value: 1
  1364. }
  1365. bias_term: true
  1366. bias_filler {
  1367. value: 0
  1368. }
  1369. }
  1370. }
  1371. layer {
  1372. name: "conv9/relu"
  1373. type: "ReLU"
  1374. bottom: "conv9"
  1375. top: "conv9"
  1376. }
  1377. layer {
  1378. name: "conv10/dw"
  1379. type: "Convolution"
  1380. bottom: "conv9"
  1381. top: "conv10/dw"
  1382. param {
  1383. lr_mult: 0.1
  1384. decay_mult: 0.1
  1385. }
  1386. convolution_param {
  1387. num_output: 512
  1388. bias_term: false
  1389. pad: 1
  1390. kernel_size: 3
  1391. group: 512
  1392. engine: CAFFE
  1393. weight_filler {
  1394. type: "msra"
  1395. }
  1396. }
  1397. }
  1398. layer {
  1399. name: "conv10/dw/bn"
  1400. type: "BatchNorm"
  1401. bottom: "conv10/dw"
  1402. top: "conv10/dw"
  1403. param {
  1404. lr_mult: 0
  1405. decay_mult: 0
  1406. }
  1407. param {
  1408. lr_mult: 0
  1409. decay_mult: 0
  1410. }
  1411. param {
  1412. lr_mult: 0
  1413. decay_mult: 0
  1414. }
  1415. }
  1416. layer {
  1417. name: "conv10/dw/scale"
  1418. type: "Scale"
  1419. bottom: "conv10/dw"
  1420. top: "conv10/dw"
  1421. param {
  1422. lr_mult: 0.1
  1423. decay_mult: 0.0
  1424. }
  1425. param {
  1426. lr_mult: 0.2
  1427. decay_mult: 0.0
  1428. }
  1429. scale_param {
  1430. filler {
  1431. value: 1
  1432. }
  1433. bias_term: true
  1434. bias_filler {
  1435. value: 0
  1436. }
  1437. }
  1438. }
  1439. layer {
  1440. name: "conv10/dw/relu"
  1441. type: "ReLU"
  1442. bottom: "conv10/dw"
  1443. top: "conv10/dw"
  1444. }
  1445. layer {
  1446. name: "conv10"
  1447. type: "Convolution"
  1448. bottom: "conv10/dw"
  1449. top: "conv10"
  1450. param {
  1451. lr_mult: 0.1
  1452. decay_mult: 0.1
  1453. }
  1454. convolution_param {
  1455. num_output: 512
  1456. bias_term: false
  1457. kernel_size: 1
  1458. weight_filler {
  1459. type: "msra"
  1460. }
  1461. }
  1462. }
  1463. layer {
  1464. name: "conv10/bn"
  1465. type: "BatchNorm"
  1466. bottom: "conv10"
  1467. top: "conv10"
  1468. param {
  1469. lr_mult: 0
  1470. decay_mult: 0
  1471. }
  1472. param {
  1473. lr_mult: 0
  1474. decay_mult: 0
  1475. }
  1476. param {
  1477. lr_mult: 0
  1478. decay_mult: 0
  1479. }
  1480. }
  1481. layer {
  1482. name: "conv10/scale"
  1483. type: "Scale"
  1484. bottom: "conv10"
  1485. top: "conv10"
  1486. param {
  1487. lr_mult: 0.1
  1488. decay_mult: 0.0
  1489. }
  1490. param {
  1491. lr_mult: 0.2
  1492. decay_mult: 0.0
  1493. }
  1494. scale_param {
  1495. filler {
  1496. value: 1
  1497. }
  1498. bias_term: true
  1499. bias_filler {
  1500. value: 0
  1501. }
  1502. }
  1503. }
  1504. layer {
  1505. name: "conv10/relu"
  1506. type: "ReLU"
  1507. bottom: "conv10"
  1508. top: "conv10"
  1509. }
  1510. layer {
  1511. name: "conv11/dw"
  1512. type: "Convolution"
  1513. bottom: "conv10"
  1514. top: "conv11/dw"
  1515. param {
  1516. lr_mult: 0.1
  1517. decay_mult: 0.1
  1518. }
  1519. convolution_param {
  1520. num_output: 512
  1521. bias_term: false
  1522. pad: 1
  1523. kernel_size: 3
  1524. group: 512
  1525. engine: CAFFE
  1526. weight_filler {
  1527. type: "msra"
  1528. }
  1529. }
  1530. }
  1531. layer {
  1532. name: "conv11/dw/bn"
  1533. type: "BatchNorm"
  1534. bottom: "conv11/dw"
  1535. top: "conv11/dw"
  1536. param {
  1537. lr_mult: 0
  1538. decay_mult: 0
  1539. }
  1540. param {
  1541. lr_mult: 0
  1542. decay_mult: 0
  1543. }
  1544. param {
  1545. lr_mult: 0
  1546. decay_mult: 0
  1547. }
  1548. }
  1549. layer {
  1550. name: "conv11/dw/scale"
  1551. type: "Scale"
  1552. bottom: "conv11/dw"
  1553. top: "conv11/dw"
  1554. param {
  1555. lr_mult: 0.1
  1556. decay_mult: 0.0
  1557. }
  1558. param {
  1559. lr_mult: 0.2
  1560. decay_mult: 0.0
  1561. }
  1562. scale_param {
  1563. filler {
  1564. value: 1
  1565. }
  1566. bias_term: true
  1567. bias_filler {
  1568. value: 0
  1569. }
  1570. }
  1571. }
  1572. layer {
  1573. name: "conv11/dw/relu"
  1574. type: "ReLU"
  1575. bottom: "conv11/dw"
  1576. top: "conv11/dw"
  1577. }
  1578.  
  1579.  
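# Side branch and feature extension. conv1_sub -> Permute -> Flatten is declared below,
# but its output (conv1_sub_flat) is not consumed by any later layer. conv11_feature_extend
# concatenates conv1 with conv11/dw on axis 1; note that as written conv1 (1/2 input
# resolution) and conv11/dw (1/16 input resolution) have different spatial sizes, so this
# Concat would fail Caffe's shape check unless conv1 is downsampled to match.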
  1580. layer {
  1581. name: "conv1_sub"
  1582. type: "Convolution"
  1583. bottom: "conv1"
  1584. top: "conv1_sub"
  1585. param {
  1586. lr_mult: 1
  1587. decay_mult: 1
  1588. }
  1589. convolution_param {
  1590. num_output: 512
  1591. bias_term: false
  1592. kernel_size: 1
  1593. weight_filler {
  1594. type: "msra"
  1595. }
  1596. }
  1597. }
  1598.  
  1599. layer {
  1600. name: "conv1_sub_perm"
  1601. type: "Permute"
  1602. bottom: "conv1_sub"
  1603. top: "conv1_sub_perm"
  1604. permute_param {
  1605. order: 0
  1606. order: 2
  1607. order: 3
  1608. order: 1
  1609. }
  1610. }
  1611. layer {
  1612. name: "conv1_sub_flat"
  1613. type: "Flatten"
  1614. bottom: "conv1_sub_perm"
  1615. top: "conv1_sub_flat"
  1616. flatten_param {
  1617. axis: 1
  1618. }
  1619. }
  1620.  
  1621. layer {
  1622. name: "conv11_feature_extend"
  1623. type: "Concat"
  1624. bottom: "conv1"
  1625. bottom: "conv11/dw"
  1626. top: "conv11_feature_extend"
  1627. concat_param {
  1628. axis: 1
  1629. }
  1630. }
  1631.  
  1632. layer {
  1633. name: "conv11"
  1634. type: "Convolution"
  1635. bottom: "conv11_feature_extend"
  1636. top: "conv11"
  1637. param {
  1638. lr_mult: 0.1
  1639. decay_mult: 0.1
  1640. }
  1641. convolution_param {
  1642. num_output: 512
  1643. bias_term: false
  1644. kernel_size: 1
  1645. weight_filler {
  1646. type: "msra"
  1647. }
  1648. }
  1649. }
  1650. layer {
  1651. name: "conv11/bn"
  1652. type: "BatchNorm"
  1653. bottom: "conv11"
  1654. top: "conv11"
  1655. param {
  1656. lr_mult: 0
  1657. decay_mult: 0
  1658. }
  1659. param {
  1660. lr_mult: 0
  1661. decay_mult: 0
  1662. }
  1663. param {
  1664. lr_mult: 0
  1665. decay_mult: 0
  1666. }
  1667. }
  1668. layer {
  1669. name: "conv11/scale"
  1670. type: "Scale"
  1671. bottom: "conv11"
  1672. top: "conv11"
  1673. param {
  1674. lr_mult: 0.1
  1675. decay_mult: 0.0
  1676. }
  1677. param {
  1678. lr_mult: 0.2
  1679. decay_mult: 0.0
  1680. }
  1681. scale_param {
  1682. filler {
  1683. value: 1
  1684. }
  1685. bias_term: true
  1686. bias_filler {
  1687. value: 0
  1688. }
  1689. }
  1690. }
  1691. layer {
  1692. name: "conv11/relu"
  1693. type: "ReLU"
  1694. bottom: "conv11"
  1695. top: "conv11"
  1696. }
  1697. layer {
  1698. name: "conv12/dw"
  1699. type: "Convolution"
  1700. bottom: "conv11"
  1701. top: "conv12/dw"
  1702. param {
  1703. lr_mult: 0.1
  1704. decay_mult: 0.1
  1705. }
  1706. convolution_param {
  1707. num_output: 512
  1708. bias_term: false
  1709. pad: 1
  1710. kernel_size: 3
  1711. stride: 2
  1712. group: 512
  1713. engine: CAFFE
  1714. weight_filler {
  1715. type: "msra"
  1716. }
  1717. }
  1718. }
  1719. layer {
  1720. name: "conv12/dw/bn"
  1721. type: "BatchNorm"
  1722. bottom: "conv12/dw"
  1723. top: "conv12/dw"
  1724. param {
  1725. lr_mult: 0
  1726. decay_mult: 0
  1727. }
  1728. param {
  1729. lr_mult: 0
  1730. decay_mult: 0
  1731. }
  1732. param {
  1733. lr_mult: 0
  1734. decay_mult: 0
  1735. }
  1736. }
  1737. layer {
  1738. name: "conv12/dw/scale"
  1739. type: "Scale"
  1740. bottom: "conv12/dw"
  1741. top: "conv12/dw"
  1742. param {
  1743. lr_mult: 0.1
  1744. decay_mult: 0.0
  1745. }
  1746. param {
  1747. lr_mult: 0.2
  1748. decay_mult: 0.0
  1749. }
  1750. scale_param {
  1751. filler {
  1752. value: 1
  1753. }
  1754. bias_term: true
  1755. bias_filler {
  1756. value: 0
  1757. }
  1758. }
  1759. }
  1760. layer {
  1761. name: "conv12/dw/relu"
  1762. type: "ReLU"
  1763. bottom: "conv12/dw"
  1764. top: "conv12/dw"
  1765. }
  1766. layer {
  1767. name: "conv12"
  1768. type: "Convolution"
  1769. bottom: "conv12/dw"
  1770. top: "conv12"
  1771. param {
  1772. lr_mult: 0.1
  1773. decay_mult: 0.1
  1774. }
  1775. convolution_param {
  1776. num_output: 1024
  1777. bias_term: false
  1778. kernel_size: 1
  1779. weight_filler {
  1780. type: "msra"
  1781. }
  1782. }
  1783. }
  1784. layer {
  1785. name: "conv12/bn"
  1786. type: "BatchNorm"
  1787. bottom: "conv12"
  1788. top: "conv12"
  1789. param {
  1790. lr_mult: 0
  1791. decay_mult: 0
  1792. }
  1793. param {
  1794. lr_mult: 0
  1795. decay_mult: 0
  1796. }
  1797. param {
  1798. lr_mult: 0
  1799. decay_mult: 0
  1800. }
  1801. }
  1802. layer {
  1803. name: "conv12/scale"
  1804. type: "Scale"
  1805. bottom: "conv12"
  1806. top: "conv12"
  1807. param {
  1808. lr_mult: 0.1
  1809. decay_mult: 0.0
  1810. }
  1811. param {
  1812. lr_mult: 0.2
  1813. decay_mult: 0.0
  1814. }
  1815. scale_param {
  1816. filler {
  1817. value: 1
  1818. }
  1819. bias_term: true
  1820. bias_filler {
  1821. value: 0
  1822. }
  1823. }
  1824. }
  1825. layer {
  1826. name: "conv12/relu"
  1827. type: "ReLU"
  1828. bottom: "conv12"
  1829. top: "conv12"
  1830. }
  1831. layer {
  1832. name: "conv13/dw"
  1833. type: "Convolution"
  1834. bottom: "conv12"
  1835. top: "conv13/dw"
  1836. param {
  1837. lr_mult: 0.1
  1838. decay_mult: 0.1
  1839. }
  1840. convolution_param {
  1841. num_output: 1024
  1842. bias_term: false
  1843. pad: 1
  1844. kernel_size: 3
  1845. group: 1024
  1846. engine: CAFFE
  1847. weight_filler {
  1848. type: "msra"
  1849. }
  1850. }
  1851. }
  1852. layer {
  1853. name: "conv13/dw/bn"
  1854. type: "BatchNorm"
  1855. bottom: "conv13/dw"
  1856. top: "conv13/dw"
  1857. param {
  1858. lr_mult: 0
  1859. decay_mult: 0
  1860. }
  1861. param {
  1862. lr_mult: 0
  1863. decay_mult: 0
  1864. }
  1865. param {
  1866. lr_mult: 0
  1867. decay_mult: 0
  1868. }
  1869. }
  1870. layer {
  1871. name: "conv13/dw/scale"
  1872. type: "Scale"
  1873. bottom: "conv13/dw"
  1874. top: "conv13/dw"
  1875. param {
  1876. lr_mult: 0.1
  1877. decay_mult: 0.0
  1878. }
  1879. param {
  1880. lr_mult: 0.2
  1881. decay_mult: 0.0
  1882. }
  1883. scale_param {
  1884. filler {
  1885. value: 1
  1886. }
  1887. bias_term: true
  1888. bias_filler {
  1889. value: 0
  1890. }
  1891. }
  1892. }
  1893. layer {
  1894. name: "conv13/dw/relu"
  1895. type: "ReLU"
  1896. bottom: "conv13/dw"
  1897. top: "conv13/dw"
  1898. }
  1899. layer {
  1900. name: "conv13"
  1901. type: "Convolution"
  1902. bottom: "conv13/dw"
  1903. top: "conv13"
  1904. param {
  1905. lr_mult: 0.1
  1906. decay_mult: 0.1
  1907. }
  1908. convolution_param {
  1909. num_output: 1024
  1910. bias_term: false
  1911. kernel_size: 1
  1912. weight_filler {
  1913. type: "msra"
  1914. }
  1915. }
  1916. }
  1917. layer {
  1918. name: "conv13/bn"
  1919. type: "BatchNorm"
  1920. bottom: "conv13"
  1921. top: "conv13"
  1922. param {
  1923. lr_mult: 0
  1924. decay_mult: 0
  1925. }
  1926. param {
  1927. lr_mult: 0
  1928. decay_mult: 0
  1929. }
  1930. param {
  1931. lr_mult: 0
  1932. decay_mult: 0
  1933. }
  1934. }
  1935. layer {
  1936. name: "conv13/scale"
  1937. type: "Scale"
  1938. bottom: "conv13"
  1939. top: "conv13"
  1940. param {
  1941. lr_mult: 0.1
  1942. decay_mult: 0.0
  1943. }
  1944. param {
  1945. lr_mult: 0.2
  1946. decay_mult: 0.0
  1947. }
  1948. scale_param {
  1949. filler {
  1950. value: 1
  1951. }
  1952. bias_term: true
  1953. bias_filler {
  1954. value: 0
  1955. }
  1956. }
  1957. }
  1958. layer {
  1959. name: "conv13/relu"
  1960. type: "ReLU"
  1961. bottom: "conv13"
  1962. top: "conv13"
  1963. }
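# Extra SSD feature layers beyond the backbone: conv14_1/conv14_2 and conv15_1/conv15_2
# each apply a 1x1 channel reduction followed by a 3x3 stride-2 convolution, producing
# progressively smaller maps used as additional detection sources.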
  1964. layer {
  1965. name: "conv14_1"
  1966. type: "Convolution"
  1967. bottom: "conv13"
  1968. top: "conv14_1"
  1969. param {
  1970. lr_mult: 0.1
  1971. decay_mult: 0.1
  1972. }
  1973. convolution_param {
  1974. num_output: 256
  1975. bias_term: false
  1976. kernel_size: 1
  1977. weight_filler {
  1978. type: "msra"
  1979. }
  1980. }
  1981. }
  1982. layer {
  1983. name: "conv14_1/bn"
  1984. type: "BatchNorm"
  1985. bottom: "conv14_1"
  1986. top: "conv14_1"
  1987. param {
  1988. lr_mult: 0
  1989. decay_mult: 0
  1990. }
  1991. param {
  1992. lr_mult: 0
  1993. decay_mult: 0
  1994. }
  1995. param {
  1996. lr_mult: 0
  1997. decay_mult: 0
  1998. }
  1999. }
  2000. layer {
  2001. name: "conv14_1/scale"
  2002. type: "Scale"
  2003. bottom: "conv14_1"
  2004. top: "conv14_1"
  2005. param {
  2006. lr_mult: 0.1
  2007. decay_mult: 0.0
  2008. }
  2009. param {
  2010. lr_mult: 0.2
  2011. decay_mult: 0.0
  2012. }
  2013. scale_param {
  2014. filler {
  2015. value: 1
  2016. }
  2017. bias_term: true
  2018. bias_filler {
  2019. value: 0
  2020. }
  2021. }
  2022. }
  2023. layer {
  2024. name: "conv14_1/relu"
  2025. type: "ReLU"
  2026. bottom: "conv14_1"
  2027. top: "conv14_1"
  2028. }
  2029. layer {
  2030. name: "conv14_2"
  2031. type: "Convolution"
  2032. bottom: "conv14_1"
  2033. top: "conv14_2"
  2034. param {
  2035. lr_mult: 0.1
  2036. decay_mult: 0.1
  2037. }
  2038. convolution_param {
  2039. num_output: 512
  2040. bias_term: false
  2041. pad: 1
  2042. kernel_size: 3
  2043. stride: 2
  2044. weight_filler {
  2045. type: "msra"
  2046. }
  2047. }
  2048. }
  2049. layer {
  2050. name: "conv14_2/bn"
  2051. type: "BatchNorm"
  2052. bottom: "conv14_2"
  2053. top: "conv14_2"
  2054. param {
  2055. lr_mult: 0
  2056. decay_mult: 0
  2057. }
  2058. param {
  2059. lr_mult: 0
  2060. decay_mult: 0
  2061. }
  2062. param {
  2063. lr_mult: 0
  2064. decay_mult: 0
  2065. }
  2066. }
  2067. layer {
  2068. name: "conv14_2/scale"
  2069. type: "Scale"
  2070. bottom: "conv14_2"
  2071. top: "conv14_2"
  2072. param {
  2073. lr_mult: 0.1
  2074. decay_mult: 0.0
  2075. }
  2076. param {
  2077. lr_mult: 0.2
  2078. decay_mult: 0.0
  2079. }
  2080. scale_param {
  2081. filler {
  2082. value: 1
  2083. }
  2084. bias_term: true
  2085. bias_filler {
  2086. value: 0
  2087. }
  2088. }
  2089. }
  2090. layer {
  2091. name: "conv14_2/relu"
  2092. type: "ReLU"
  2093. bottom: "conv14_2"
  2094. top: "conv14_2"
  2095. }
  2096. layer {
  2097. name: "conv15_1"
  2098. type: "Convolution"
  2099. bottom: "conv14_2"
  2100. top: "conv15_1"
  2101. param {
  2102. lr_mult: 0.1
  2103. decay_mult: 0.1
  2104. }
  2105. convolution_param {
  2106. num_output: 128
  2107. bias_term: false
  2108. kernel_size: 1
  2109. weight_filler {
  2110. type: "msra"
  2111. }
  2112. }
  2113. }
  2114. layer {
  2115. name: "conv15_1/bn"
  2116. type: "BatchNorm"
  2117. bottom: "conv15_1"
  2118. top: "conv15_1"
  2119. param {
  2120. lr_mult: 0
  2121. decay_mult: 0
  2122. }
  2123. param {
  2124. lr_mult: 0
  2125. decay_mult: 0
  2126. }
  2127. param {
  2128. lr_mult: 0
  2129. decay_mult: 0
  2130. }
  2131. }
  2132. layer {
  2133. name: "conv15_1/scale"
  2134. type: "Scale"
  2135. bottom: "conv15_1"
  2136. top: "conv15_1"
  2137. param {
  2138. lr_mult: 0.1
  2139. decay_mult: 0.0
  2140. }
  2141. param {
  2142. lr_mult: 0.2
  2143. decay_mult: 0.0
  2144. }
  2145. scale_param {
  2146. filler {
  2147. value: 1
  2148. }
  2149. bias_term: true
  2150. bias_filler {
  2151. value: 0
  2152. }
  2153. }
  2154. }
  2155. layer {
  2156. name: "conv15_1/relu"
  2157. type: "ReLU"
  2158. bottom: "conv15_1"
  2159. top: "conv15_1"
  2160. }
  2161. layer {
  2162. name: "conv15_2"
  2163. type: "Convolution"
  2164. bottom: "conv15_1"
  2165. top: "conv15_2"
  2166. param {
  2167. lr_mult: 0.1
  2168. decay_mult: 0.1
  2169. }
  2170. convolution_param {
  2171. num_output: 256
  2172. bias_term: false
  2173. pad: 1
  2174. kernel_size: 3
  2175. stride: 2
  2176. weight_filler {
  2177. type: "msra"
  2178. }
  2179. }
  2180. }
  2181. layer {
  2182. name: "conv15_2/bn"
  2183. type: "BatchNorm"
  2184. bottom: "conv15_2"
  2185. top: "conv15_2"
  2186. param {
  2187. lr_mult: 0
  2188. decay_mult: 0
  2189. }
  2190. param {
  2191. lr_mult: 0
  2192. decay_mult: 0
  2193. }
  2194. param {
  2195. lr_mult: 0
  2196. decay_mult: 0
  2197. }
  2198. }
  2199. layer {
  2200. name: "conv15_2/scale"
  2201. type: "Scale"
  2202. bottom: "conv15_2"
  2203. top: "conv15_2"
  2204. param {
  2205. lr_mult: 0.1
  2206. decay_mult: 0.0
  2207. }
  2208. param {
  2209. lr_mult: 0.2
  2210. decay_mult: 0.0
  2211. }
  2212. scale_param {
  2213. filler {
  2214. value: 1
  2215. }
  2216. bias_term: true
  2217. bias_filler {
  2218. value: 0
  2219. }
  2220. }
  2221. }
  2222. layer {
  2223. name: "conv15_2/relu"
  2224. type: "ReLU"
  2225. bottom: "conv15_2"
  2226. top: "conv15_2"
  2227. }
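# Note: conv1_dense below is declared but its output is not used as a bottom by any
# later layer in this prototxt.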
  2228. layer {
  2229. name: "conv1_dense"
  2230. type: "Convolution"
  2231. bottom: "conv1"
  2232. top: "conv1_dense"
  2233. param {
  2234. lr_mult: 1
  2235. decay_mult: 1
  2236. }
  2237. convolution_param {
  2238. num_output: 256
  2239. bias_term: false
  2240. pad: 1
  2241. kernel_size: 3
  2242. weight_filler {
  2243. type: "msra"
  2244. }
  2245. }
  2246. }
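# SSD detection heads. Each source layer (conv11, conv13, conv14_2, conv15_2) gets a 1x1
# localization conv, a 1x1 confidence conv, and a PriorBox layer. conv11 uses 3 priors
# (min_size 6, aspect ratio 4 with flip), so num_output is 12 = 3 priors x 4 box offsets
# (and 3 x 4 classes for confidence); the other heads add a max_size, giving 4 priors
# and num_output 16.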
  2247. layer {
  2248. name: "conv11_mbox_loc_0929"
  2249. type: "Convolution"
  2250. bottom: "conv11"
  2251. top: "conv11_mbox_loc"
  2252. param {
  2253. lr_mult: 0.1
  2254. decay_mult: 0.1
  2255. }
  2256. param {
  2257. lr_mult: 0.2
  2258. decay_mult: 0.0
  2259. }
  2260. convolution_param {
  2261. num_output: 12
  2262. kernel_size: 1
  2263. weight_filler {
  2264. type: "msra"
  2265. }
  2266. bias_filler {
  2267. type: "constant"
  2268. value: 0.0
  2269. }
  2270. }
  2271. }
  2272. layer {
  2273. name: "conv11_mbox_loc_perm"
  2274. type: "Permute"
  2275. bottom: "conv11_mbox_loc"
  2276. top: "conv11_mbox_loc_perm"
  2277. permute_param {
  2278. order: 0
  2279. order: 2
  2280. order: 3
  2281. order: 1
  2282. }
  2283. }
  2284. layer {
  2285. name: "conv11_mbox_loc_flat"
  2286. type: "Flatten"
  2287. bottom: "conv11_mbox_loc_perm"
  2288. top: "conv11_mbox_loc_flat"
  2289. flatten_param {
  2290. axis: 1
  2291. }
  2292. }
  2293. layer {
  2294. name: "conv11_mbox_conf_new_0929"
  2295. type: "Convolution"
  2296. bottom: "conv11"
  2297. top: "conv11_mbox_conf"
  2298. param {
  2299. lr_mult: 1.0
  2300. decay_mult: 1.0
  2301. }
  2302. param {
  2303. lr_mult: 2.0
  2304. decay_mult: 0.0
  2305. }
  2306. convolution_param {
  2307. num_output: 12
  2308. kernel_size: 1
  2309. weight_filler {
  2310. type: "msra"
  2311. }
  2312. bias_filler {
  2313. type: "constant"
  2314. value: 0.0
  2315. }
  2316. }
  2317. }
  2318. layer {
  2319. name: "conv11_mbox_conf_perm"
  2320. type: "Permute"
  2321. bottom: "conv11_mbox_conf"
  2322. top: "conv11_mbox_conf_perm"
  2323. permute_param {
  2324. order: 0
  2325. order: 2
  2326. order: 3
  2327. order: 1
  2328. }
  2329. }
  2330. layer {
  2331. name: "conv11_mbox_conf_flat"
  2332. type: "Flatten"
  2333. bottom: "conv11_mbox_conf_perm"
  2334. top: "conv11_mbox_conf_flat"
  2335. flatten_param {
  2336. axis: 1
  2337. }
  2338. }
  2339. layer {
  2340. name: "conv11_mbox_priorbox"
  2341. type: "PriorBox"
  2342. bottom: "conv11"
  2343. bottom: "data"
  2344. top: "conv11_mbox_priorbox"
  2345. prior_box_param {
  2346. min_size: 6.0
  2347. aspect_ratio: 4.0
  2348. flip: true
  2349. clip: false
  2350. variance: 0.1
  2351. variance: 0.1
  2352. variance: 0.2
  2353. variance: 0.2
  2354. offset: 0.5
  2355. }
  2356. }
  2357. layer {
  2358. name: "conv13_mbox_loc_0929"
  2359. type: "Convolution"
  2360. bottom: "conv13"
  2361. top: "conv13_mbox_loc"
  2362. param {
  2363. lr_mult: 0.1
  2364. decay_mult: 0.1
  2365. }
  2366. param {
  2367. lr_mult: 0.2
  2368. decay_mult: 0.0
  2369. }
  2370. convolution_param {
  2371. num_output: 16
  2372. kernel_size: 1
  2373. weight_filler {
  2374. type: "msra"
  2375. }
  2376. bias_filler {
  2377. type: "constant"
  2378. value: 0.0
  2379. }
  2380. }
  2381. }
  2382. layer {
  2383. name: "conv13_mbox_loc_perm"
  2384. type: "Permute"
  2385. bottom: "conv13_mbox_loc"
  2386. top: "conv13_mbox_loc_perm"
  2387. permute_param {
  2388. order: 0
  2389. order: 2
  2390. order: 3
  2391. order: 1
  2392. }
  2393. }
  2394. layer {
  2395. name: "conv13_mbox_loc_flat"
  2396. type: "Flatten"
  2397. bottom: "conv13_mbox_loc_perm"
  2398. top: "conv13_mbox_loc_flat"
  2399. flatten_param {
  2400. axis: 1
  2401. }
  2402. }
  2403. layer {
  2404. name: "conv13_mbox_conf_new_0929"
  2405. type: "Convolution"
  2406. bottom: "conv13"
  2407. top: "conv13_mbox_conf"
  2408. param {
  2409. lr_mult: 1.0
  2410. decay_mult: 1.0
  2411. }
  2412. param {
  2413. lr_mult: 2.0
  2414. decay_mult: 0.0
  2415. }
  2416. convolution_param {
  2417. num_output: 16
  2418. kernel_size: 1
  2419. weight_filler {
  2420. type: "msra"
  2421. }
  2422. bias_filler {
  2423. type: "constant"
  2424. value: 0.0
  2425. }
  2426. }
  2427. }
  2428. layer {
  2429. name: "conv13_mbox_conf_perm"
  2430. type: "Permute"
  2431. bottom: "conv13_mbox_conf"
  2432. top: "conv13_mbox_conf_perm"
  2433. permute_param {
  2434. order: 0
  2435. order: 2
  2436. order: 3
  2437. order: 1
  2438. }
  2439. }
  2440. layer {
  2441. name: "conv13_mbox_conf_flat"
  2442. type: "Flatten"
  2443. bottom: "conv13_mbox_conf_perm"
  2444. top: "conv13_mbox_conf_flat"
  2445. flatten_param {
  2446. axis: 1
  2447. }
  2448. }
  2449. layer {
  2450. name: "conv13_mbox_priorbox"
  2451. type: "PriorBox"
  2452. bottom: "conv13"
  2453. bottom: "data"
  2454. top: "conv13_mbox_priorbox"
  2455. prior_box_param {
  2456. min_size: 30.0
  2457. max_size: 55.0
  2458. aspect_ratio: 4.0
  2459. flip: true
  2460. clip: false
  2461. variance: 0.1
  2462. variance: 0.1
  2463. variance: 0.2
  2464. variance: 0.2
  2465. offset: 0.5
  2466. }
  2467. }
  2468. layer {
  2469. name: "conv14_2_mbox_loc_0929"
  2470. type: "Convolution"
  2471. bottom: "conv14_2"
  2472. top: "conv14_2_mbox_loc"
  2473. param {
  2474. lr_mult: 0.1
  2475. decay_mult: 0.1
  2476. }
  2477. param {
  2478. lr_mult: 0.2
  2479. decay_mult: 0.0
  2480. }
  2481. convolution_param {
  2482. num_output: 16
  2483. kernel_size: 1
  2484. weight_filler {
  2485. type: "msra"
  2486. }
  2487. bias_filler {
  2488. type: "constant"
  2489. value: 0.0
  2490. }
  2491. }
  2492. }
  2493. layer {
  2494. name: "conv14_2_mbox_loc_perm"
  2495. type: "Permute"
  2496. bottom: "conv14_2_mbox_loc"
  2497. top: "conv14_2_mbox_loc_perm"
  2498. permute_param {
  2499. order: 0
  2500. order: 2
  2501. order: 3
  2502. order: 1
  2503. }
  2504. }
  2505. layer {
  2506. name: "conv14_2_mbox_loc_flat"
  2507. type: "Flatten"
  2508. bottom: "conv14_2_mbox_loc_perm"
  2509. top: "conv14_2_mbox_loc_flat"
  2510. flatten_param {
  2511. axis: 1
  2512. }
  2513. }
  2514. layer {
  2515. name: "conv14_2_mbox_conf_new_0929"
  2516. type: "Convolution"
  2517. bottom: "conv14_2"
  2518. top: "conv14_2_mbox_conf"
  2519. param {
  2520. lr_mult: 1.0
  2521. decay_mult: 1.0
  2522. }
  2523. param {
  2524. lr_mult: 2.0
  2525. decay_mult: 0.0
  2526. }
  2527. convolution_param {
  2528. num_output: 16
  2529. kernel_size: 1
  2530. weight_filler {
  2531. type: "msra"
  2532. }
  2533. bias_filler {
  2534. type: "constant"
  2535. value: 0.0
  2536. }
  2537. }
  2538. }
  2539. layer {
  2540. name: "conv14_2_mbox_conf_perm"
  2541. type: "Permute"
  2542. bottom: "conv14_2_mbox_conf"
  2543. top: "conv14_2_mbox_conf_perm"
  2544. permute_param {
  2545. order: 0
  2546. order: 2
  2547. order: 3
  2548. order: 1
  2549. }
  2550. }
  2551. layer {
  2552. name: "conv14_2_mbox_conf_flat"
  2553. type: "Flatten"
  2554. bottom: "conv14_2_mbox_conf_perm"
  2555. top: "conv14_2_mbox_conf_flat"
  2556. flatten_param {
  2557. axis: 1
  2558. }
  2559. }
  2560. layer {
  2561. name: "conv14_2_mbox_priorbox"
  2562. type: "PriorBox"
  2563. bottom: "conv14_2"
  2564. bottom: "data"
  2565. top: "conv14_2_mbox_priorbox"
  2566. prior_box_param {
  2567. min_size: 55.0
  2568. max_size: 79.0
  2569. aspect_ratio: 4.0
  2570. flip: true
  2571. clip: false
  2572. variance: 0.1
  2573. variance: 0.1
  2574. variance: 0.2
  2575. variance: 0.2
  2576. offset: 0.5
  2577. }
  2578. }
  2579. layer {
  2580. name: "conv15_2_mbox_loc_0929"
  2581. type: "Convolution"
  2582. bottom: "conv15_2"
  2583. top: "conv15_2_mbox_loc"
  2584. param {
  2585. lr_mult: 0.1
  2586. decay_mult: 0.1
  2587. }
  2588. param {
  2589. lr_mult: 0.2
  2590. decay_mult: 0.0
  2591. }
  2592. convolution_param {
  2593. num_output: 16
  2594. kernel_size: 1
  2595. weight_filler {
  2596. type: "msra"
  2597. }
  2598. bias_filler {
  2599. type: "constant"
  2600. value: 0.0
  2601. }
  2602. }
  2603. }
  2604. layer {
  2605. name: "conv15_2_mbox_loc_perm"
  2606. type: "Permute"
  2607. bottom: "conv15_2_mbox_loc"
  2608. top: "conv15_2_mbox_loc_perm"
  2609. permute_param {
  2610. order: 0
  2611. order: 2
  2612. order: 3
  2613. order: 1
  2614. }
  2615. }
  2616. layer {
  2617. name: "conv15_2_mbox_loc_flat"
  2618. type: "Flatten"
  2619. bottom: "conv15_2_mbox_loc_perm"
  2620. top: "conv15_2_mbox_loc_flat"
  2621. flatten_param {
  2622. axis: 1
  2623. }
  2624. }
  2625. layer {
  2626. name: "conv15_2_mbox_conf_new_0929"
  2627. type: "Convolution"
  2628. bottom: "conv15_2"
  2629. top: "conv15_2_mbox_conf"
  2630. param {
  2631. lr_mult: 1.0
  2632. decay_mult: 1.0
  2633. }
  2634. param {
  2635. lr_mult: 2.0
  2636. decay_mult: 0.0
  2637. }
  2638. convolution_param {
  2639. num_output: 16
  2640. kernel_size: 1
  2641. weight_filler {
  2642. type: "msra"
  2643. }
  2644. bias_filler {
  2645. type: "constant"
  2646. value: 0.0
  2647. }
  2648. }
  2649. }
  2650. layer {
  2651. name: "conv15_2_mbox_conf_perm"
  2652. type: "Permute"
  2653. bottom: "conv15_2_mbox_conf"
  2654. top: "conv15_2_mbox_conf_perm"
  2655. permute_param {
  2656. order: 0
  2657. order: 2
  2658. order: 3
  2659. order: 1
  2660. }
  2661. }
  2662. layer {
  2663. name: "conv15_2_mbox_conf_flat"
  2664. type: "Flatten"
  2665. bottom: "conv15_2_mbox_conf_perm"
  2666. top: "conv15_2_mbox_conf_flat"
  2667. flatten_param {
  2668. axis: 1
  2669. }
  2670. }
  2671. layer {
  2672. name: "conv15_2_mbox_priorbox"
  2673. type: "PriorBox"
  2674. bottom: "conv15_2"
  2675. bottom: "data"
  2676. top: "conv15_2_mbox_priorbox"
  2677. prior_box_param {
  2678. min_size: 79.0
  2679. max_size: 103.0
  2680. aspect_ratio: 4.0
  2681. flip: true
  2682. clip: false
  2683. variance: 0.1
  2684. variance: 0.1
  2685. variance: 0.2
  2686. variance: 0.2
  2687. offset: 0.5
  2688. }
  2689. }
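# Per-scale predictions are flattened and concatenated: mbox_loc and mbox_conf on axis 1,
# mbox_priorbox on axis 2, before being fed to the loss.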
  2690. layer {
  2691. name: "mbox_loc"
  2692. type: "Concat"
  2693. bottom: "conv11_mbox_loc_flat"
  2694. bottom: "conv13_mbox_loc_flat"
  2695. bottom: "conv14_2_mbox_loc_flat"
  2696. bottom: "conv15_2_mbox_loc_flat"
  2697. top: "mbox_loc"
  2698. concat_param {
  2699. axis: 1
  2700. }
  2701. }
  2702. layer {
  2703. name: "mbox_conf"
  2704. type: "Concat"
  2705. bottom: "conv11_mbox_conf_flat"
  2706. bottom: "conv13_mbox_conf_flat"
  2707. bottom: "conv14_2_mbox_conf_flat"
  2708. bottom: "conv15_2_mbox_conf_flat"
  2709. top: "mbox_conf"
  2710. concat_param {
  2711. axis: 1
  2712. }
  2713. }
  2714. layer {
  2715. name: "mbox_priorbox"
  2716. type: "Concat"
  2717. bottom: "conv11_mbox_priorbox"
  2718. bottom: "conv13_mbox_priorbox"
  2719. bottom: "conv14_2_mbox_priorbox"
  2720. bottom: "conv15_2_mbox_priorbox"
  2721. top: "mbox_priorbox"
  2722. concat_param {
  2723. axis: 2
  2724. }
  2725. }
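# MultiBoxLoss: SmoothL1 localization loss and softmax confidence loss over 4 classes
# (background_label_id 0), per-prediction matching at IoU >= 0.5, and max-negative hard
# mining with a 3:1 negative-to-positive ratio.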
  2726. layer {
  2727. name: "mbox_loss"
  2728. type: "MultiBoxLoss"
  2729. bottom: "mbox_loc"
  2730. bottom: "mbox_conf"
  2731. bottom: "mbox_priorbox"
  2732. bottom: "label"
  2733. top: "mbox_loss"
  2734. include {
  2735. phase: TRAIN
  2736. }
  2737. propagate_down: true
  2738. propagate_down: true
  2739. propagate_down: false
  2740. propagate_down: false
  2741. loss_param {
  2742. normalization: VALID
  2743. }
  2744. multibox_loss_param {
  2745. loc_loss_type: SMOOTH_L1
  2746. conf_loss_type: SOFTMAX
  2747. loc_weight: 1.0
  2748. num_classes: 4
  2749. share_location: true
  2750. match_type: PER_PREDICTION
  2751. overlap_threshold: 0.5
  2752. use_prior_for_matching: true
  2753. background_label_id: 0
  2754. use_difficult_gt: true
  2755. neg_pos_ratio: 3.0
  2756. neg_overlap: 0.5
  2757. code_type: CENTER_SIZE
  2758. ignore_cross_boundary_bbox: false
  2759. mining_type: MAX_NEGATIVE
  2760. }
  2761. }