Guest User

Untitled

a guest
Sep 24th, 2018
60
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 57.82 KB | None | 0 0
  1. name: "Darknet2Caffe"
  2. input: "data"
  3. input_dim: 1
  4. input_dim: 3
  5. input_dim: 608
  6. input_dim: 608
  7.  
  8. layer {
  9. bottom: "data"
  10. top: "layer1-conv"
  11. name: "layer1-conv"
  12. type: "Convolution"
  13. convolution_param {
  14. num_output: 32
  15. kernel_size: 3
  16. pad: 1
  17. stride: 1
  18. bias_term: false
  19. }
  20. }
  21. layer {
  22. bottom: "layer1-conv"
  23. top: "layer1-conv"
  24. name: "layer1-bn"
  25. type: "BatchNorm"
  26. batch_norm_param {
  27. use_global_stats: true
  28. }
  29. }
  30. layer {
  31. bottom: "layer1-conv"
  32. top: "layer1-conv"
  33. name: "layer1-scale"
  34. type: "Scale"
  35. scale_param {
  36. bias_term: true
  37. }
  38. }
  39. layer {
  40. bottom: "layer1-conv"
  41. top: "layer1-conv"
  42. name: "layer1-act"
  43. type: "ReLU"
  44. relu_param {
  45. negative_slope: 0.1
  46. }
  47. }
  48. layer {
  49. bottom: "layer1-conv"
  50. top: "layer2-conv"
  51. name: "layer2-conv"
  52. type: "Convolution"
  53. convolution_param {
  54. num_output: 64
  55. kernel_size: 3
  56. pad: 1
  57. stride: 2
  58. bias_term: false
  59. }
  60. }
  61. layer {
  62. bottom: "layer2-conv"
  63. top: "layer2-conv"
  64. name: "layer2-bn"
  65. type: "BatchNorm"
  66. batch_norm_param {
  67. use_global_stats: true
  68. }
  69. }
  70. layer {
  71. bottom: "layer2-conv"
  72. top: "layer2-conv"
  73. name: "layer2-scale"
  74. type: "Scale"
  75. scale_param {
  76. bias_term: true
  77. }
  78. }
  79. layer {
  80. bottom: "layer2-conv"
  81. top: "layer2-conv"
  82. name: "layer2-act"
  83. type: "ReLU"
  84. relu_param {
  85. negative_slope: 0.1
  86. }
  87. }
  88. layer {
  89. bottom: "layer2-conv"
  90. top: "layer3-conv"
  91. name: "layer3-conv"
  92. type: "Convolution"
  93. convolution_param {
  94. num_output: 32
  95. kernel_size: 1
  96. pad: 0
  97. stride: 1
  98. bias_term: false
  99. }
  100. }
  101. layer {
  102. bottom: "layer3-conv"
  103. top: "layer3-conv"
  104. name: "layer3-bn"
  105. type: "BatchNorm"
  106. batch_norm_param {
  107. use_global_stats: true
  108. }
  109. }
  110. layer {
  111. bottom: "layer3-conv"
  112. top: "layer3-conv"
  113. name: "layer3-scale"
  114. type: "Scale"
  115. scale_param {
  116. bias_term: true
  117. }
  118. }
  119. layer {
  120. bottom: "layer3-conv"
  121. top: "layer3-conv"
  122. name: "layer3-act"
  123. type: "ReLU"
  124. relu_param {
  125. negative_slope: 0.1
  126. }
  127. }
  128. layer {
  129. bottom: "layer3-conv"
  130. top: "layer4-conv"
  131. name: "layer4-conv"
  132. type: "Convolution"
  133. convolution_param {
  134. num_output: 64
  135. kernel_size: 3
  136. pad: 1
  137. stride: 1
  138. bias_term: false
  139. }
  140. }
  141. layer {
  142. bottom: "layer4-conv"
  143. top: "layer4-conv"
  144. name: "layer4-bn"
  145. type: "BatchNorm"
  146. batch_norm_param {
  147. use_global_stats: true
  148. }
  149. }
  150. layer {
  151. bottom: "layer4-conv"
  152. top: "layer4-conv"
  153. name: "layer4-scale"
  154. type: "Scale"
  155. scale_param {
  156. bias_term: true
  157. }
  158. }
  159. layer {
  160. bottom: "layer4-conv"
  161. top: "layer4-conv"
  162. name: "layer4-act"
  163. type: "ReLU"
  164. relu_param {
  165. negative_slope: 0.1
  166. }
  167. }
  168. layer {
  169. bottom: "layer2-conv"
  170. bottom: "layer4-conv"
  171. top: "layer5-shortcut"
  172. name: "layer5-shortcut"
  173. type: "Eltwise"
  174. eltwise_param {
  175. operation: SUM
  176. }
  177. }
  178. layer {
  179. bottom: "layer5-shortcut"
  180. top: "layer6-conv"
  181. name: "layer6-conv"
  182. type: "Convolution"
  183. convolution_param {
  184. num_output: 128
  185. kernel_size: 3
  186. pad: 1
  187. stride: 2
  188. bias_term: false
  189. }
  190. }
  191. layer {
  192. bottom: "layer6-conv"
  193. top: "layer6-conv"
  194. name: "layer6-bn"
  195. type: "BatchNorm"
  196. batch_norm_param {
  197. use_global_stats: true
  198. }
  199. }
  200. layer {
  201. bottom: "layer6-conv"
  202. top: "layer6-conv"
  203. name: "layer6-scale"
  204. type: "Scale"
  205. scale_param {
  206. bias_term: true
  207. }
  208. }
  209. layer {
  210. bottom: "layer6-conv"
  211. top: "layer6-conv"
  212. name: "layer6-act"
  213. type: "ReLU"
  214. relu_param {
  215. negative_slope: 0.1
  216. }
  217. }
  218. layer {
  219. bottom: "layer6-conv"
  220. top: "layer7-conv"
  221. name: "layer7-conv"
  222. type: "Convolution"
  223. convolution_param {
  224. num_output: 64
  225. kernel_size: 1
  226. pad: 0
  227. stride: 1
  228. bias_term: false
  229. }
  230. }
  231. layer {
  232. bottom: "layer7-conv"
  233. top: "layer7-conv"
  234. name: "layer7-bn"
  235. type: "BatchNorm"
  236. batch_norm_param {
  237. use_global_stats: true
  238. }
  239. }
  240. layer {
  241. bottom: "layer7-conv"
  242. top: "layer7-conv"
  243. name: "layer7-scale"
  244. type: "Scale"
  245. scale_param {
  246. bias_term: true
  247. }
  248. }
  249. layer {
  250. bottom: "layer7-conv"
  251. top: "layer7-conv"
  252. name: "layer7-act"
  253. type: "ReLU"
  254. relu_param {
  255. negative_slope: 0.1
  256. }
  257. }
  258. layer {
  259. bottom: "layer7-conv"
  260. top: "layer8-conv"
  261. name: "layer8-conv"
  262. type: "Convolution"
  263. convolution_param {
  264. num_output: 128
  265. kernel_size: 3
  266. pad: 1
  267. stride: 1
  268. bias_term: false
  269. }
  270. }
  271. layer {
  272. bottom: "layer8-conv"
  273. top: "layer8-conv"
  274. name: "layer8-bn"
  275. type: "BatchNorm"
  276. batch_norm_param {
  277. use_global_stats: true
  278. }
  279. }
  280. layer {
  281. bottom: "layer8-conv"
  282. top: "layer8-conv"
  283. name: "layer8-scale"
  284. type: "Scale"
  285. scale_param {
  286. bias_term: true
  287. }
  288. }
  289. layer {
  290. bottom: "layer8-conv"
  291. top: "layer8-conv"
  292. name: "layer8-act"
  293. type: "ReLU"
  294. relu_param {
  295. negative_slope: 0.1
  296. }
  297. }
  298. layer {
  299. bottom: "layer6-conv"
  300. bottom: "layer8-conv"
  301. top: "layer9-shortcut"
  302. name: "layer9-shortcut"
  303. type: "Eltwise"
  304. eltwise_param {
  305. operation: SUM
  306. }
  307. }
  308. layer {
  309. bottom: "layer9-shortcut"
  310. top: "layer10-conv"
  311. name: "layer10-conv"
  312. type: "Convolution"
  313. convolution_param {
  314. num_output: 64
  315. kernel_size: 1
  316. pad: 0
  317. stride: 1
  318. bias_term: false
  319. }
  320. }
  321. layer {
  322. bottom: "layer10-conv"
  323. top: "layer10-conv"
  324. name: "layer10-bn"
  325. type: "BatchNorm"
  326. batch_norm_param {
  327. use_global_stats: true
  328. }
  329. }
  330. layer {
  331. bottom: "layer10-conv"
  332. top: "layer10-conv"
  333. name: "layer10-scale"
  334. type: "Scale"
  335. scale_param {
  336. bias_term: true
  337. }
  338. }
  339. layer {
  340. bottom: "layer10-conv"
  341. top: "layer10-conv"
  342. name: "layer10-act"
  343. type: "ReLU"
  344. relu_param {
  345. negative_slope: 0.1
  346. }
  347. }
  348. layer {
  349. bottom: "layer10-conv"
  350. top: "layer11-conv"
  351. name: "layer11-conv"
  352. type: "Convolution"
  353. convolution_param {
  354. num_output: 128
  355. kernel_size: 3
  356. pad: 1
  357. stride: 1
  358. bias_term: false
  359. }
  360. }
  361. layer {
  362. bottom: "layer11-conv"
  363. top: "layer11-conv"
  364. name: "layer11-bn"
  365. type: "BatchNorm"
  366. batch_norm_param {
  367. use_global_stats: true
  368. }
  369. }
  370. layer {
  371. bottom: "layer11-conv"
  372. top: "layer11-conv"
  373. name: "layer11-scale"
  374. type: "Scale"
  375. scale_param {
  376. bias_term: true
  377. }
  378. }
  379. layer {
  380. bottom: "layer11-conv"
  381. top: "layer11-conv"
  382. name: "layer11-act"
  383. type: "ReLU"
  384. relu_param {
  385. negative_slope: 0.1
  386. }
  387. }
  388. layer {
  389. bottom: "layer9-shortcut"
  390. bottom: "layer11-conv"
  391. top: "layer12-shortcut"
  392. name: "layer12-shortcut"
  393. type: "Eltwise"
  394. eltwise_param {
  395. operation: SUM
  396. }
  397. }
  398. layer {
  399. bottom: "layer12-shortcut"
  400. top: "layer13-conv"
  401. name: "layer13-conv"
  402. type: "Convolution"
  403. convolution_param {
  404. num_output: 256
  405. kernel_size: 3
  406. pad: 1
  407. stride: 2
  408. bias_term: false
  409. }
  410. }
  411. layer {
  412. bottom: "layer13-conv"
  413. top: "layer13-conv"
  414. name: "layer13-bn"
  415. type: "BatchNorm"
  416. batch_norm_param {
  417. use_global_stats: true
  418. }
  419. }
  420. layer {
  421. bottom: "layer13-conv"
  422. top: "layer13-conv"
  423. name: "layer13-scale"
  424. type: "Scale"
  425. scale_param {
  426. bias_term: true
  427. }
  428. }
  429. layer {
  430. bottom: "layer13-conv"
  431. top: "layer13-conv"
  432. name: "layer13-act"
  433. type: "ReLU"
  434. relu_param {
  435. negative_slope: 0.1
  436. }
  437. }
  438. layer {
  439. bottom: "layer13-conv"
  440. top: "layer14-conv"
  441. name: "layer14-conv"
  442. type: "Convolution"
  443. convolution_param {
  444. num_output: 128
  445. kernel_size: 1
  446. pad: 0
  447. stride: 1
  448. bias_term: false
  449. }
  450. }
  451. layer {
  452. bottom: "layer14-conv"
  453. top: "layer14-conv"
  454. name: "layer14-bn"
  455. type: "BatchNorm"
  456. batch_norm_param {
  457. use_global_stats: true
  458. }
  459. }
  460. layer {
  461. bottom: "layer14-conv"
  462. top: "layer14-conv"
  463. name: "layer14-scale"
  464. type: "Scale"
  465. scale_param {
  466. bias_term: true
  467. }
  468. }
  469. layer {
  470. bottom: "layer14-conv"
  471. top: "layer14-conv"
  472. name: "layer14-act"
  473. type: "ReLU"
  474. relu_param {
  475. negative_slope: 0.1
  476. }
  477. }
  478. layer {
  479. bottom: "layer14-conv"
  480. top: "layer15-conv"
  481. name: "layer15-conv"
  482. type: "Convolution"
  483. convolution_param {
  484. num_output: 256
  485. kernel_size: 3
  486. pad: 1
  487. stride: 1
  488. bias_term: false
  489. }
  490. }
  491. layer {
  492. bottom: "layer15-conv"
  493. top: "layer15-conv"
  494. name: "layer15-bn"
  495. type: "BatchNorm"
  496. batch_norm_param {
  497. use_global_stats: true
  498. }
  499. }
  500. layer {
  501. bottom: "layer15-conv"
  502. top: "layer15-conv"
  503. name: "layer15-scale"
  504. type: "Scale"
  505. scale_param {
  506. bias_term: true
  507. }
  508. }
  509. layer {
  510. bottom: "layer15-conv"
  511. top: "layer15-conv"
  512. name: "layer15-act"
  513. type: "ReLU"
  514. relu_param {
  515. negative_slope: 0.1
  516. }
  517. }
  518. layer {
  519. bottom: "layer13-conv"
  520. bottom: "layer15-conv"
  521. top: "layer16-shortcut"
  522. name: "layer16-shortcut"
  523. type: "Eltwise"
  524. eltwise_param {
  525. operation: SUM
  526. }
  527. }
  528. layer {
  529. bottom: "layer16-shortcut"
  530. top: "layer17-conv"
  531. name: "layer17-conv"
  532. type: "Convolution"
  533. convolution_param {
  534. num_output: 128
  535. kernel_size: 1
  536. pad: 0
  537. stride: 1
  538. bias_term: false
  539. }
  540. }
  541. layer {
  542. bottom: "layer17-conv"
  543. top: "layer17-conv"
  544. name: "layer17-bn"
  545. type: "BatchNorm"
  546. batch_norm_param {
  547. use_global_stats: true
  548. }
  549. }
  550. layer {
  551. bottom: "layer17-conv"
  552. top: "layer17-conv"
  553. name: "layer17-scale"
  554. type: "Scale"
  555. scale_param {
  556. bias_term: true
  557. }
  558. }
  559. layer {
  560. bottom: "layer17-conv"
  561. top: "layer17-conv"
  562. name: "layer17-act"
  563. type: "ReLU"
  564. relu_param {
  565. negative_slope: 0.1
  566. }
  567. }
  568. layer {
  569. bottom: "layer17-conv"
  570. top: "layer18-conv"
  571. name: "layer18-conv"
  572. type: "Convolution"
  573. convolution_param {
  574. num_output: 256
  575. kernel_size: 3
  576. pad: 1
  577. stride: 1
  578. bias_term: false
  579. }
  580. }
  581. layer {
  582. bottom: "layer18-conv"
  583. top: "layer18-conv"
  584. name: "layer18-bn"
  585. type: "BatchNorm"
  586. batch_norm_param {
  587. use_global_stats: true
  588. }
  589. }
  590. layer {
  591. bottom: "layer18-conv"
  592. top: "layer18-conv"
  593. name: "layer18-scale"
  594. type: "Scale"
  595. scale_param {
  596. bias_term: true
  597. }
  598. }
  599. layer {
  600. bottom: "layer18-conv"
  601. top: "layer18-conv"
  602. name: "layer18-act"
  603. type: "ReLU"
  604. relu_param {
  605. negative_slope: 0.1
  606. }
  607. }
  608. layer {
  609. bottom: "layer16-shortcut"
  610. bottom: "layer18-conv"
  611. top: "layer19-shortcut"
  612. name: "layer19-shortcut"
  613. type: "Eltwise"
  614. eltwise_param {
  615. operation: SUM
  616. }
  617. }
  618. layer {
  619. bottom: "layer19-shortcut"
  620. top: "layer20-conv"
  621. name: "layer20-conv"
  622. type: "Convolution"
  623. convolution_param {
  624. num_output: 128
  625. kernel_size: 1
  626. pad: 0
  627. stride: 1
  628. bias_term: false
  629. }
  630. }
  631. layer {
  632. bottom: "layer20-conv"
  633. top: "layer20-conv"
  634. name: "layer20-bn"
  635. type: "BatchNorm"
  636. batch_norm_param {
  637. use_global_stats: true
  638. }
  639. }
  640. layer {
  641. bottom: "layer20-conv"
  642. top: "layer20-conv"
  643. name: "layer20-scale"
  644. type: "Scale"
  645. scale_param {
  646. bias_term: true
  647. }
  648. }
  649. layer {
  650. bottom: "layer20-conv"
  651. top: "layer20-conv"
  652. name: "layer20-act"
  653. type: "ReLU"
  654. relu_param {
  655. negative_slope: 0.1
  656. }
  657. }
  658. layer {
  659. bottom: "layer20-conv"
  660. top: "layer21-conv"
  661. name: "layer21-conv"
  662. type: "Convolution"
  663. convolution_param {
  664. num_output: 256
  665. kernel_size: 3
  666. pad: 1
  667. stride: 1
  668. bias_term: false
  669. }
  670. }
  671. layer {
  672. bottom: "layer21-conv"
  673. top: "layer21-conv"
  674. name: "layer21-bn"
  675. type: "BatchNorm"
  676. batch_norm_param {
  677. use_global_stats: true
  678. }
  679. }
  680. layer {
  681. bottom: "layer21-conv"
  682. top: "layer21-conv"
  683. name: "layer21-scale"
  684. type: "Scale"
  685. scale_param {
  686. bias_term: true
  687. }
  688. }
  689. layer {
  690. bottom: "layer21-conv"
  691. top: "layer21-conv"
  692. name: "layer21-act"
  693. type: "ReLU"
  694. relu_param {
  695. negative_slope: 0.1
  696. }
  697. }
  698. layer {
  699. bottom: "layer19-shortcut"
  700. bottom: "layer21-conv"
  701. top: "layer22-shortcut"
  702. name: "layer22-shortcut"
  703. type: "Eltwise"
  704. eltwise_param {
  705. operation: SUM
  706. }
  707. }
  708. layer {
  709. bottom: "layer22-shortcut"
  710. top: "layer23-conv"
  711. name: "layer23-conv"
  712. type: "Convolution"
  713. convolution_param {
  714. num_output: 128
  715. kernel_size: 1
  716. pad: 0
  717. stride: 1
  718. bias_term: false
  719. }
  720. }
  721. layer {
  722. bottom: "layer23-conv"
  723. top: "layer23-conv"
  724. name: "layer23-bn"
  725. type: "BatchNorm"
  726. batch_norm_param {
  727. use_global_stats: true
  728. }
  729. }
  730. layer {
  731. bottom: "layer23-conv"
  732. top: "layer23-conv"
  733. name: "layer23-scale"
  734. type: "Scale"
  735. scale_param {
  736. bias_term: true
  737. }
  738. }
  739. layer {
  740. bottom: "layer23-conv"
  741. top: "layer23-conv"
  742. name: "layer23-act"
  743. type: "ReLU"
  744. relu_param {
  745. negative_slope: 0.1
  746. }
  747. }
  748. layer {
  749. bottom: "layer23-conv"
  750. top: "layer24-conv"
  751. name: "layer24-conv"
  752. type: "Convolution"
  753. convolution_param {
  754. num_output: 256
  755. kernel_size: 3
  756. pad: 1
  757. stride: 1
  758. bias_term: false
  759. }
  760. }
  761. layer {
  762. bottom: "layer24-conv"
  763. top: "layer24-conv"
  764. name: "layer24-bn"
  765. type: "BatchNorm"
  766. batch_norm_param {
  767. use_global_stats: true
  768. }
  769. }
  770. layer {
  771. bottom: "layer24-conv"
  772. top: "layer24-conv"
  773. name: "layer24-scale"
  774. type: "Scale"
  775. scale_param {
  776. bias_term: true
  777. }
  778. }
  779. layer {
  780. bottom: "layer24-conv"
  781. top: "layer24-conv"
  782. name: "layer24-act"
  783. type: "ReLU"
  784. relu_param {
  785. negative_slope: 0.1
  786. }
  787. }
  788. layer {
  789. bottom: "layer22-shortcut"
  790. bottom: "layer24-conv"
  791. top: "layer25-shortcut"
  792. name: "layer25-shortcut"
  793. type: "Eltwise"
  794. eltwise_param {
  795. operation: SUM
  796. }
  797. }
  798. layer {
  799. bottom: "layer25-shortcut"
  800. top: "layer26-conv"
  801. name: "layer26-conv"
  802. type: "Convolution"
  803. convolution_param {
  804. num_output: 128
  805. kernel_size: 1
  806. pad: 0
  807. stride: 1
  808. bias_term: false
  809. }
  810. }
  811. layer {
  812. bottom: "layer26-conv"
  813. top: "layer26-conv"
  814. name: "layer26-bn"
  815. type: "BatchNorm"
  816. batch_norm_param {
  817. use_global_stats: true
  818. }
  819. }
  820. layer {
  821. bottom: "layer26-conv"
  822. top: "layer26-conv"
  823. name: "layer26-scale"
  824. type: "Scale"
  825. scale_param {
  826. bias_term: true
  827. }
  828. }
  829. layer {
  830. bottom: "layer26-conv"
  831. top: "layer26-conv"
  832. name: "layer26-act"
  833. type: "ReLU"
  834. relu_param {
  835. negative_slope: 0.1
  836. }
  837. }
  838. layer {
  839. bottom: "layer26-conv"
  840. top: "layer27-conv"
  841. name: "layer27-conv"
  842. type: "Convolution"
  843. convolution_param {
  844. num_output: 256
  845. kernel_size: 3
  846. pad: 1
  847. stride: 1
  848. bias_term: false
  849. }
  850. }
  851. layer {
  852. bottom: "layer27-conv"
  853. top: "layer27-conv"
  854. name: "layer27-bn"
  855. type: "BatchNorm"
  856. batch_norm_param {
  857. use_global_stats: true
  858. }
  859. }
  860. layer {
  861. bottom: "layer27-conv"
  862. top: "layer27-conv"
  863. name: "layer27-scale"
  864. type: "Scale"
  865. scale_param {
  866. bias_term: true
  867. }
  868. }
  869. layer {
  870. bottom: "layer27-conv"
  871. top: "layer27-conv"
  872. name: "layer27-act"
  873. type: "ReLU"
  874. relu_param {
  875. negative_slope: 0.1
  876. }
  877. }
  878. layer {
  879. bottom: "layer25-shortcut"
  880. bottom: "layer27-conv"
  881. top: "layer28-shortcut"
  882. name: "layer28-shortcut"
  883. type: "Eltwise"
  884. eltwise_param {
  885. operation: SUM
  886. }
  887. }
  888. layer {
  889. bottom: "layer28-shortcut"
  890. top: "layer29-conv"
  891. name: "layer29-conv"
  892. type: "Convolution"
  893. convolution_param {
  894. num_output: 128
  895. kernel_size: 1
  896. pad: 0
  897. stride: 1
  898. bias_term: false
  899. }
  900. }
  901. layer {
  902. bottom: "layer29-conv"
  903. top: "layer29-conv"
  904. name: "layer29-bn"
  905. type: "BatchNorm"
  906. batch_norm_param {
  907. use_global_stats: true
  908. }
  909. }
  910. layer {
  911. bottom: "layer29-conv"
  912. top: "layer29-conv"
  913. name: "layer29-scale"
  914. type: "Scale"
  915. scale_param {
  916. bias_term: true
  917. }
  918. }
  919. layer {
  920. bottom: "layer29-conv"
  921. top: "layer29-conv"
  922. name: "layer29-act"
  923. type: "ReLU"
  924. relu_param {
  925. negative_slope: 0.1
  926. }
  927. }
  928. layer {
  929. bottom: "layer29-conv"
  930. top: "layer30-conv"
  931. name: "layer30-conv"
  932. type: "Convolution"
  933. convolution_param {
  934. num_output: 256
  935. kernel_size: 3
  936. pad: 1
  937. stride: 1
  938. bias_term: false
  939. }
  940. }
  941. layer {
  942. bottom: "layer30-conv"
  943. top: "layer30-conv"
  944. name: "layer30-bn"
  945. type: "BatchNorm"
  946. batch_norm_param {
  947. use_global_stats: true
  948. }
  949. }
  950. layer {
  951. bottom: "layer30-conv"
  952. top: "layer30-conv"
  953. name: "layer30-scale"
  954. type: "Scale"
  955. scale_param {
  956. bias_term: true
  957. }
  958. }
  959. layer {
  960. bottom: "layer30-conv"
  961. top: "layer30-conv"
  962. name: "layer30-act"
  963. type: "ReLU"
  964. relu_param {
  965. negative_slope: 0.1
  966. }
  967. }
  968. layer {
  969. bottom: "layer28-shortcut"
  970. bottom: "layer30-conv"
  971. top: "layer31-shortcut"
  972. name: "layer31-shortcut"
  973. type: "Eltwise"
  974. eltwise_param {
  975. operation: SUM
  976. }
  977. }
  978. layer {
  979. bottom: "layer31-shortcut"
  980. top: "layer32-conv"
  981. name: "layer32-conv"
  982. type: "Convolution"
  983. convolution_param {
  984. num_output: 128
  985. kernel_size: 1
  986. pad: 0
  987. stride: 1
  988. bias_term: false
  989. }
  990. }
  991. layer {
  992. bottom: "layer32-conv"
  993. top: "layer32-conv"
  994. name: "layer32-bn"
  995. type: "BatchNorm"
  996. batch_norm_param {
  997. use_global_stats: true
  998. }
  999. }
  1000. layer {
  1001. bottom: "layer32-conv"
  1002. top: "layer32-conv"
  1003. name: "layer32-scale"
  1004. type: "Scale"
  1005. scale_param {
  1006. bias_term: true
  1007. }
  1008. }
  1009. layer {
  1010. bottom: "layer32-conv"
  1011. top: "layer32-conv"
  1012. name: "layer32-act"
  1013. type: "ReLU"
  1014. relu_param {
  1015. negative_slope: 0.1
  1016. }
  1017. }
  1018. layer {
  1019. bottom: "layer32-conv"
  1020. top: "layer33-conv"
  1021. name: "layer33-conv"
  1022. type: "Convolution"
  1023. convolution_param {
  1024. num_output: 256
  1025. kernel_size: 3
  1026. pad: 1
  1027. stride: 1
  1028. bias_term: false
  1029. }
  1030. }
  1031. layer {
  1032. bottom: "layer33-conv"
  1033. top: "layer33-conv"
  1034. name: "layer33-bn"
  1035. type: "BatchNorm"
  1036. batch_norm_param {
  1037. use_global_stats: true
  1038. }
  1039. }
  1040. layer {
  1041. bottom: "layer33-conv"
  1042. top: "layer33-conv"
  1043. name: "layer33-scale"
  1044. type: "Scale"
  1045. scale_param {
  1046. bias_term: true
  1047. }
  1048. }
  1049. layer {
  1050. bottom: "layer33-conv"
  1051. top: "layer33-conv"
  1052. name: "layer33-act"
  1053. type: "ReLU"
  1054. relu_param {
  1055. negative_slope: 0.1
  1056. }
  1057. }
  1058. layer {
  1059. bottom: "layer31-shortcut"
  1060. bottom: "layer33-conv"
  1061. top: "layer34-shortcut"
  1062. name: "layer34-shortcut"
  1063. type: "Eltwise"
  1064. eltwise_param {
  1065. operation: SUM
  1066. }
  1067. }
  1068. layer {
  1069. bottom: "layer34-shortcut"
  1070. top: "layer35-conv"
  1071. name: "layer35-conv"
  1072. type: "Convolution"
  1073. convolution_param {
  1074. num_output: 128
  1075. kernel_size: 1
  1076. pad: 0
  1077. stride: 1
  1078. bias_term: false
  1079. }
  1080. }
  1081. layer {
  1082. bottom: "layer35-conv"
  1083. top: "layer35-conv"
  1084. name: "layer35-bn"
  1085. type: "BatchNorm"
  1086. batch_norm_param {
  1087. use_global_stats: true
  1088. }
  1089. }
  1090. layer {
  1091. bottom: "layer35-conv"
  1092. top: "layer35-conv"
  1093. name: "layer35-scale"
  1094. type: "Scale"
  1095. scale_param {
  1096. bias_term: true
  1097. }
  1098. }
  1099. layer {
  1100. bottom: "layer35-conv"
  1101. top: "layer35-conv"
  1102. name: "layer35-act"
  1103. type: "ReLU"
  1104. relu_param {
  1105. negative_slope: 0.1
  1106. }
  1107. }
  1108. layer {
  1109. bottom: "layer35-conv"
  1110. top: "layer36-conv"
  1111. name: "layer36-conv"
  1112. type: "Convolution"
  1113. convolution_param {
  1114. num_output: 256
  1115. kernel_size: 3
  1116. pad: 1
  1117. stride: 1
  1118. bias_term: false
  1119. }
  1120. }
  1121. layer {
  1122. bottom: "layer36-conv"
  1123. top: "layer36-conv"
  1124. name: "layer36-bn"
  1125. type: "BatchNorm"
  1126. batch_norm_param {
  1127. use_global_stats: true
  1128. }
  1129. }
  1130. layer {
  1131. bottom: "layer36-conv"
  1132. top: "layer36-conv"
  1133. name: "layer36-scale"
  1134. type: "Scale"
  1135. scale_param {
  1136. bias_term: true
  1137. }
  1138. }
  1139. layer {
  1140. bottom: "layer36-conv"
  1141. top: "layer36-conv"
  1142. name: "layer36-act"
  1143. type: "ReLU"
  1144. relu_param {
  1145. negative_slope: 0.1
  1146. }
  1147. }
  1148. layer {
  1149. bottom: "layer34-shortcut"
  1150. bottom: "layer36-conv"
  1151. top: "layer37-shortcut"
  1152. name: "layer37-shortcut"
  1153. type: "Eltwise"
  1154. eltwise_param {
  1155. operation: SUM
  1156. }
  1157. }
  1158. layer {
  1159. bottom: "layer37-shortcut"
  1160. top: "layer38-conv"
  1161. name: "layer38-conv"
  1162. type: "Convolution"
  1163. convolution_param {
  1164. num_output: 512
  1165. kernel_size: 3
  1166. pad: 1
  1167. stride: 2
  1168. bias_term: false
  1169. }
  1170. }
  1171. layer {
  1172. bottom: "layer38-conv"
  1173. top: "layer38-conv"
  1174. name: "layer38-bn"
  1175. type: "BatchNorm"
  1176. batch_norm_param {
  1177. use_global_stats: true
  1178. }
  1179. }
  1180. layer {
  1181. bottom: "layer38-conv"
  1182. top: "layer38-conv"
  1183. name: "layer38-scale"
  1184. type: "Scale"
  1185. scale_param {
  1186. bias_term: true
  1187. }
  1188. }
  1189. layer {
  1190. bottom: "layer38-conv"
  1191. top: "layer38-conv"
  1192. name: "layer38-act"
  1193. type: "ReLU"
  1194. relu_param {
  1195. negative_slope: 0.1
  1196. }
  1197. }
  1198. layer {
  1199. bottom: "layer38-conv"
  1200. top: "layer39-conv"
  1201. name: "layer39-conv"
  1202. type: "Convolution"
  1203. convolution_param {
  1204. num_output: 256
  1205. kernel_size: 1
  1206. pad: 0
  1207. stride: 1
  1208. bias_term: false
  1209. }
  1210. }
  1211. layer {
  1212. bottom: "layer39-conv"
  1213. top: "layer39-conv"
  1214. name: "layer39-bn"
  1215. type: "BatchNorm"
  1216. batch_norm_param {
  1217. use_global_stats: true
  1218. }
  1219. }
  1220. layer {
  1221. bottom: "layer39-conv"
  1222. top: "layer39-conv"
  1223. name: "layer39-scale"
  1224. type: "Scale"
  1225. scale_param {
  1226. bias_term: true
  1227. }
  1228. }
  1229. layer {
  1230. bottom: "layer39-conv"
  1231. top: "layer39-conv"
  1232. name: "layer39-act"
  1233. type: "ReLU"
  1234. relu_param {
  1235. negative_slope: 0.1
  1236. }
  1237. }
  1238. layer {
  1239. bottom: "layer39-conv"
  1240. top: "layer40-conv"
  1241. name: "layer40-conv"
  1242. type: "Convolution"
  1243. convolution_param {
  1244. num_output: 512
  1245. kernel_size: 3
  1246. pad: 1
  1247. stride: 1
  1248. bias_term: false
  1249. }
  1250. }
  1251. layer {
  1252. bottom: "layer40-conv"
  1253. top: "layer40-conv"
  1254. name: "layer40-bn"
  1255. type: "BatchNorm"
  1256. batch_norm_param {
  1257. use_global_stats: true
  1258. }
  1259. }
  1260. layer {
  1261. bottom: "layer40-conv"
  1262. top: "layer40-conv"
  1263. name: "layer40-scale"
  1264. type: "Scale"
  1265. scale_param {
  1266. bias_term: true
  1267. }
  1268. }
  1269. layer {
  1270. bottom: "layer40-conv"
  1271. top: "layer40-conv"
  1272. name: "layer40-act"
  1273. type: "ReLU"
  1274. relu_param {
  1275. negative_slope: 0.1
  1276. }
  1277. }
  1278. layer {
  1279. bottom: "layer38-conv"
  1280. bottom: "layer40-conv"
  1281. top: "layer41-shortcut"
  1282. name: "layer41-shortcut"
  1283. type: "Eltwise"
  1284. eltwise_param {
  1285. operation: SUM
  1286. }
  1287. }
  1288. layer {
  1289. bottom: "layer41-shortcut"
  1290. top: "layer42-conv"
  1291. name: "layer42-conv"
  1292. type: "Convolution"
  1293. convolution_param {
  1294. num_output: 256
  1295. kernel_size: 1
  1296. pad: 0
  1297. stride: 1
  1298. bias_term: false
  1299. }
  1300. }
  1301. layer {
  1302. bottom: "layer42-conv"
  1303. top: "layer42-conv"
  1304. name: "layer42-bn"
  1305. type: "BatchNorm"
  1306. batch_norm_param {
  1307. use_global_stats: true
  1308. }
  1309. }
  1310. layer {
  1311. bottom: "layer42-conv"
  1312. top: "layer42-conv"
  1313. name: "layer42-scale"
  1314. type: "Scale"
  1315. scale_param {
  1316. bias_term: true
  1317. }
  1318. }
  1319. layer {
  1320. bottom: "layer42-conv"
  1321. top: "layer42-conv"
  1322. name: "layer42-act"
  1323. type: "ReLU"
  1324. relu_param {
  1325. negative_slope: 0.1
  1326. }
  1327. }
  1328. layer {
  1329. bottom: "layer42-conv"
  1330. top: "layer43-conv"
  1331. name: "layer43-conv"
  1332. type: "Convolution"
  1333. convolution_param {
  1334. num_output: 512
  1335. kernel_size: 3
  1336. pad: 1
  1337. stride: 1
  1338. bias_term: false
  1339. }
  1340. }
  1341. layer {
  1342. bottom: "layer43-conv"
  1343. top: "layer43-conv"
  1344. name: "layer43-bn"
  1345. type: "BatchNorm"
  1346. batch_norm_param {
  1347. use_global_stats: true
  1348. }
  1349. }
  1350. layer {
  1351. bottom: "layer43-conv"
  1352. top: "layer43-conv"
  1353. name: "layer43-scale"
  1354. type: "Scale"
  1355. scale_param {
  1356. bias_term: true
  1357. }
  1358. }
  1359. layer {
  1360. bottom: "layer43-conv"
  1361. top: "layer43-conv"
  1362. name: "layer43-act"
  1363. type: "ReLU"
  1364. relu_param {
  1365. negative_slope: 0.1
  1366. }
  1367. }
  1368. layer {
  1369. bottom: "layer41-shortcut"
  1370. bottom: "layer43-conv"
  1371. top: "layer44-shortcut"
  1372. name: "layer44-shortcut"
  1373. type: "Eltwise"
  1374. eltwise_param {
  1375. operation: SUM
  1376. }
  1377. }
  1378. layer {
  1379. bottom: "layer44-shortcut"
  1380. top: "layer45-conv"
  1381. name: "layer45-conv"
  1382. type: "Convolution"
  1383. convolution_param {
  1384. num_output: 256
  1385. kernel_size: 1
  1386. pad: 0
  1387. stride: 1
  1388. bias_term: false
  1389. }
  1390. }
  1391. layer {
  1392. bottom: "layer45-conv"
  1393. top: "layer45-conv"
  1394. name: "layer45-bn"
  1395. type: "BatchNorm"
  1396. batch_norm_param {
  1397. use_global_stats: true
  1398. }
  1399. }
  1400. layer {
  1401. bottom: "layer45-conv"
  1402. top: "layer45-conv"
  1403. name: "layer45-scale"
  1404. type: "Scale"
  1405. scale_param {
  1406. bias_term: true
  1407. }
  1408. }
  1409. layer {
  1410. bottom: "layer45-conv"
  1411. top: "layer45-conv"
  1412. name: "layer45-act"
  1413. type: "ReLU"
  1414. relu_param {
  1415. negative_slope: 0.1
  1416. }
  1417. }
  1418. layer {
  1419. bottom: "layer45-conv"
  1420. top: "layer46-conv"
  1421. name: "layer46-conv"
  1422. type: "Convolution"
  1423. convolution_param {
  1424. num_output: 512
  1425. kernel_size: 3
  1426. pad: 1
  1427. stride: 1
  1428. bias_term: false
  1429. }
  1430. }
  1431. layer {
  1432. bottom: "layer46-conv"
  1433. top: "layer46-conv"
  1434. name: "layer46-bn"
  1435. type: "BatchNorm"
  1436. batch_norm_param {
  1437. use_global_stats: true
  1438. }
  1439. }
  1440. layer {
  1441. bottom: "layer46-conv"
  1442. top: "layer46-conv"
  1443. name: "layer46-scale"
  1444. type: "Scale"
  1445. scale_param {
  1446. bias_term: true
  1447. }
  1448. }
  1449. layer {
  1450. bottom: "layer46-conv"
  1451. top: "layer46-conv"
  1452. name: "layer46-act"
  1453. type: "ReLU"
  1454. relu_param {
  1455. negative_slope: 0.1
  1456. }
  1457. }
  1458. layer {
  1459. bottom: "layer44-shortcut"
  1460. bottom: "layer46-conv"
  1461. top: "layer47-shortcut"
  1462. name: "layer47-shortcut"
  1463. type: "Eltwise"
  1464. eltwise_param {
  1465. operation: SUM
  1466. }
  1467. }
  1468. layer {
  1469. bottom: "layer47-shortcut"
  1470. top: "layer48-conv"
  1471. name: "layer48-conv"
  1472. type: "Convolution"
  1473. convolution_param {
  1474. num_output: 256
  1475. kernel_size: 1
  1476. pad: 0
  1477. stride: 1
  1478. bias_term: false
  1479. }
  1480. }
  1481. layer {
  1482. bottom: "layer48-conv"
  1483. top: "layer48-conv"
  1484. name: "layer48-bn"
  1485. type: "BatchNorm"
  1486. batch_norm_param {
  1487. use_global_stats: true
  1488. }
  1489. }
  1490. layer {
  1491. bottom: "layer48-conv"
  1492. top: "layer48-conv"
  1493. name: "layer48-scale"
  1494. type: "Scale"
  1495. scale_param {
  1496. bias_term: true
  1497. }
  1498. }
  1499. layer {
  1500. bottom: "layer48-conv"
  1501. top: "layer48-conv"
  1502. name: "layer48-act"
  1503. type: "ReLU"
  1504. relu_param {
  1505. negative_slope: 0.1
  1506. }
  1507. }
  1508. layer {
  1509. bottom: "layer48-conv"
  1510. top: "layer49-conv"
  1511. name: "layer49-conv"
  1512. type: "Convolution"
  1513. convolution_param {
  1514. num_output: 512
  1515. kernel_size: 3
  1516. pad: 1
  1517. stride: 1
  1518. bias_term: false
  1519. }
  1520. }
  1521. layer {
  1522. bottom: "layer49-conv"
  1523. top: "layer49-conv"
  1524. name: "layer49-bn"
  1525. type: "BatchNorm"
  1526. batch_norm_param {
  1527. use_global_stats: true
  1528. }
  1529. }
  1530. layer {
  1531. bottom: "layer49-conv"
  1532. top: "layer49-conv"
  1533. name: "layer49-scale"
  1534. type: "Scale"
  1535. scale_param {
  1536. bias_term: true
  1537. }
  1538. }
  1539. layer {
  1540. bottom: "layer49-conv"
  1541. top: "layer49-conv"
  1542. name: "layer49-act"
  1543. type: "ReLU"
  1544. relu_param {
  1545. negative_slope: 0.1
  1546. }
  1547. }
  1548. layer {
  1549. bottom: "layer47-shortcut"
  1550. bottom: "layer49-conv"
  1551. top: "layer50-shortcut"
  1552. name: "layer50-shortcut"
  1553. type: "Eltwise"
  1554. eltwise_param {
  1555. operation: SUM
  1556. }
  1557. }
  1558. layer {
  1559. bottom: "layer50-shortcut"
  1560. top: "layer51-conv"
  1561. name: "layer51-conv"
  1562. type: "Convolution"
  1563. convolution_param {
  1564. num_output: 256
  1565. kernel_size: 1
  1566. pad: 0
  1567. stride: 1
  1568. bias_term: false
  1569. }
  1570. }
  1571. layer {
  1572. bottom: "layer51-conv"
  1573. top: "layer51-conv"
  1574. name: "layer51-bn"
  1575. type: "BatchNorm"
  1576. batch_norm_param {
  1577. use_global_stats: true
  1578. }
  1579. }
  1580. layer {
  1581. bottom: "layer51-conv"
  1582. top: "layer51-conv"
  1583. name: "layer51-scale"
  1584. type: "Scale"
  1585. scale_param {
  1586. bias_term: true
  1587. }
  1588. }
  1589. layer {
  1590. bottom: "layer51-conv"
  1591. top: "layer51-conv"
  1592. name: "layer51-act"
  1593. type: "ReLU"
  1594. relu_param {
  1595. negative_slope: 0.1
  1596. }
  1597. }
  1598. layer {
  1599. bottom: "layer51-conv"
  1600. top: "layer52-conv"
  1601. name: "layer52-conv"
  1602. type: "Convolution"
  1603. convolution_param {
  1604. num_output: 512
  1605. kernel_size: 3
  1606. pad: 1
  1607. stride: 1
  1608. bias_term: false
  1609. }
  1610. }
  1611. layer {
  1612. bottom: "layer52-conv"
  1613. top: "layer52-conv"
  1614. name: "layer52-bn"
  1615. type: "BatchNorm"
  1616. batch_norm_param {
  1617. use_global_stats: true
  1618. }
  1619. }
  1620. layer {
  1621. bottom: "layer52-conv"
  1622. top: "layer52-conv"
  1623. name: "layer52-scale"
  1624. type: "Scale"
  1625. scale_param {
  1626. bias_term: true
  1627. }
  1628. }
  1629. layer {
  1630. bottom: "layer52-conv"
  1631. top: "layer52-conv"
  1632. name: "layer52-act"
  1633. type: "ReLU"
  1634. relu_param {
  1635. negative_slope: 0.1
  1636. }
  1637. }
  1638. layer {
  1639. bottom: "layer50-shortcut"
  1640. bottom: "layer52-conv"
  1641. top: "layer53-shortcut"
  1642. name: "layer53-shortcut"
  1643. type: "Eltwise"
  1644. eltwise_param {
  1645. operation: SUM
  1646. }
  1647. }
  1648. layer {
  1649. bottom: "layer53-shortcut"
  1650. top: "layer54-conv"
  1651. name: "layer54-conv"
  1652. type: "Convolution"
  1653. convolution_param {
  1654. num_output: 256
  1655. kernel_size: 1
  1656. pad: 0
  1657. stride: 1
  1658. bias_term: false
  1659. }
  1660. }
  1661. layer {
  1662. bottom: "layer54-conv"
  1663. top: "layer54-conv"
  1664. name: "layer54-bn"
  1665. type: "BatchNorm"
  1666. batch_norm_param {
  1667. use_global_stats: true
  1668. }
  1669. }
  1670. layer {
  1671. bottom: "layer54-conv"
  1672. top: "layer54-conv"
  1673. name: "layer54-scale"
  1674. type: "Scale"
  1675. scale_param {
  1676. bias_term: true
  1677. }
  1678. }
  1679. layer {
  1680. bottom: "layer54-conv"
  1681. top: "layer54-conv"
  1682. name: "layer54-act"
  1683. type: "ReLU"
  1684. relu_param {
  1685. negative_slope: 0.1
  1686. }
  1687. }
  1688. layer {
  1689. bottom: "layer54-conv"
  1690. top: "layer55-conv"
  1691. name: "layer55-conv"
  1692. type: "Convolution"
  1693. convolution_param {
  1694. num_output: 512
  1695. kernel_size: 3
  1696. pad: 1
  1697. stride: 1
  1698. bias_term: false
  1699. }
  1700. }
  1701. layer {
  1702. bottom: "layer55-conv"
  1703. top: "layer55-conv"
  1704. name: "layer55-bn"
  1705. type: "BatchNorm"
  1706. batch_norm_param {
  1707. use_global_stats: true
  1708. }
  1709. }
  1710. layer {
  1711. bottom: "layer55-conv"
  1712. top: "layer55-conv"
  1713. name: "layer55-scale"
  1714. type: "Scale"
  1715. scale_param {
  1716. bias_term: true
  1717. }
  1718. }
  1719. layer {
  1720. bottom: "layer55-conv"
  1721. top: "layer55-conv"
  1722. name: "layer55-act"
  1723. type: "ReLU"
  1724. relu_param {
  1725. negative_slope: 0.1
  1726. }
  1727. }
  1728. layer {
  1729. bottom: "layer53-shortcut"
  1730. bottom: "layer55-conv"
  1731. top: "layer56-shortcut"
  1732. name: "layer56-shortcut"
  1733. type: "Eltwise"
  1734. eltwise_param {
  1735. operation: SUM
  1736. }
  1737. }
  1738. layer {
  1739. bottom: "layer56-shortcut"
  1740. top: "layer57-conv"
  1741. name: "layer57-conv"
  1742. type: "Convolution"
  1743. convolution_param {
  1744. num_output: 256
  1745. kernel_size: 1
  1746. pad: 0
  1747. stride: 1
  1748. bias_term: false
  1749. }
  1750. }
  1751. layer {
  1752. bottom: "layer57-conv"
  1753. top: "layer57-conv"
  1754. name: "layer57-bn"
  1755. type: "BatchNorm"
  1756. batch_norm_param {
  1757. use_global_stats: true
  1758. }
  1759. }
  1760. layer {
  1761. bottom: "layer57-conv"
  1762. top: "layer57-conv"
  1763. name: "layer57-scale"
  1764. type: "Scale"
  1765. scale_param {
  1766. bias_term: true
  1767. }
  1768. }
  1769. layer {
  1770. bottom: "layer57-conv"
  1771. top: "layer57-conv"
  1772. name: "layer57-act"
  1773. type: "ReLU"
  1774. relu_param {
  1775. negative_slope: 0.1
  1776. }
  1777. }
  1778. layer {
  1779. bottom: "layer57-conv"
  1780. top: "layer58-conv"
  1781. name: "layer58-conv"
  1782. type: "Convolution"
  1783. convolution_param {
  1784. num_output: 512
  1785. kernel_size: 3
  1786. pad: 1
  1787. stride: 1
  1788. bias_term: false
  1789. }
  1790. }
  1791. layer {
  1792. bottom: "layer58-conv"
  1793. top: "layer58-conv"
  1794. name: "layer58-bn"
  1795. type: "BatchNorm"
  1796. batch_norm_param {
  1797. use_global_stats: true
  1798. }
  1799. }
  1800. layer {
  1801. bottom: "layer58-conv"
  1802. top: "layer58-conv"
  1803. name: "layer58-scale"
  1804. type: "Scale"
  1805. scale_param {
  1806. bias_term: true
  1807. }
  1808. }
  1809. layer {
  1810. bottom: "layer58-conv"
  1811. top: "layer58-conv"
  1812. name: "layer58-act"
  1813. type: "ReLU"
  1814. relu_param {
  1815. negative_slope: 0.1
  1816. }
  1817. }
  1818. layer {
  1819. bottom: "layer56-shortcut"
  1820. bottom: "layer58-conv"
  1821. top: "layer59-shortcut"
  1822. name: "layer59-shortcut"
  1823. type: "Eltwise"
  1824. eltwise_param {
  1825. operation: SUM
  1826. }
  1827. }
  1828. layer {
  1829. bottom: "layer59-shortcut"
  1830. top: "layer60-conv"
  1831. name: "layer60-conv"
  1832. type: "Convolution"
  1833. convolution_param {
  1834. num_output: 256
  1835. kernel_size: 1
  1836. pad: 0
  1837. stride: 1
  1838. bias_term: false
  1839. }
  1840. }
  1841. layer {
  1842. bottom: "layer60-conv"
  1843. top: "layer60-conv"
  1844. name: "layer60-bn"
  1845. type: "BatchNorm"
  1846. batch_norm_param {
  1847. use_global_stats: true
  1848. }
  1849. }
  1850. layer {
  1851. bottom: "layer60-conv"
  1852. top: "layer60-conv"
  1853. name: "layer60-scale"
  1854. type: "Scale"
  1855. scale_param {
  1856. bias_term: true
  1857. }
  1858. }
  1859. layer {
  1860. bottom: "layer60-conv"
  1861. top: "layer60-conv"
  1862. name: "layer60-act"
  1863. type: "ReLU"
  1864. relu_param {
  1865. negative_slope: 0.1
  1866. }
  1867. }
  1868. layer {
  1869. bottom: "layer60-conv"
  1870. top: "layer61-conv"
  1871. name: "layer61-conv"
  1872. type: "Convolution"
  1873. convolution_param {
  1874. num_output: 512
  1875. kernel_size: 3
  1876. pad: 1
  1877. stride: 1
  1878. bias_term: false
  1879. }
  1880. }
  1881. layer {
  1882. bottom: "layer61-conv"
  1883. top: "layer61-conv"
  1884. name: "layer61-bn"
  1885. type: "BatchNorm"
  1886. batch_norm_param {
  1887. use_global_stats: true
  1888. }
  1889. }
  1890. layer {
  1891. bottom: "layer61-conv"
  1892. top: "layer61-conv"
  1893. name: "layer61-scale"
  1894. type: "Scale"
  1895. scale_param {
  1896. bias_term: true
  1897. }
  1898. }
  1899. layer {
  1900. bottom: "layer61-conv"
  1901. top: "layer61-conv"
  1902. name: "layer61-act"
  1903. type: "ReLU"
  1904. relu_param {
  1905. negative_slope: 0.1
  1906. }
  1907. }
  1908. layer {
  1909. bottom: "layer59-shortcut"
  1910. bottom: "layer61-conv"
  1911. top: "layer62-shortcut"
  1912. name: "layer62-shortcut"
  1913. type: "Eltwise"
  1914. eltwise_param {
  1915. operation: SUM
  1916. }
  1917. }
  1918. layer {
  1919. bottom: "layer62-shortcut"
  1920. top: "layer63-conv"
  1921. name: "layer63-conv"
  1922. type: "Convolution"
  1923. convolution_param {
  1924. num_output: 1024
  1925. kernel_size: 3
  1926. pad: 1
  1927. stride: 2
  1928. bias_term: false
  1929. }
  1930. }
  1931. layer {
  1932. bottom: "layer63-conv"
  1933. top: "layer63-conv"
  1934. name: "layer63-bn"
  1935. type: "BatchNorm"
  1936. batch_norm_param {
  1937. use_global_stats: true
  1938. }
  1939. }
  1940. layer {
  1941. bottom: "layer63-conv"
  1942. top: "layer63-conv"
  1943. name: "layer63-scale"
  1944. type: "Scale"
  1945. scale_param {
  1946. bias_term: true
  1947. }
  1948. }
  1949. layer {
  1950. bottom: "layer63-conv"
  1951. top: "layer63-conv"
  1952. name: "layer63-act"
  1953. type: "ReLU"
  1954. relu_param {
  1955. negative_slope: 0.1
  1956. }
  1957. }
  1958. layer {
  1959. bottom: "layer63-conv"
  1960. top: "layer64-conv"
  1961. name: "layer64-conv"
  1962. type: "Convolution"
  1963. convolution_param {
  1964. num_output: 512
  1965. kernel_size: 1
  1966. pad: 0
  1967. stride: 1
  1968. bias_term: false
  1969. }
  1970. }
  1971. layer {
  1972. bottom: "layer64-conv"
  1973. top: "layer64-conv"
  1974. name: "layer64-bn"
  1975. type: "BatchNorm"
  1976. batch_norm_param {
  1977. use_global_stats: true
  1978. }
  1979. }
  1980. layer {
  1981. bottom: "layer64-conv"
  1982. top: "layer64-conv"
  1983. name: "layer64-scale"
  1984. type: "Scale"
  1985. scale_param {
  1986. bias_term: true
  1987. }
  1988. }
  1989. layer {
  1990. bottom: "layer64-conv"
  1991. top: "layer64-conv"
  1992. name: "layer64-act"
  1993. type: "ReLU"
  1994. relu_param {
  1995. negative_slope: 0.1
  1996. }
  1997. }
  1998. layer {
  1999. bottom: "layer64-conv"
  2000. top: "layer65-conv"
  2001. name: "layer65-conv"
  2002. type: "Convolution"
  2003. convolution_param {
  2004. num_output: 1024
  2005. kernel_size: 3
  2006. pad: 1
  2007. stride: 1
  2008. bias_term: false
  2009. }
  2010. }
  2011. layer {
  2012. bottom: "layer65-conv"
  2013. top: "layer65-conv"
  2014. name: "layer65-bn"
  2015. type: "BatchNorm"
  2016. batch_norm_param {
  2017. use_global_stats: true
  2018. }
  2019. }
  2020. layer {
  2021. bottom: "layer65-conv"
  2022. top: "layer65-conv"
  2023. name: "layer65-scale"
  2024. type: "Scale"
  2025. scale_param {
  2026. bias_term: true
  2027. }
  2028. }
  2029. layer {
  2030. bottom: "layer65-conv"
  2031. top: "layer65-conv"
  2032. name: "layer65-act"
  2033. type: "ReLU"
  2034. relu_param {
  2035. negative_slope: 0.1
  2036. }
  2037. }
  2038. layer {
  2039. bottom: "layer63-conv"
  2040. bottom: "layer65-conv"
  2041. top: "layer66-shortcut"
  2042. name: "layer66-shortcut"
  2043. type: "Eltwise"
  2044. eltwise_param {
  2045. operation: SUM
  2046. }
  2047. }
  2048. layer {
  2049. bottom: "layer66-shortcut"
  2050. top: "layer67-conv"
  2051. name: "layer67-conv"
  2052. type: "Convolution"
  2053. convolution_param {
  2054. num_output: 512
  2055. kernel_size: 1
  2056. pad: 0
  2057. stride: 1
  2058. bias_term: false
  2059. }
  2060. }
  2061. layer {
  2062. bottom: "layer67-conv"
  2063. top: "layer67-conv"
  2064. name: "layer67-bn"
  2065. type: "BatchNorm"
  2066. batch_norm_param {
  2067. use_global_stats: true
  2068. }
  2069. }
  2070. layer {
  2071. bottom: "layer67-conv"
  2072. top: "layer67-conv"
  2073. name: "layer67-scale"
  2074. type: "Scale"
  2075. scale_param {
  2076. bias_term: true
  2077. }
  2078. }
  2079. layer {
  2080. bottom: "layer67-conv"
  2081. top: "layer67-conv"
  2082. name: "layer67-act"
  2083. type: "ReLU"
  2084. relu_param {
  2085. negative_slope: 0.1
  2086. }
  2087. }
  2088. layer {
  2089. bottom: "layer67-conv"
  2090. top: "layer68-conv"
  2091. name: "layer68-conv"
  2092. type: "Convolution"
  2093. convolution_param {
  2094. num_output: 1024
  2095. kernel_size: 3
  2096. pad: 1
  2097. stride: 1
  2098. bias_term: false
  2099. }
  2100. }
  2101. layer {
  2102. bottom: "layer68-conv"
  2103. top: "layer68-conv"
  2104. name: "layer68-bn"
  2105. type: "BatchNorm"
  2106. batch_norm_param {
  2107. use_global_stats: true
  2108. }
  2109. }
  2110. layer {
  2111. bottom: "layer68-conv"
  2112. top: "layer68-conv"
  2113. name: "layer68-scale"
  2114. type: "Scale"
  2115. scale_param {
  2116. bias_term: true
  2117. }
  2118. }
  2119. layer {
  2120. bottom: "layer68-conv"
  2121. top: "layer68-conv"
  2122. name: "layer68-act"
  2123. type: "ReLU"
  2124. relu_param {
  2125. negative_slope: 0.1
  2126. }
  2127. }
  2128. layer {
  2129. bottom: "layer66-shortcut"
  2130. bottom: "layer68-conv"
  2131. top: "layer69-shortcut"
  2132. name: "layer69-shortcut"
  2133. type: "Eltwise"
  2134. eltwise_param {
  2135. operation: SUM
  2136. }
  2137. }
  2138. layer {
  2139. bottom: "layer69-shortcut"
  2140. top: "layer70-conv"
  2141. name: "layer70-conv"
  2142. type: "Convolution"
  2143. convolution_param {
  2144. num_output: 512
  2145. kernel_size: 1
  2146. pad: 0
  2147. stride: 1
  2148. bias_term: false
  2149. }
  2150. }
  2151. layer {
  2152. bottom: "layer70-conv"
  2153. top: "layer70-conv"
  2154. name: "layer70-bn"
  2155. type: "BatchNorm"
  2156. batch_norm_param {
  2157. use_global_stats: true
  2158. }
  2159. }
  2160. layer {
  2161. bottom: "layer70-conv"
  2162. top: "layer70-conv"
  2163. name: "layer70-scale"
  2164. type: "Scale"
  2165. scale_param {
  2166. bias_term: true
  2167. }
  2168. }
  2169. layer {
  2170. bottom: "layer70-conv"
  2171. top: "layer70-conv"
  2172. name: "layer70-act"
  2173. type: "ReLU"
  2174. relu_param {
  2175. negative_slope: 0.1
  2176. }
  2177. }
  2178. layer {
  2179. bottom: "layer70-conv"
  2180. top: "layer71-conv"
  2181. name: "layer71-conv"
  2182. type: "Convolution"
  2183. convolution_param {
  2184. num_output: 1024
  2185. kernel_size: 3
  2186. pad: 1
  2187. stride: 1
  2188. bias_term: false
  2189. }
  2190. }
  2191. layer {
  2192. bottom: "layer71-conv"
  2193. top: "layer71-conv"
  2194. name: "layer71-bn"
  2195. type: "BatchNorm"
  2196. batch_norm_param {
  2197. use_global_stats: true
  2198. }
  2199. }
  2200. layer {
  2201. bottom: "layer71-conv"
  2202. top: "layer71-conv"
  2203. name: "layer71-scale"
  2204. type: "Scale"
  2205. scale_param {
  2206. bias_term: true
  2207. }
  2208. }
  2209. layer {
  2210. bottom: "layer71-conv"
  2211. top: "layer71-conv"
  2212. name: "layer71-act"
  2213. type: "ReLU"
  2214. relu_param {
  2215. negative_slope: 0.1
  2216. }
  2217. }
  2218. layer {
  2219. bottom: "layer69-shortcut"
  2220. bottom: "layer71-conv"
  2221. top: "layer72-shortcut"
  2222. name: "layer72-shortcut"
  2223. type: "Eltwise"
  2224. eltwise_param {
  2225. operation: SUM
  2226. }
  2227. }
  2228. layer {
  2229. bottom: "layer72-shortcut"
  2230. top: "layer73-conv"
  2231. name: "layer73-conv"
  2232. type: "Convolution"
  2233. convolution_param {
  2234. num_output: 512
  2235. kernel_size: 1
  2236. pad: 0
  2237. stride: 1
  2238. bias_term: false
  2239. }
  2240. }
  2241. layer {
  2242. bottom: "layer73-conv"
  2243. top: "layer73-conv"
  2244. name: "layer73-bn"
  2245. type: "BatchNorm"
  2246. batch_norm_param {
  2247. use_global_stats: true
  2248. }
  2249. }
  2250. layer {
  2251. bottom: "layer73-conv"
  2252. top: "layer73-conv"
  2253. name: "layer73-scale"
  2254. type: "Scale"
  2255. scale_param {
  2256. bias_term: true
  2257. }
  2258. }
  2259. layer {
  2260. bottom: "layer73-conv"
  2261. top: "layer73-conv"
  2262. name: "layer73-act"
  2263. type: "ReLU"
  2264. relu_param {
  2265. negative_slope: 0.1
  2266. }
  2267. }
  2268. layer {
  2269. bottom: "layer73-conv"
  2270. top: "layer74-conv"
  2271. name: "layer74-conv"
  2272. type: "Convolution"
  2273. convolution_param {
  2274. num_output: 1024
  2275. kernel_size: 3
  2276. pad: 1
  2277. stride: 1
  2278. bias_term: false
  2279. }
  2280. }
  2281. layer {
  2282. bottom: "layer74-conv"
  2283. top: "layer74-conv"
  2284. name: "layer74-bn"
  2285. type: "BatchNorm"
  2286. batch_norm_param {
  2287. use_global_stats: true
  2288. }
  2289. }
  2290. layer {
  2291. bottom: "layer74-conv"
  2292. top: "layer74-conv"
  2293. name: "layer74-scale"
  2294. type: "Scale"
  2295. scale_param {
  2296. bias_term: true
  2297. }
  2298. }
  2299. layer {
  2300. bottom: "layer74-conv"
  2301. top: "layer74-conv"
  2302. name: "layer74-act"
  2303. type: "ReLU"
  2304. relu_param {
  2305. negative_slope: 0.1
  2306. }
  2307. }
  2308. layer {
  2309. bottom: "layer72-shortcut"
  2310. bottom: "layer74-conv"
  2311. top: "layer75-shortcut"
  2312. name: "layer75-shortcut"
  2313. type: "Eltwise"
  2314. eltwise_param {
  2315. operation: SUM
  2316. }
  2317. }
  2318. layer {
  2319. bottom: "layer75-shortcut"
  2320. top: "layer76-conv"
  2321. name: "layer76-conv"
  2322. type: "Convolution"
  2323. convolution_param {
  2324. num_output: 512
  2325. kernel_size: 1
  2326. pad: 0
  2327. stride: 1
  2328. bias_term: false
  2329. }
  2330. }
  2331. layer {
  2332. bottom: "layer76-conv"
  2333. top: "layer76-conv"
  2334. name: "layer76-bn"
  2335. type: "BatchNorm"
  2336. batch_norm_param {
  2337. use_global_stats: true
  2338. }
  2339. }
  2340. layer {
  2341. bottom: "layer76-conv"
  2342. top: "layer76-conv"
  2343. name: "layer76-scale"
  2344. type: "Scale"
  2345. scale_param {
  2346. bias_term: true
  2347. }
  2348. }
  2349. layer {
  2350. bottom: "layer76-conv"
  2351. top: "layer76-conv"
  2352. name: "layer76-act"
  2353. type: "ReLU"
  2354. relu_param {
  2355. negative_slope: 0.1
  2356. }
  2357. }
  2358. layer {
  2359. bottom: "layer76-conv"
  2360. top: "layer77-conv"
  2361. name: "layer77-conv"
  2362. type: "Convolution"
  2363. convolution_param {
  2364. num_output: 1024
  2365. kernel_size: 3
  2366. pad: 1
  2367. stride: 1
  2368. bias_term: false
  2369. }
  2370. }
  2371. layer {
  2372. bottom: "layer77-conv"
  2373. top: "layer77-conv"
  2374. name: "layer77-bn"
  2375. type: "BatchNorm"
  2376. batch_norm_param {
  2377. use_global_stats: true
  2378. }
  2379. }
  2380. layer {
  2381. bottom: "layer77-conv"
  2382. top: "layer77-conv"
  2383. name: "layer77-scale"
  2384. type: "Scale"
  2385. scale_param {
  2386. bias_term: true
  2387. }
  2388. }
  2389. layer {
  2390. bottom: "layer77-conv"
  2391. top: "layer77-conv"
  2392. name: "layer77-act"
  2393. type: "ReLU"
  2394. relu_param {
  2395. negative_slope: 0.1
  2396. }
  2397. }
  2398. layer {
  2399. bottom: "layer77-conv"
  2400. top: "layer78-conv"
  2401. name: "layer78-conv"
  2402. type: "Convolution"
  2403. convolution_param {
  2404. num_output: 512
  2405. kernel_size: 1
  2406. pad: 0
  2407. stride: 1
  2408. bias_term: false
  2409. }
  2410. }
  2411. layer {
  2412. bottom: "layer78-conv"
  2413. top: "layer78-conv"
  2414. name: "layer78-bn"
  2415. type: "BatchNorm"
  2416. batch_norm_param {
  2417. use_global_stats: true
  2418. }
  2419. }
  2420. layer {
  2421. bottom: "layer78-conv"
  2422. top: "layer78-conv"
  2423. name: "layer78-scale"
  2424. type: "Scale"
  2425. scale_param {
  2426. bias_term: true
  2427. }
  2428. }
  2429. layer {
  2430. bottom: "layer78-conv"
  2431. top: "layer78-conv"
  2432. name: "layer78-act"
  2433. type: "ReLU"
  2434. relu_param {
  2435. negative_slope: 0.1
  2436. }
  2437. }
  2438. layer {
  2439. bottom: "layer78-conv"
  2440. top: "layer79-conv"
  2441. name: "layer79-conv"
  2442. type: "Convolution"
  2443. convolution_param {
  2444. num_output: 1024
  2445. kernel_size: 3
  2446. pad: 1
  2447. stride: 1
  2448. bias_term: false
  2449. }
  2450. }
  2451. layer {
  2452. bottom: "layer79-conv"
  2453. top: "layer79-conv"
  2454. name: "layer79-bn"
  2455. type: "BatchNorm"
  2456. batch_norm_param {
  2457. use_global_stats: true
  2458. }
  2459. }
  2460. layer {
  2461. bottom: "layer79-conv"
  2462. top: "layer79-conv"
  2463. name: "layer79-scale"
  2464. type: "Scale"
  2465. scale_param {
  2466. bias_term: true
  2467. }
  2468. }
  2469. layer {
  2470. bottom: "layer79-conv"
  2471. top: "layer79-conv"
  2472. name: "layer79-act"
  2473. type: "ReLU"
  2474. relu_param {
  2475. negative_slope: 0.1
  2476. }
  2477. }
  2478. layer {
  2479. bottom: "layer79-conv"
  2480. top: "layer80-conv"
  2481. name: "layer80-conv"
  2482. type: "Convolution"
  2483. convolution_param {
  2484. num_output: 512
  2485. kernel_size: 1
  2486. pad: 0
  2487. stride: 1
  2488. bias_term: false
  2489. }
  2490. }
  2491. layer {
  2492. bottom: "layer80-conv"
  2493. top: "layer80-conv"
  2494. name: "layer80-bn"
  2495. type: "BatchNorm"
  2496. batch_norm_param {
  2497. use_global_stats: true
  2498. }
  2499. }
  2500. layer {
  2501. bottom: "layer80-conv"
  2502. top: "layer80-conv"
  2503. name: "layer80-scale"
  2504. type: "Scale"
  2505. scale_param {
  2506. bias_term: true
  2507. }
  2508. }
  2509. layer {
  2510. bottom: "layer80-conv"
  2511. top: "layer80-conv"
  2512. name: "layer80-act"
  2513. type: "ReLU"
  2514. relu_param {
  2515. negative_slope: 0.1
  2516. }
  2517. }
  2518. layer {
  2519. bottom: "layer80-conv"
  2520. top: "layer81-conv"
  2521. name: "layer81-conv"
  2522. type: "Convolution"
  2523. convolution_param {
  2524. num_output: 1024
  2525. kernel_size: 3
  2526. pad: 1
  2527. stride: 1
  2528. bias_term: false
  2529. }
  2530. }
  2531. layer {
  2532. bottom: "layer81-conv"
  2533. top: "layer81-conv"
  2534. name: "layer81-bn"
  2535. type: "BatchNorm"
  2536. batch_norm_param {
  2537. use_global_stats: true
  2538. }
  2539. }
  2540. layer {
  2541. bottom: "layer81-conv"
  2542. top: "layer81-conv"
  2543. name: "layer81-scale"
  2544. type: "Scale"
  2545. scale_param {
  2546. bias_term: true
  2547. }
  2548. }
  2549. layer {
  2550. bottom: "layer81-conv"
  2551. top: "layer81-conv"
  2552. name: "layer81-act"
  2553. type: "ReLU"
  2554. relu_param {
  2555. negative_slope: 0.1
  2556. }
  2557. }
  2558. layer {
  2559. bottom: "layer81-conv"
  2560. top: "layer82-conv"
  2561. name: "layer82-conv"
  2562. type: "Convolution"
  2563. convolution_param {
  2564. num_output: 255
  2565. kernel_size: 1
  2566. pad: 0
  2567. stride: 1
  2568. bias_term: true
  2569. }
  2570. }
  2571. layer {
  2572. bottom: "layer82-conv"
  2573. type: "Concat"
  2574. top: "layer83-yolo"
  2575. name: "layer83-yolo"
  2576. }
  2577. layer {
  2578. bottom: "layer80-conv"
  2579. top: "layer84-route"
  2580. name: "layer84-route"
  2581. type: "Concat"
  2582. }
  2583. layer {
  2584. bottom: "layer84-route"
  2585. top: "layer85-conv"
  2586. name: "layer85-conv"
  2587. type: "Convolution"
  2588. convolution_param {
  2589. num_output: 256
  2590. kernel_size: 1
  2591. pad: 0
  2592. stride: 1
  2593. bias_term: false
  2594. }
  2595. }
  2596. layer {
  2597. bottom: "layer85-conv"
  2598. top: "layer85-conv"
  2599. name: "layer85-bn"
  2600. type: "BatchNorm"
  2601. batch_norm_param {
  2602. use_global_stats: true
  2603. }
  2604. }
  2605. layer {
  2606. bottom: "layer85-conv"
  2607. top: "layer85-conv"
  2608. name: "layer85-scale"
  2609. type: "Scale"
  2610. scale_param {
  2611. bias_term: true
  2612. }
  2613. }
  2614. layer {
  2615. bottom: "layer85-conv"
  2616. top: "layer85-conv"
  2617. name: "layer85-act"
  2618. type: "ReLU"
  2619. relu_param {
  2620. negative_slope: 0.1
  2621. }
  2622. }
  2623. layer {
  2624. bottom: "layer85-conv"
  2625. top: "layer86-upsample"
  2626. name: "layer86-upsample"
  2627. type: "Deconvolution"
  2628. convolution_param {
  2629. stride: 2
  2630. kernel_size: 4
  2631. num_output: 256
  2632. group: 256
  2633. pad: 1
  2634. bias_term: false
  2635. weight_filler {
  2636. type: "bilinear"
  2637. }
  2638. }
  2639. }
  2640. layer {
  2641. bottom: "layer86-upsample"
  2642. bottom: "layer62-shortcut"
  2643. top: "layer87-route"
  2644. name: "layer87-route"
  2645. type: "Concat"
  2646. }
  2647. layer {
  2648. bottom: "layer87-route"
  2649. top: "layer88-conv"
  2650. name: "layer88-conv"
  2651. type: "Convolution"
  2652. convolution_param {
  2653. num_output: 256
  2654. kernel_size: 1
  2655. pad: 0
  2656. stride: 1
  2657. bias_term: false
  2658. }
  2659. }
  2660. layer {
  2661. bottom: "layer88-conv"
  2662. top: "layer88-conv"
  2663. name: "layer88-bn"
  2664. type: "BatchNorm"
  2665. batch_norm_param {
  2666. use_global_stats: true
  2667. }
  2668. }
  2669. layer {
  2670. bottom: "layer88-conv"
  2671. top: "layer88-conv"
  2672. name: "layer88-scale"
  2673. type: "Scale"
  2674. scale_param {
  2675. bias_term: true
  2676. }
  2677. }
  2678. layer {
  2679. bottom: "layer88-conv"
  2680. top: "layer88-conv"
  2681. name: "layer88-act"
  2682. type: "ReLU"
  2683. relu_param {
  2684. negative_slope: 0.1
  2685. }
  2686. }
  2687. layer {
  2688. bottom: "layer88-conv"
  2689. top: "layer89-conv"
  2690. name: "layer89-conv"
  2691. type: "Convolution"
  2692. convolution_param {
  2693. num_output: 512
  2694. kernel_size: 3
  2695. pad: 1
  2696. stride: 1
  2697. bias_term: false
  2698. }
  2699. }
  2700. layer {
  2701. bottom: "layer89-conv"
  2702. top: "layer89-conv"
  2703. name: "layer89-bn"
  2704. type: "BatchNorm"
  2705. batch_norm_param {
  2706. use_global_stats: true
  2707. }
  2708. }
  2709. layer {
  2710. bottom: "layer89-conv"
  2711. top: "layer89-conv"
  2712. name: "layer89-scale"
  2713. type: "Scale"
  2714. scale_param {
  2715. bias_term: true
  2716. }
  2717. }
  2718. layer {
  2719. bottom: "layer89-conv"
  2720. top: "layer89-conv"
  2721. name: "layer89-act"
  2722. type: "ReLU"
  2723. relu_param {
  2724. negative_slope: 0.1
  2725. }
  2726. }
  2727. layer {
  2728. bottom: "layer89-conv"
  2729. top: "layer90-conv"
  2730. name: "layer90-conv"
  2731. type: "Convolution"
  2732. convolution_param {
  2733. num_output: 256
  2734. kernel_size: 1
  2735. pad: 0
  2736. stride: 1
  2737. bias_term: false
  2738. }
  2739. }
  2740. layer {
  2741. bottom: "layer90-conv"
  2742. top: "layer90-conv"
  2743. name: "layer90-bn"
  2744. type: "BatchNorm"
  2745. batch_norm_param {
  2746. use_global_stats: true
  2747. }
  2748. }
  2749. layer {
  2750. bottom: "layer90-conv"
  2751. top: "layer90-conv"
  2752. name: "layer90-scale"
  2753. type: "Scale"
  2754. scale_param {
  2755. bias_term: true
  2756. }
  2757. }
  2758. layer {
  2759. bottom: "layer90-conv"
  2760. top: "layer90-conv"
  2761. name: "layer90-act"
  2762. type: "ReLU"
  2763. relu_param {
  2764. negative_slope: 0.1
  2765. }
  2766. }
  2767. layer {
  2768. bottom: "layer90-conv"
  2769. top: "layer91-conv"
  2770. name: "layer91-conv"
  2771. type: "Convolution"
  2772. convolution_param {
  2773. num_output: 512
  2774. kernel_size: 3
  2775. pad: 1
  2776. stride: 1
  2777. bias_term: false
  2778. }
  2779. }
  2780. layer {
  2781. bottom: "layer91-conv"
  2782. top: "layer91-conv"
  2783. name: "layer91-bn"
  2784. type: "BatchNorm"
  2785. batch_norm_param {
  2786. use_global_stats: true
  2787. }
  2788. }
  2789. layer {
  2790. bottom: "layer91-conv"
  2791. top: "layer91-conv"
  2792. name: "layer91-scale"
  2793. type: "Scale"
  2794. scale_param {
  2795. bias_term: true
  2796. }
  2797. }
  2798. layer {
  2799. bottom: "layer91-conv"
  2800. top: "layer91-conv"
  2801. name: "layer91-act"
  2802. type: "ReLU"
  2803. relu_param {
  2804. negative_slope: 0.1
  2805. }
  2806. }
  2807. layer {
  2808. bottom: "layer91-conv"
  2809. top: "layer92-conv"
  2810. name: "layer92-conv"
  2811. type: "Convolution"
  2812. convolution_param {
  2813. num_output: 256
  2814. kernel_size: 1
  2815. pad: 0
  2816. stride: 1
  2817. bias_term: false
  2818. }
  2819. }
  2820. layer {
  2821. bottom: "layer92-conv"
  2822. top: "layer92-conv"
  2823. name: "layer92-bn"
  2824. type: "BatchNorm"
  2825. batch_norm_param {
  2826. use_global_stats: true
  2827. }
  2828. }
  2829. layer {
  2830. bottom: "layer92-conv"
  2831. top: "layer92-conv"
  2832. name: "layer92-scale"
  2833. type: "Scale"
  2834. scale_param {
  2835. bias_term: true
  2836. }
  2837. }
  2838. layer {
  2839. bottom: "layer92-conv"
  2840. top: "layer92-conv"
  2841. name: "layer92-act"
  2842. type: "ReLU"
  2843. relu_param {
  2844. negative_slope: 0.1
  2845. }
  2846. }
  2847. layer {
  2848. bottom: "layer92-conv"
  2849. top: "layer93-conv"
  2850. name: "layer93-conv"
  2851. type: "Convolution"
convolution_param {
num_output: 512
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "layer93-conv"
top: "layer93-conv"
name: "layer93-bn"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "layer93-conv"
top: "layer93-conv"
name: "layer93-scale"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "layer93-conv"
top: "layer93-conv"
name: "layer93-act"
type: "ReLU"
relu_param {
negative_slope: 0.1
}
}
layer {
bottom: "layer93-conv"
top: "layer94-conv"
name: "layer94-conv"
type: "Convolution"
convolution_param {
num_output: 255
kernel_size: 1
pad: 0
stride: 1
bias_term: true
}
}
layer {
bottom: "layer94-conv"
type: "Concat"
top: "layer95-yolo"
name: "layer95-yolo"
}
layer {
bottom: "layer92-conv"
top: "layer96-route"
name: "layer96-route"
type: "Concat"
}
layer {
bottom: "layer96-route"
top: "layer97-conv"
name: "layer97-conv"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "layer97-conv"
top: "layer97-conv"
name: "layer97-bn"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "layer97-conv"
top: "layer97-conv"
name: "layer97-scale"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "layer97-conv"
top: "layer97-conv"
name: "layer97-act"
type: "ReLU"
relu_param {
negative_slope: 0.1
}
}
layer {
bottom: "layer97-conv"
top: "layer98-upsample"
name: "layer98-upsample"
type: "Deconvolution"
convolution_param {
stride: 2
kernel_size: 4
num_output: 128
group: 128
pad: 1
bias_term: false
weight_filler {
type: "bilinear"
}
}
}
layer {
bottom: "layer98-upsample"
bottom: "layer37-shortcut"
top: "layer99-route"
name: "layer99-route"
type: "Concat"
}
layer {
bottom: "layer99-route"
top: "layer100-conv"
name: "layer100-conv"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "layer100-conv"
top: "layer100-conv"
name: "layer100-bn"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "layer100-conv"
top: "layer100-conv"
name: "layer100-scale"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "layer100-conv"
top: "layer100-conv"
name: "layer100-act"
type: "ReLU"
relu_param {
negative_slope: 0.1
}
}
layer {
bottom: "layer100-conv"
top: "layer101-conv"
name: "layer101-conv"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "layer101-conv"
top: "layer101-conv"
name: "layer101-bn"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "layer101-conv"
top: "layer101-conv"
name: "layer101-scale"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "layer101-conv"
top: "layer101-conv"
name: "layer101-act"
type: "ReLU"
relu_param {
negative_slope: 0.1
}
}
layer {
bottom: "layer101-conv"
top: "layer102-conv"
name: "layer102-conv"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "layer102-conv"
top: "layer102-conv"
name: "layer102-bn"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "layer102-conv"
top: "layer102-conv"
name: "layer102-scale"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "layer102-conv"
top: "layer102-conv"
name: "layer102-act"
type: "ReLU"
relu_param {
negative_slope: 0.1
}
}
layer {
bottom: "layer102-conv"
top: "layer103-conv"
name: "layer103-conv"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "layer103-conv"
top: "layer103-conv"
name: "layer103-bn"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "layer103-conv"
top: "layer103-conv"
name: "layer103-scale"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "layer103-conv"
top: "layer103-conv"
name: "layer103-act"
type: "ReLU"
relu_param {
negative_slope: 0.1
}
}
layer {
bottom: "layer103-conv"
top: "layer104-conv"
name: "layer104-conv"
type: "Convolution"
convolution_param {
num_output: 128
kernel_size: 1
pad: 0
stride: 1
bias_term: false
}
}
layer {
bottom: "layer104-conv"
top: "layer104-conv"
name: "layer104-bn"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "layer104-conv"
top: "layer104-conv"
name: "layer104-scale"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "layer104-conv"
top: "layer104-conv"
name: "layer104-act"
type: "ReLU"
relu_param {
negative_slope: 0.1
}
}
layer {
bottom: "layer104-conv"
top: "layer105-conv"
name: "layer105-conv"
type: "Convolution"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
stride: 1
bias_term: false
}
}
layer {
bottom: "layer105-conv"
top: "layer105-conv"
name: "layer105-bn"
type: "BatchNorm"
batch_norm_param {
use_global_stats: true
}
}
layer {
bottom: "layer105-conv"
top: "layer105-conv"
name: "layer105-scale"
type: "Scale"
scale_param {
bias_term: true
}
}
layer {
bottom: "layer105-conv"
top: "layer105-conv"
name: "layer105-act"
type: "ReLU"
relu_param {
negative_slope: 0.1
}
}
layer {
bottom: "layer105-conv"
top: "layer106-conv"
name: "layer106-conv"
type: "Convolution"
convolution_param {
num_output: 255
kernel_size: 1
pad: 0
stride: 1
bias_term: true
}
}
layer {
bottom: "layer83-yolo"
bottom: "layer95-yolo"
bottom: "layer106-conv"
type: "Yolov3DetectionOutput"
top: "layer107-yolo"
name: "layer107-yolo"
yolov3_detection_output_param {
nms_threshold: 0.45
num_classes: 80
biases: 10
biases: 13
biases: 16
biases: 30
biases: 33
biases: 23
biases: 30
biases: 61
biases: 62
biases: 45
biases: 59
biases: 119
biases: 116
biases: 90
biases: 156
biases: 198
biases: 373
biases: 326
mask: 6
mask: 7
mask: 8
mask: 3
mask: 4
mask: 5
mask: 0
mask: 1
mask: 2
mask_group_num: 3
anchors_scale: 32
anchors_scale: 16
anchors_scale: 8
}
}