name: "resnet"
layer {
  name: "input1"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TRAIN
  }
  data_param {
    source: "train.lmdb"
    batch_size: 32
    backend: LMDB
  }
}
layer {
  name: "input1"
  type: "MemoryData"
  top: "data"
  top: "label"
  include {
    phase: TEST
  }
  memory_data_param {
    batch_size: 32
    channels: 1
    height: 1024
    width: 69
  }
}
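# Two phase-specific input layers share the same tops: TRAIN streams
# 32-sample batches from train.lmdb, while TEST uses a MemoryData layer,
# which expects data pushed in from the host (e.g. via pycaffe's
# net.set_input_arrays, sketched at the end of this file). Both phases
# must deliver 32 x 1 x 1024 x 69 blobs so the layers below see the same
# geometry.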
layer {
  name: "conv_data"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 256
    pad: 1
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 69
  }
}
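# Stem convolution: the 3x69 kernel spans the full input width. Note that
# pad: 1 applies to BOTH dimensions, so the 1 x 1024 x 69 input maps to
# 256 x 1024 x 3 (width: (69 + 2*1 - 69)/1 + 1 = 3); if a width-1 output
# was intended, pad_h: 1 with pad_w: 0 would give 256 x 1024 x 1 instead.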
layer {
  name: "bn_conv1"
  type: "BatchNorm"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "scale_conv1"
  type: "Scale"
  bottom: "conv1"
  top: "conv1"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_conv1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "conv_conv1"
  type: "Convolution"
  bottom: "conv1"
  top: "conv1_branch1"
  convolution_param {
    num_output: 256
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "bn_conv1_branch1"
  type: "BatchNorm"
  bottom: "conv1_branch1"
  top: "conv1_branch1"
}
layer {
  name: "scale_conv1_branch1"
  type: "Scale"
  bottom: "conv1_branch1"
  top: "conv1_branch1"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_conv1_branch1"
  type: "ReLU"
  bottom: "conv1_branch1"
  top: "conv1_branch1"
}
layer {
  name: "conv_conv1_branch1"
  type: "Convolution"
  bottom: "conv1_branch1"
  top: "conv2_branch1"
  convolution_param {
    num_output: 256
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "shortcut_conv1"
  type: "Convolution"
  bottom: "conv1"
  top: "conv1_branch"
  convolution_param {
    num_output: 256
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "elt_res1"
  type: "Eltwise"
  bottom: "conv1_branch"
  bottom: "conv1_branch1"
  top: "res1"
}
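# First residual unit: conv1 feeds both a 3x1 convolution branch
# (conv1_branch1) and a 3x1 projection shortcut (conv1_branch), and
# elt_res1 sums them (Eltwise defaults to SUM). Caution: the top
# conv2_branch1 produced by conv_conv1_branch1 above is never consumed by
# any later layer, so that convolution is dead weight; the same holds for
# conv2_branch2 through conv2_branch6 in the blocks below.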
layer {
  name: "bn_res1"
  type: "BatchNorm"
  bottom: "res1"
  top: "res1"
}
layer {
  name: "scale_res1"
  type: "Scale"
  bottom: "res1"
  top: "res1"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_res1"
  type: "ReLU"
  bottom: "res1"
  top: "res1"
}
layer {
  name: "conv_res1"
  type: "Convolution"
  bottom: "res1"
  top: "conv1_branch2"
  convolution_param {
    num_output: 256
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "bn_conv1_branch2"
  type: "BatchNorm"
  bottom: "conv1_branch2"
  top: "conv1_branch2"
}
layer {
  name: "scale_conv1_branch2"
  type: "Scale"
  bottom: "conv1_branch2"
  top: "conv1_branch2"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_conv1_branch2"
  type: "ReLU"
  bottom: "conv1_branch2"
  top: "conv1_branch2"
}
layer {
  name: "conv_conv1_branch2"
  type: "Convolution"
  bottom: "conv1_branch2"
  top: "conv2_branch2"
  convolution_param {
    num_output: 256
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "shortcut_res1"
  type: "Convolution"
  bottom: "res1"
  top: "res1_branch"
  convolution_param {
    num_output: 256
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "elt_res2"
  type: "Eltwise"
  bottom: "res1_branch"
  bottom: "conv1_branch2"
  top: "res2"
}
layer {
  name: "pool_res2"
  type: "Pooling"
  bottom: "res2"
  top: "pool3"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 1
    stride_h: 2
    stride_w: 1
  }
}
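# 3x1 max pooling with stride_h: 2 roughly halves the feature-map height
# and leaves the width untouched. Each unpadded 3x1 convolution above has
# trimmed 2 rows, so 1020 rows should arrive here; Caffe pooling rounds
# up, giving ceil((1020 - 3) / 2) + 1 = 510 rows out.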
layer {
  name: "bn_pool3"
  type: "BatchNorm"
  bottom: "pool3"
  top: "pool3"
}
layer {
  name: "scale_pool3"
  type: "Scale"
  bottom: "pool3"
  top: "pool3"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_pool3"
  type: "ReLU"
  bottom: "pool3"
  top: "pool3"
}
layer {
  name: "conv_pool3"
  type: "Convolution"
  bottom: "pool3"
  top: "conv1_branch3"
  convolution_param {
    num_output: 64
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "bn_conv1_branch3"
  type: "BatchNorm"
  bottom: "conv1_branch3"
  top: "conv1_branch3"
}
layer {
  name: "scale_conv1_branch3"
  type: "Scale"
  bottom: "conv1_branch3"
  top: "conv1_branch3"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_conv1_branch3"
  type: "ReLU"
  bottom: "conv1_branch3"
  top: "conv1_branch3"
}
layer {
  name: "conv_conv1_branch3"
  type: "Convolution"
  bottom: "conv1_branch3"
  top: "conv2_branch3"
  convolution_param {
    num_output: 64
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "shortcut_pool3"
  type: "Convolution"
  bottom: "pool3"
  top: "pool3_branch"
  convolution_param {
    num_output: 64
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "elt_res3"
  type: "Eltwise"
  bottom: "pool3_branch"
  bottom: "conv1_branch3"
  top: "res3"
}
layer {
  name: "bn_res3"
  type: "BatchNorm"
  bottom: "res3"
  top: "res3"
}
layer {
  name: "scale_res3"
  type: "Scale"
  bottom: "res3"
  top: "res3"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_res3"
  type: "ReLU"
  bottom: "res3"
  top: "res3"
}
layer {
  name: "conv_res3"
  type: "Convolution"
  bottom: "res3"
  top: "conv1_branch4"
  convolution_param {
    num_output: 64
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "bn_conv1_branch4"
  type: "BatchNorm"
  bottom: "conv1_branch4"
  top: "conv1_branch4"
}
layer {
  name: "scale_conv1_branch4"
  type: "Scale"
  bottom: "conv1_branch4"
  top: "conv1_branch4"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_conv1_branch4"
  type: "ReLU"
  bottom: "conv1_branch4"
  top: "conv1_branch4"
}
layer {
  name: "conv_conv1_branch4"
  type: "Convolution"
  bottom: "conv1_branch4"
  top: "conv2_branch4"
  convolution_param {
    num_output: 64
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "shortcut_res3"
  type: "Convolution"
  bottom: "res3"
  top: "res3_branch"
  convolution_param {
    num_output: 64
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "elt_res4"
  type: "Eltwise"
  bottom: "res3_branch"
  bottom: "conv1_branch4"
  top: "res4"
}
layer {
  name: "pool_res4"
  type: "Pooling"
  bottom: "res4"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 1
    stride_h: 2
    stride_w: 1
  }
}
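# Second 3x1, stride-2 max pool; the height is roughly halved again, and
# the convolutions in the next block drop the channel count from 64 to 32.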
layer {
  name: "bn_pool5"
  type: "BatchNorm"
  bottom: "pool5"
  top: "pool5"
}
layer {
  name: "scale_pool5"
  type: "Scale"
  bottom: "pool5"
  top: "pool5"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_pool5"
  type: "ReLU"
  bottom: "pool5"
  top: "pool5"
}
layer {
  name: "conv_pool5"
  type: "Convolution"
  bottom: "pool5"
  top: "conv1_branch5"
  convolution_param {
    num_output: 32
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "bn_conv1_branch5"
  type: "BatchNorm"
  bottom: "conv1_branch5"
  top: "conv1_branch5"
}
layer {
  name: "scale_conv1_branch5"
  type: "Scale"
  bottom: "conv1_branch5"
  top: "conv1_branch5"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_conv1_branch5"
  type: "ReLU"
  bottom: "conv1_branch5"
  top: "conv1_branch5"
}
layer {
  name: "conv_conv1_branch5"
  type: "Convolution"
  bottom: "conv1_branch5"
  top: "conv2_branch5"
  convolution_param {
    num_output: 32
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "shortcut_pool5"
  type: "Convolution"
  bottom: "pool5"
  top: "pool5_branch"
  convolution_param {
    num_output: 32
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "elt_res5"
  type: "Eltwise"
  bottom: "pool5_branch"
  bottom: "conv1_branch5"
  top: "res5"
}
layer {
  name: "bn_res5"
  type: "BatchNorm"
  bottom: "res5"
  top: "res5"
}
layer {
  name: "scale_res5"
  type: "Scale"
  bottom: "res5"
  top: "res5"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_res5"
  type: "ReLU"
  bottom: "res5"
  top: "res5"
}
layer {
  name: "conv_res5"
  type: "Convolution"
  bottom: "res5"
  top: "conv1_branch6"
  convolution_param {
    num_output: 32
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "bn_conv1_branch6"
  type: "BatchNorm"
  bottom: "conv1_branch6"
  top: "conv1_branch6"
}
layer {
  name: "scale_conv1_branch6"
  type: "Scale"
  bottom: "conv1_branch6"
  top: "conv1_branch6"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "act_ReLU_conv1_branch6"
  type: "ReLU"
  bottom: "conv1_branch6"
  top: "conv1_branch6"
}
layer {
  name: "conv_conv1_branch6"
  type: "Convolution"
  bottom: "conv1_branch6"
  top: "conv2_branch6"
  convolution_param {
    num_output: 32
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "shortcut_res5"
  type: "Convolution"
  bottom: "res5"
  top: "res5_branch"
  convolution_param {
    num_output: 32
    pad: 0
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
    kernel_h: 3
    kernel_w: 1
  }
}
layer {
  name: "elt_res6"
  type: "Eltwise"
  bottom: "res5_branch"
  bottom: "conv1_branch6"
  top: "res6"
}
layer {
  name: "pool_res6"
  type: "Pooling"
  bottom: "res6"
  top: "pool7"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 1
    stride_h: 2
    stride_w: 1
  }
}
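# Final 3x1, stride-2 max pool before the fully connected head. The
# InnerProduct layer below flattens whatever 32 x H x W blob arrives here,
# so its weight matrix is sized by this geometry at net initialization.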
layer {
  name: "fc_pool7"
  type: "InnerProduct"
  bottom: "pool7"
  top: "fc512_0"
  inner_product_param {
    num_output: 512
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "act_ReLU_fc512_0"
  type: "ReLU"
  bottom: "fc512_0"
  top: "fc512_0"
}
layer {
  name: "drop_fc512_0"
  type: "Dropout"
  bottom: "fc512_0"
  top: "fc512_0"
  dropout_param {
    dropout_ratio: 0.5
  }
}
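# Caffe's Dropout layer is active only in the TRAIN phase (inverted
# dropout, scaled by 1/(1 - dropout_ratio)); at TEST time it is a
# pass-through, so fc512_0 needs no rescaling.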
layer {
  name: "fc_fc512_0"
  type: "InnerProduct"
  bottom: "fc512_0"
  top: "fc512_1"
  inner_product_param {
    num_output: 512
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "act_ReLU_fc512_1"
  type: "ReLU"
  bottom: "fc512_1"
  top: "fc512_1"
}
layer {
  name: "drop_fc512_1"
  type: "Dropout"
  bottom: "fc512_1"
  top: "fc512_1"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc_fc512_1"
  type: "InnerProduct"
  bottom: "fc512_1"
  top: "ip_losst"
  inner_product_param {
    num_output: 4
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
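# 4-way classifier head: ip_losst carries the raw logits consumed by the
# phase-specific softmax layers below.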
layer {
  name: "prob"
  type: "SoftmaxWithLoss"
  bottom: "ip_losst"
  bottom: "label"
  top: "losst"
  include {
    phase: TRAIN
  }
}
layer {
  name: "probt"
  type: "Softmax"
  bottom: "ip_losst"
  top: "losst"
  include {
    phase: TEST
  }
}
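
Because the TEST phase takes its input from a MemoryData layer, this net cannot simply be pointed at an lmdb for inference; data has to be pushed in from the host. Below is a minimal pycaffe sketch of how one might run it. The file names resnet.prototxt and resnet.caffemodel are assumptions for illustration, not part of the paste.

# Minimal inference sketch for the net above, using pycaffe.
# ASSUMPTIONS: the prototxt is saved as "resnet.prototxt" and trained
# weights exist as "resnet.caffemodel" (both names are hypothetical).
import numpy as np
import caffe

caffe.set_mode_cpu()
net = caffe.Net('resnet.prototxt', 'resnet.caffemodel', caffe.TEST)

# MemoryData input: arrays must be contiguous float32, the sample count a
# multiple of the declared batch_size (32), and pycaffe expects labels
# shaped (N, 1, 1, 1).
data = np.random.rand(32, 1, 1024, 69).astype(np.float32)
labels = np.zeros((32, 1, 1, 1), dtype=np.float32)
net.set_input_arrays(data, labels)

out = net.forward()
probs = out['losst']           # (32, 4): softmax over the 4 classes
print(probs.argmax(axis=1))    # predicted class per sample

Training, by contrast, goes through a solver prototxt that points at this file; in the TRAIN phase the Data layer reads train.lmdb directly and SoftmaxWithLoss supplies the gradient.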