Advertisement
Guest User

Untitled

a guest
Aug 26th, 2016
87
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 11.56 KB | None | 0 0
# Please cite:
# @article{SqueezeNet,
#   Author  = {Forrest N. Iandola and Matthew W. Moskewicz and Khalid Ashraf and Song Han and William J. Dally and Kurt Keutzer},
#   Title   = {SqueezeNet: AlexNet-level accuracy with 50x fewer parameters and $<$1MB model size},
#   Journal = {arXiv:1602.07360},
#   Year    = {2016}
# }
  8. layer {
  9. name: "data"
  10. type: "Data"
  11. top: "data"
  12. top: "label"
  13. include {
  14. phase: TRAIN
  15. }
  16. transform_param {
  17. crop_size: 227
  18. mean_value: 104
  19. mean_value: 117
  20. mean_value: 123
  21. }
  22. data_param {
  23. source: "examples/imagenet/ilsvrc12_train_lmdb"
  24. batch_size: 32
  25. backend: LMDB
  26. }
  27. }
  28. layer {
  29. name: "data"
  30. type: "Data"
  31. top: "data"
  32. top: "label"
  33. include {
  34. phase: TEST
  35. }
  36. transform_param {
  37. crop_size: 227
  38. mean_value: 104
  39. mean_value: 117
  40. mean_value: 123
  41. }
  42. data_param {
  43. source: "examples/imagenet/ilsvrc12_val_lmdb"
  44. batch_size: 25 #not *iter_size
  45. backend: LMDB
  46. }
  47. }
  48. layer {
  49. name: "conv1"
  50. type: "Convolution"
  51. bottom: "data"
  52. top: "conv1"
  53. convolution_param {
  54. num_output: 64
  55. kernel_size: 3
  56. stride: 2
  57. weight_filler {
  58. type: "xavier"
  59. }
  60. }
  61. }
  62. layer {
  63. name: "relu_conv1"
  64. type: "ReLU"
  65. bottom: "conv1"
  66. top: "conv1"
  67. }
  68. layer {
  69. name: "pool1"
  70. type: "Pooling"
  71. bottom: "conv1"
  72. top: "pool1"
  73. pooling_param {
  74. pool: MAX
  75. kernel_size: 3
  76. stride: 2
  77. }
  78. }
  79. layer {
  80. name: "fire2/squeeze1x1"
  81. type: "Convolution"
  82. bottom: "pool1"
  83. top: "fire2/squeeze1x1"
  84. convolution_param {
  85. num_output: 16
  86. kernel_size: 1
  87. weight_filler {
  88. type: "xavier"
  89. }
  90. }
  91. }
  92. layer {
  93. name: "fire2/relu_squeeze1x1"
  94. type: "ReLU"
  95. bottom: "fire2/squeeze1x1"
  96. top: "fire2/squeeze1x1"
  97. }
  98. layer {
  99. name: "fire2/expand1x1"
  100. type: "Convolution"
  101. bottom: "fire2/squeeze1x1"
  102. top: "fire2/expand1x1"
  103. convolution_param {
  104. num_output: 64
  105. kernel_size: 1
  106. weight_filler {
  107. type: "xavier"
  108. }
  109. }
  110. }
  111. layer {
  112. name: "fire2/relu_expand1x1"
  113. type: "ReLU"
  114. bottom: "fire2/expand1x1"
  115. top: "fire2/expand1x1"
  116. }
  117. layer {
  118. name: "fire2/expand3x3"
  119. type: "Convolution"
  120. bottom: "fire2/squeeze1x1"
  121. top: "fire2/expand3x3"
  122. convolution_param {
  123. num_output: 64
  124. pad: 1
  125. kernel_size: 3
  126. weight_filler {
  127. type: "xavier"
  128. }
  129. }
  130. }
  131. layer {
  132. name: "fire2/relu_expand3x3"
  133. type: "ReLU"
  134. bottom: "fire2/expand3x3"
  135. top: "fire2/expand3x3"
  136. }
  137. layer {
  138. name: "fire2/concat"
  139. type: "Concat"
  140. bottom: "fire2/expand1x1"
  141. bottom: "fire2/expand3x3"
  142. top: "fire2/concat"
  143. }
  144. layer {
  145. name: "fire3/squeeze1x1"
  146. type: "Convolution"
  147. bottom: "fire2/concat"
  148. top: "fire3/squeeze1x1"
  149. convolution_param {
  150. num_output: 16
  151. kernel_size: 1
  152. weight_filler {
  153. type: "xavier"
  154. }
  155. }
  156. }
  157. layer {
  158. name: "fire3/relu_squeeze1x1"
  159. type: "ReLU"
  160. bottom: "fire3/squeeze1x1"
  161. top: "fire3/squeeze1x1"
  162. }
  163. layer {
  164. name: "fire3/expand1x1"
  165. type: "Convolution"
  166. bottom: "fire3/squeeze1x1"
  167. top: "fire3/expand1x1"
  168. convolution_param {
  169. num_output: 64
  170. kernel_size: 1
  171. weight_filler {
  172. type: "xavier"
  173. }
  174. }
  175. }
  176. layer {
  177. name: "fire3/relu_expand1x1"
  178. type: "ReLU"
  179. bottom: "fire3/expand1x1"
  180. top: "fire3/expand1x1"
  181. }
  182. layer {
  183. name: "fire3/expand3x3"
  184. type: "Convolution"
  185. bottom: "fire3/squeeze1x1"
  186. top: "fire3/expand3x3"
  187. convolution_param {
  188. num_output: 64
  189. pad: 1
  190. kernel_size: 3
  191. weight_filler {
  192. type: "xavier"
  193. }
  194. }
  195. }
  196. layer {
  197. name: "fire3/relu_expand3x3"
  198. type: "ReLU"
  199. bottom: "fire3/expand3x3"
  200. top: "fire3/expand3x3"
  201. }
  202. layer {
  203. name: "fire3/concat"
  204. type: "Concat"
  205. bottom: "fire3/expand1x1"
  206. bottom: "fire3/expand3x3"
  207. top: "fire3/concat"
  208. }
  209. layer {
  210. name: "pool3"
  211. type: "Pooling"
  212. bottom: "fire3/concat"
  213. top: "pool3"
  214. pooling_param {
  215. pool: MAX
  216. kernel_size: 3
  217. stride: 2
  218. }
  219. }
  220. layer {
  221. name: "fire4/squeeze1x1"
  222. type: "Convolution"
  223. bottom: "pool3"
  224. top: "fire4/squeeze1x1"
  225. convolution_param {
  226. num_output: 32
  227. kernel_size: 1
  228. weight_filler {
  229. type: "xavier"
  230. }
  231. }
  232. }
  233. layer {
  234. name: "fire4/relu_squeeze1x1"
  235. type: "ReLU"
  236. bottom: "fire4/squeeze1x1"
  237. top: "fire4/squeeze1x1"
  238. }
  239. layer {
  240. name: "fire4/expand1x1"
  241. type: "Convolution"
  242. bottom: "fire4/squeeze1x1"
  243. top: "fire4/expand1x1"
  244. convolution_param {
  245. num_output: 128
  246. kernel_size: 1
  247. weight_filler {
  248. type: "xavier"
  249. }
  250. }
  251. }
  252. layer {
  253. name: "fire4/relu_expand1x1"
  254. type: "ReLU"
  255. bottom: "fire4/expand1x1"
  256. top: "fire4/expand1x1"
  257. }
  258. layer {
  259. name: "fire4/expand3x3"
  260. type: "Convolution"
  261. bottom: "fire4/squeeze1x1"
  262. top: "fire4/expand3x3"
  263. convolution_param {
  264. num_output: 128
  265. pad: 1
  266. kernel_size: 3
  267. weight_filler {
  268. type: "xavier"
  269. }
  270. }
  271. }
  272. layer {
  273. name: "fire4/relu_expand3x3"
  274. type: "ReLU"
  275. bottom: "fire4/expand3x3"
  276. top: "fire4/expand3x3"
  277. }
  278. layer {
  279. name: "fire4/concat"
  280. type: "Concat"
  281. bottom: "fire4/expand1x1"
  282. bottom: "fire4/expand3x3"
  283. top: "fire4/concat"
  284. }
  285. layer {
  286. name: "fire5/squeeze1x1"
  287. type: "Convolution"
  288. bottom: "fire4/concat"
  289. top: "fire5/squeeze1x1"
  290. convolution_param {
  291. num_output: 32
  292. kernel_size: 1
  293. weight_filler {
  294. type: "xavier"
  295. }
  296. }
  297. }
  298. layer {
  299. name: "fire5/relu_squeeze1x1"
  300. type: "ReLU"
  301. bottom: "fire5/squeeze1x1"
  302. top: "fire5/squeeze1x1"
  303. }
  304. layer {
  305. name: "fire5/expand1x1"
  306. type: "Convolution"
  307. bottom: "fire5/squeeze1x1"
  308. top: "fire5/expand1x1"
  309. convolution_param {
  310. num_output: 128
  311. kernel_size: 1
  312. weight_filler {
  313. type: "xavier"
  314. }
  315. }
  316. }
  317. layer {
  318. name: "fire5/relu_expand1x1"
  319. type: "ReLU"
  320. bottom: "fire5/expand1x1"
  321. top: "fire5/expand1x1"
  322. }
  323. layer {
  324. name: "fire5/expand3x3"
  325. type: "Convolution"
  326. bottom: "fire5/squeeze1x1"
  327. top: "fire5/expand3x3"
  328. convolution_param {
  329. num_output: 128
  330. pad: 1
  331. kernel_size: 3
  332. weight_filler {
  333. type: "xavier"
  334. }
  335. }
  336. }
  337. layer {
  338. name: "fire5/relu_expand3x3"
  339. type: "ReLU"
  340. bottom: "fire5/expand3x3"
  341. top: "fire5/expand3x3"
  342. }
  343. layer {
  344. name: "fire5/concat"
  345. type: "Concat"
  346. bottom: "fire5/expand1x1"
  347. bottom: "fire5/expand3x3"
  348. top: "fire5/concat"
  349. }
  350. layer {
  351. name: "pool5"
  352. type: "Pooling"
  353. bottom: "fire5/concat"
  354. top: "pool5"
  355. pooling_param {
  356. pool: MAX
  357. kernel_size: 3
  358. stride: 2
  359. }
  360. }
  361. layer {
  362. name: "fire6/squeeze1x1"
  363. type: "Convolution"
  364. bottom: "pool5"
  365. top: "fire6/squeeze1x1"
  366. convolution_param {
  367. num_output: 48
  368. kernel_size: 1
  369. weight_filler {
  370. type: "xavier"
  371. }
  372. }
  373. }
  374. layer {
  375. name: "fire6/relu_squeeze1x1"
  376. type: "ReLU"
  377. bottom: "fire6/squeeze1x1"
  378. top: "fire6/squeeze1x1"
  379. }
  380. layer {
  381. name: "fire6/expand1x1"
  382. type: "Convolution"
  383. bottom: "fire6/squeeze1x1"
  384. top: "fire6/expand1x1"
  385. convolution_param {
  386. num_output: 192
  387. kernel_size: 1
  388. weight_filler {
  389. type: "xavier"
  390. }
  391. }
  392. }
  393. layer {
  394. name: "fire6/relu_expand1x1"
  395. type: "ReLU"
  396. bottom: "fire6/expand1x1"
  397. top: "fire6/expand1x1"
  398. }
  399. layer {
  400. name: "fire6/expand3x3"
  401. type: "Convolution"
  402. bottom: "fire6/squeeze1x1"
  403. top: "fire6/expand3x3"
  404. convolution_param {
  405. num_output: 192
  406. pad: 1
  407. kernel_size: 3
  408. weight_filler {
  409. type: "xavier"
  410. }
  411. }
  412. }
  413. layer {
  414. name: "fire6/relu_expand3x3"
  415. type: "ReLU"
  416. bottom: "fire6/expand3x3"
  417. top: "fire6/expand3x3"
  418. }
  419. layer {
  420. name: "fire6/concat"
  421. type: "Concat"
  422. bottom: "fire6/expand1x1"
  423. bottom: "fire6/expand3x3"
  424. top: "fire6/concat"
  425. }
  426. layer {
  427. name: "fire7/squeeze1x1"
  428. type: "Convolution"
  429. bottom: "fire6/concat"
  430. top: "fire7/squeeze1x1"
  431. convolution_param {
  432. num_output: 48
  433. kernel_size: 1
  434. weight_filler {
  435. type: "xavier"
  436. }
  437. }
  438. }
  439. layer {
  440. name: "fire7/relu_squeeze1x1"
  441. type: "ReLU"
  442. bottom: "fire7/squeeze1x1"
  443. top: "fire7/squeeze1x1"
  444. }
  445. layer {
  446. name: "fire7/expand1x1"
  447. type: "Convolution"
  448. bottom: "fire7/squeeze1x1"
  449. top: "fire7/expand1x1"
  450. convolution_param {
  451. num_output: 192
  452. kernel_size: 1
  453. weight_filler {
  454. type: "xavier"
  455. }
  456. }
  457. }
  458. layer {
  459. name: "fire7/relu_expand1x1"
  460. type: "ReLU"
  461. bottom: "fire7/expand1x1"
  462. top: "fire7/expand1x1"
  463. }
  464. layer {
  465. name: "fire7/expand3x3"
  466. type: "Convolution"
  467. bottom: "fire7/squeeze1x1"
  468. top: "fire7/expand3x3"
  469. convolution_param {
  470. num_output: 192
  471. pad: 1
  472. kernel_size: 3
  473. weight_filler {
  474. type: "xavier"
  475. }
  476. }
  477. }
  478. layer {
  479. name: "fire7/relu_expand3x3"
  480. type: "ReLU"
  481. bottom: "fire7/expand3x3"
  482. top: "fire7/expand3x3"
  483. }
  484. layer {
  485. name: "fire7/concat"
  486. type: "Concat"
  487. bottom: "fire7/expand1x1"
  488. bottom: "fire7/expand3x3"
  489. top: "fire7/concat"
  490. }
  491. layer {
  492. name: "fire8/squeeze1x1"
  493. type: "Convolution"
  494. bottom: "fire7/concat"
  495. top: "fire8/squeeze1x1"
  496. convolution_param {
  497. num_output: 64
  498. kernel_size: 1
  499. weight_filler {
  500. type: "xavier"
  501. }
  502. }
  503. }
  504. layer {
  505. name: "fire8/relu_squeeze1x1"
  506. type: "ReLU"
  507. bottom: "fire8/squeeze1x1"
  508. top: "fire8/squeeze1x1"
  509. }
  510. layer {
  511. name: "fire8/expand1x1"
  512. type: "Convolution"
  513. bottom: "fire8/squeeze1x1"
  514. top: "fire8/expand1x1"
  515. convolution_param {
  516. num_output: 256
  517. kernel_size: 1
  518. weight_filler {
  519. type: "xavier"
  520. }
  521. }
  522. }
  523. layer {
  524. name: "fire8/relu_expand1x1"
  525. type: "ReLU"
  526. bottom: "fire8/expand1x1"
  527. top: "fire8/expand1x1"
  528. }
  529. layer {
  530. name: "fire8/expand3x3"
  531. type: "Convolution"
  532. bottom: "fire8/squeeze1x1"
  533. top: "fire8/expand3x3"
  534. convolution_param {
  535. num_output: 256
  536. pad: 1
  537. kernel_size: 3
  538. weight_filler {
  539. type: "xavier"
  540. }
  541. }
  542. }
  543. layer {
  544. name: "fire8/relu_expand3x3"
  545. type: "ReLU"
  546. bottom: "fire8/expand3x3"
  547. top: "fire8/expand3x3"
  548. }
  549. layer {
  550. name: "fire8/concat"
  551. type: "Concat"
  552. bottom: "fire8/expand1x1"
  553. bottom: "fire8/expand3x3"
  554. top: "fire8/concat"
  555. }
  556. layer {
  557. name: "fire9/squeeze1x1"
  558. type: "Convolution"
  559. bottom: "fire8/concat"
  560. top: "fire9/squeeze1x1"
  561. convolution_param {
  562. num_output: 64
  563. kernel_size: 1
  564. weight_filler {
  565. type: "xavier"
  566. }
  567. }
  568. }
  569. layer {
  570. name: "fire9/relu_squeeze1x1"
  571. type: "ReLU"
  572. bottom: "fire9/squeeze1x1"
  573. top: "fire9/squeeze1x1"
  574. }
  575. layer {
  576. name: "fire9/expand1x1"
  577. type: "Convolution"
  578. bottom: "fire9/squeeze1x1"
  579. top: "fire9/expand1x1"
  580. convolution_param {
  581. num_output: 256
  582. kernel_size: 1
  583. weight_filler {
  584. type: "xavier"
  585. }
  586. }
  587. }
  588. layer {
  589. name: "fire9/relu_expand1x1"
  590. type: "ReLU"
  591. bottom: "fire9/expand1x1"
  592. top: "fire9/expand1x1"
  593. }
  594. layer {
  595. name: "fire9/expand3x3"
  596. type: "Convolution"
  597. bottom: "fire9/squeeze1x1"
  598. top: "fire9/expand3x3"
  599. convolution_param {
  600. num_output: 256
  601. pad: 1
  602. kernel_size: 3
  603. weight_filler {
  604. type: "xavier"
  605. }
  606. }
  607. }
  608. layer {
  609. name: "fire9/relu_expand3x3"
  610. type: "ReLU"
  611. bottom: "fire9/expand3x3"
  612. top: "fire9/expand3x3"
  613. }
  614. layer {
  615. name: "fire9/concat"
  616. type: "Concat"
  617. bottom: "fire9/expand1x1"
  618. bottom: "fire9/expand3x3"
  619. top: "fire9/concat"
  620. }
  621. layer {
  622. name: "drop9"
  623. type: "Dropout"
  624. bottom: "fire9/concat"
  625. top: "fire9/concat"
  626. dropout_param {
  627. dropout_ratio: 0.5
  628. }
  629. }
  630. layer {
  631. name: "conv10"
  632. type: "Convolution"
  633. bottom: "fire9/concat"
  634. top: "conv10"
  635. convolution_param {
  636. num_output: 1000
  637. kernel_size: 1
  638. weight_filler {
  639. type: "gaussian"
  640. mean: 0.0
  641. std: 0.01
  642. }
  643. }
  644. }
  645. layer {
  646. name: "relu_conv10"
  647. type: "ReLU"
  648. bottom: "conv10"
  649. top: "conv10"
  650. }
  651. layer {
  652. name: "pool10"
  653. type: "Pooling"
  654. bottom: "conv10"
  655. top: "pool10"
  656. pooling_param {
  657. pool: AVE
  658. global_pooling: true
  659. }
  660. }
  661. layer {
  662. name: "loss"
  663. type: "SoftmaxWithLoss"
  664. bottom: "pool10"
  665. bottom: "label"
  666. top: "loss"
  667. #include {
  668. # phase: TRAIN
  669. #}
  670. }
  671. layer {
  672. name: "accuracy"
  673. type: "Accuracy"
  674. bottom: "pool10"
  675. bottom: "label"
  676. top: "accuracy"
  677. #include {
  678. # phase: TEST
  679. #}
  680. }
  681. layer {
  682. name: "accuracy_top5"
  683. type: "Accuracy"
  684. bottom: "pool10"
  685. bottom: "label"
  686. top: "accuracy_top5"
  687. #include {
  688. # phase: TEST
  689. #}
  690. accuracy_param {
  691. top_k: 5
  692. }
  693. }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement