# SqueezeNet v1.0 train_val.prototxt (Caffe protobuf text format)
# Provenance: anonymous Pastebin paste, Aug 26th 2016, ~11.57 KB.
# (Pastebin page chrome removed so this file parses as prototxt.)
# please cite:
# @article{SqueezeNet,
#   Author  = {Forrest N. Iandola and Matthew W. Moskewicz and Khalid Ashraf and Song Han and William J. Dally and Kurt Keutzer},
#   Title   = {SqueezeNet: AlexNet-level accuracy with 50x fewer parameters and $<$1MB model size},
#   Journal = {arXiv:1602.07360},
#   Year    = {2016}
# }
  8. layer {
  9. name: "data"
  10. type: "Data"
  11. top: "data"
  12. top: "label"
  13. include {
  14. phase: TRAIN
  15. }
  16. transform_param {
  17. crop_size: 227
  18. mean_value: 104
  19. mean_value: 117
  20. mean_value: 123
  21. }
  22. data_param {
  23. source: "examples/imagenet/ilsvrc12_train_lmdb"
  24. batch_size: 32 #*iter_size
  25. backend: LMDB
  26. }
  27. }
  28. layer {
  29. name: "data"
  30. type: "Data"
  31. top: "data"
  32. top: "label"
  33. include {
  34. phase: TEST
  35. }
  36. transform_param {
  37. crop_size: 227
  38. mean_value: 104
  39. mean_value: 117
  40. mean_value: 123
  41. }
  42. data_param {
  43. source: "examples/imagenet/ilsvrc12_val_lmdb"
  44. batch_size: 25 #not *iter_size
  45. backend: LMDB
  46. }
  47. }
  48. layer {
  49. name: "conv1"
  50. type: "Convolution"
  51. bottom: "data"
  52. top: "conv1"
  53. convolution_param {
  54. num_output: 96
  55. kernel_size: 7
  56. stride: 2
  57. weight_filler {
  58. type: "xavier"
  59. }
  60. }
  61. }
  62. layer {
  63. name: "relu_conv1"
  64. type: "ReLU"
  65. bottom: "conv1"
  66. top: "conv1"
  67. }
  68. layer {
  69. name: "pool1"
  70. type: "Pooling"
  71. bottom: "conv1"
  72. top: "pool1"
  73. pooling_param {
  74. pool: MAX
  75. kernel_size: 3
  76. stride: 2
  77. }
  78. }
  79. layer {
  80. name: "fire2/squeeze1x1"
  81. type: "Convolution"
  82. bottom: "pool1"
  83. top: "fire2/squeeze1x1"
  84. convolution_param {
  85. num_output: 16
  86. kernel_size: 1
  87. weight_filler {
  88. type: "xavier"
  89. }
  90. }
  91. }
  92. layer {
  93. name: "fire2/relu_squeeze1x1"
  94. type: "ReLU"
  95. bottom: "fire2/squeeze1x1"
  96. top: "fire2/squeeze1x1"
  97. }
  98. layer {
  99. name: "fire2/expand1x1"
  100. type: "Convolution"
  101. bottom: "fire2/squeeze1x1"
  102. top: "fire2/expand1x1"
  103. convolution_param {
  104. num_output: 64
  105. kernel_size: 1
  106. weight_filler {
  107. type: "xavier"
  108. }
  109. }
  110. }
  111. layer {
  112. name: "fire2/relu_expand1x1"
  113. type: "ReLU"
  114. bottom: "fire2/expand1x1"
  115. top: "fire2/expand1x1"
  116. }
  117. layer {
  118. name: "fire2/expand3x3"
  119. type: "Convolution"
  120. bottom: "fire2/squeeze1x1"
  121. top: "fire2/expand3x3"
  122. convolution_param {
  123. num_output: 64
  124. pad: 1
  125. kernel_size: 3
  126. weight_filler {
  127. type: "xavier"
  128. }
  129. }
  130. }
  131. layer {
  132. name: "fire2/relu_expand3x3"
  133. type: "ReLU"
  134. bottom: "fire2/expand3x3"
  135. top: "fire2/expand3x3"
  136. }
  137. layer {
  138. name: "fire2/concat"
  139. type: "Concat"
  140. bottom: "fire2/expand1x1"
  141. bottom: "fire2/expand3x3"
  142. top: "fire2/concat"
  143. }
  144. layer {
  145. name: "fire3/squeeze1x1"
  146. type: "Convolution"
  147. bottom: "fire2/concat"
  148. top: "fire3/squeeze1x1"
  149. convolution_param {
  150. num_output: 16
  151. kernel_size: 1
  152. weight_filler {
  153. type: "xavier"
  154. }
  155. }
  156. }
  157. layer {
  158. name: "fire3/relu_squeeze1x1"
  159. type: "ReLU"
  160. bottom: "fire3/squeeze1x1"
  161. top: "fire3/squeeze1x1"
  162. }
  163. layer {
  164. name: "fire3/expand1x1"
  165. type: "Convolution"
  166. bottom: "fire3/squeeze1x1"
  167. top: "fire3/expand1x1"
  168. convolution_param {
  169. num_output: 64
  170. kernel_size: 1
  171. weight_filler {
  172. type: "xavier"
  173. }
  174. }
  175. }
  176. layer {
  177. name: "fire3/relu_expand1x1"
  178. type: "ReLU"
  179. bottom: "fire3/expand1x1"
  180. top: "fire3/expand1x1"
  181. }
  182. layer {
  183. name: "fire3/expand3x3"
  184. type: "Convolution"
  185. bottom: "fire3/squeeze1x1"
  186. top: "fire3/expand3x3"
  187. convolution_param {
  188. num_output: 64
  189. pad: 1
  190. kernel_size: 3
  191. weight_filler {
  192. type: "xavier"
  193. }
  194. }
  195. }
  196. layer {
  197. name: "fire3/relu_expand3x3"
  198. type: "ReLU"
  199. bottom: "fire3/expand3x3"
  200. top: "fire3/expand3x3"
  201. }
  202. layer {
  203. name: "fire3/concat"
  204. type: "Concat"
  205. bottom: "fire3/expand1x1"
  206. bottom: "fire3/expand3x3"
  207. top: "fire3/concat"
  208. }
  209. layer {
  210. name: "fire4/squeeze1x1"
  211. type: "Convolution"
  212. bottom: "fire3/concat"
  213. top: "fire4/squeeze1x1"
  214. convolution_param {
  215. num_output: 32
  216. kernel_size: 1
  217. weight_filler {
  218. type: "xavier"
  219. }
  220. }
  221. }
  222. layer {
  223. name: "fire4/relu_squeeze1x1"
  224. type: "ReLU"
  225. bottom: "fire4/squeeze1x1"
  226. top: "fire4/squeeze1x1"
  227. }
  228. layer {
  229. name: "fire4/expand1x1"
  230. type: "Convolution"
  231. bottom: "fire4/squeeze1x1"
  232. top: "fire4/expand1x1"
  233. convolution_param {
  234. num_output: 128
  235. kernel_size: 1
  236. weight_filler {
  237. type: "xavier"
  238. }
  239. }
  240. }
  241. layer {
  242. name: "fire4/relu_expand1x1"
  243. type: "ReLU"
  244. bottom: "fire4/expand1x1"
  245. top: "fire4/expand1x1"
  246. }
  247. layer {
  248. name: "fire4/expand3x3"
  249. type: "Convolution"
  250. bottom: "fire4/squeeze1x1"
  251. top: "fire4/expand3x3"
  252. convolution_param {
  253. num_output: 128
  254. pad: 1
  255. kernel_size: 3
  256. weight_filler {
  257. type: "xavier"
  258. }
  259. }
  260. }
  261. layer {
  262. name: "fire4/relu_expand3x3"
  263. type: "ReLU"
  264. bottom: "fire4/expand3x3"
  265. top: "fire4/expand3x3"
  266. }
  267. layer {
  268. name: "fire4/concat"
  269. type: "Concat"
  270. bottom: "fire4/expand1x1"
  271. bottom: "fire4/expand3x3"
  272. top: "fire4/concat"
  273. }
  274. layer {
  275. name: "pool4"
  276. type: "Pooling"
  277. bottom: "fire4/concat"
  278. top: "pool4"
  279. pooling_param {
  280. pool: MAX
  281. kernel_size: 3
  282. stride: 2
  283. }
  284. }
  285. layer {
  286. name: "fire5/squeeze1x1"
  287. type: "Convolution"
  288. bottom: "pool4"
  289. top: "fire5/squeeze1x1"
  290. convolution_param {
  291. num_output: 32
  292. kernel_size: 1
  293. weight_filler {
  294. type: "xavier"
  295. }
  296. }
  297. }
  298. layer {
  299. name: "fire5/relu_squeeze1x1"
  300. type: "ReLU"
  301. bottom: "fire5/squeeze1x1"
  302. top: "fire5/squeeze1x1"
  303. }
  304. layer {
  305. name: "fire5/expand1x1"
  306. type: "Convolution"
  307. bottom: "fire5/squeeze1x1"
  308. top: "fire5/expand1x1"
  309. convolution_param {
  310. num_output: 128
  311. kernel_size: 1
  312. weight_filler {
  313. type: "xavier"
  314. }
  315. }
  316. }
  317. layer {
  318. name: "fire5/relu_expand1x1"
  319. type: "ReLU"
  320. bottom: "fire5/expand1x1"
  321. top: "fire5/expand1x1"
  322. }
  323. layer {
  324. name: "fire5/expand3x3"
  325. type: "Convolution"
  326. bottom: "fire5/squeeze1x1"
  327. top: "fire5/expand3x3"
  328. convolution_param {
  329. num_output: 128
  330. pad: 1
  331. kernel_size: 3
  332. weight_filler {
  333. type: "xavier"
  334. }
  335. }
  336. }
  337. layer {
  338. name: "fire5/relu_expand3x3"
  339. type: "ReLU"
  340. bottom: "fire5/expand3x3"
  341. top: "fire5/expand3x3"
  342. }
  343. layer {
  344. name: "fire5/concat"
  345. type: "Concat"
  346. bottom: "fire5/expand1x1"
  347. bottom: "fire5/expand3x3"
  348. top: "fire5/concat"
  349. }
  350. layer {
  351. name: "fire6/squeeze1x1"
  352. type: "Convolution"
  353. bottom: "fire5/concat"
  354. top: "fire6/squeeze1x1"
  355. convolution_param {
  356. num_output: 48
  357. kernel_size: 1
  358. weight_filler {
  359. type: "xavier"
  360. }
  361. }
  362. }
  363. layer {
  364. name: "fire6/relu_squeeze1x1"
  365. type: "ReLU"
  366. bottom: "fire6/squeeze1x1"
  367. top: "fire6/squeeze1x1"
  368. }
  369. layer {
  370. name: "fire6/expand1x1"
  371. type: "Convolution"
  372. bottom: "fire6/squeeze1x1"
  373. top: "fire6/expand1x1"
  374. convolution_param {
  375. num_output: 192
  376. kernel_size: 1
  377. weight_filler {
  378. type: "xavier"
  379. }
  380. }
  381. }
  382. layer {
  383. name: "fire6/relu_expand1x1"
  384. type: "ReLU"
  385. bottom: "fire6/expand1x1"
  386. top: "fire6/expand1x1"
  387. }
  388. layer {
  389. name: "fire6/expand3x3"
  390. type: "Convolution"
  391. bottom: "fire6/squeeze1x1"
  392. top: "fire6/expand3x3"
  393. convolution_param {
  394. num_output: 192
  395. pad: 1
  396. kernel_size: 3
  397. weight_filler {
  398. type: "xavier"
  399. }
  400. }
  401. }
  402. layer {
  403. name: "fire6/relu_expand3x3"
  404. type: "ReLU"
  405. bottom: "fire6/expand3x3"
  406. top: "fire6/expand3x3"
  407. }
  408. layer {
  409. name: "fire6/concat"
  410. type: "Concat"
  411. bottom: "fire6/expand1x1"
  412. bottom: "fire6/expand3x3"
  413. top: "fire6/concat"
  414. }
  415. layer {
  416. name: "fire7/squeeze1x1"
  417. type: "Convolution"
  418. bottom: "fire6/concat"
  419. top: "fire7/squeeze1x1"
  420. convolution_param {
  421. num_output: 48
  422. kernel_size: 1
  423. weight_filler {
  424. type: "xavier"
  425. }
  426. }
  427. }
  428. layer {
  429. name: "fire7/relu_squeeze1x1"
  430. type: "ReLU"
  431. bottom: "fire7/squeeze1x1"
  432. top: "fire7/squeeze1x1"
  433. }
  434. layer {
  435. name: "fire7/expand1x1"
  436. type: "Convolution"
  437. bottom: "fire7/squeeze1x1"
  438. top: "fire7/expand1x1"
  439. convolution_param {
  440. num_output: 192
  441. kernel_size: 1
  442. weight_filler {
  443. type: "xavier"
  444. }
  445. }
  446. }
  447. layer {
  448. name: "fire7/relu_expand1x1"
  449. type: "ReLU"
  450. bottom: "fire7/expand1x1"
  451. top: "fire7/expand1x1"
  452. }
  453. layer {
  454. name: "fire7/expand3x3"
  455. type: "Convolution"
  456. bottom: "fire7/squeeze1x1"
  457. top: "fire7/expand3x3"
  458. convolution_param {
  459. num_output: 192
  460. pad: 1
  461. kernel_size: 3
  462. weight_filler {
  463. type: "xavier"
  464. }
  465. }
  466. }
  467. layer {
  468. name: "fire7/relu_expand3x3"
  469. type: "ReLU"
  470. bottom: "fire7/expand3x3"
  471. top: "fire7/expand3x3"
  472. }
  473. layer {
  474. name: "fire7/concat"
  475. type: "Concat"
  476. bottom: "fire7/expand1x1"
  477. bottom: "fire7/expand3x3"
  478. top: "fire7/concat"
  479. }
  480. layer {
  481. name: "fire8/squeeze1x1"
  482. type: "Convolution"
  483. bottom: "fire7/concat"
  484. top: "fire8/squeeze1x1"
  485. convolution_param {
  486. num_output: 64
  487. kernel_size: 1
  488. weight_filler {
  489. type: "xavier"
  490. }
  491. }
  492. }
  493. layer {
  494. name: "fire8/relu_squeeze1x1"
  495. type: "ReLU"
  496. bottom: "fire8/squeeze1x1"
  497. top: "fire8/squeeze1x1"
  498. }
  499. layer {
  500. name: "fire8/expand1x1"
  501. type: "Convolution"
  502. bottom: "fire8/squeeze1x1"
  503. top: "fire8/expand1x1"
  504. convolution_param {
  505. num_output: 256
  506. kernel_size: 1
  507. weight_filler {
  508. type: "xavier"
  509. }
  510. }
  511. }
  512. layer {
  513. name: "fire8/relu_expand1x1"
  514. type: "ReLU"
  515. bottom: "fire8/expand1x1"
  516. top: "fire8/expand1x1"
  517. }
  518. layer {
  519. name: "fire8/expand3x3"
  520. type: "Convolution"
  521. bottom: "fire8/squeeze1x1"
  522. top: "fire8/expand3x3"
  523. convolution_param {
  524. num_output: 256
  525. pad: 1
  526. kernel_size: 3
  527. weight_filler {
  528. type: "xavier"
  529. }
  530. }
  531. }
  532. layer {
  533. name: "fire8/relu_expand3x3"
  534. type: "ReLU"
  535. bottom: "fire8/expand3x3"
  536. top: "fire8/expand3x3"
  537. }
  538. layer {
  539. name: "fire8/concat"
  540. type: "Concat"
  541. bottom: "fire8/expand1x1"
  542. bottom: "fire8/expand3x3"
  543. top: "fire8/concat"
  544. }
  545. layer {
  546. name: "pool8"
  547. type: "Pooling"
  548. bottom: "fire8/concat"
  549. top: "pool8"
  550. pooling_param {
  551. pool: MAX
  552. kernel_size: 3
  553. stride: 2
  554. }
  555. }
  556. layer {
  557. name: "fire9/squeeze1x1"
  558. type: "Convolution"
  559. bottom: "pool8"
  560. top: "fire9/squeeze1x1"
  561. convolution_param {
  562. num_output: 64
  563. kernel_size: 1
  564. weight_filler {
  565. type: "xavier"
  566. }
  567. }
  568. }
  569. layer {
  570. name: "fire9/relu_squeeze1x1"
  571. type: "ReLU"
  572. bottom: "fire9/squeeze1x1"
  573. top: "fire9/squeeze1x1"
  574. }
  575. layer {
  576. name: "fire9/expand1x1"
  577. type: "Convolution"
  578. bottom: "fire9/squeeze1x1"
  579. top: "fire9/expand1x1"
  580. convolution_param {
  581. num_output: 256
  582. kernel_size: 1
  583. weight_filler {
  584. type: "xavier"
  585. }
  586. }
  587. }
  588. layer {
  589. name: "fire9/relu_expand1x1"
  590. type: "ReLU"
  591. bottom: "fire9/expand1x1"
  592. top: "fire9/expand1x1"
  593. }
  594. layer {
  595. name: "fire9/expand3x3"
  596. type: "Convolution"
  597. bottom: "fire9/squeeze1x1"
  598. top: "fire9/expand3x3"
  599. convolution_param {
  600. num_output: 256
  601. pad: 1
  602. kernel_size: 3
  603. weight_filler {
  604. type: "xavier"
  605. }
  606. }
  607. }
  608. layer {
  609. name: "fire9/relu_expand3x3"
  610. type: "ReLU"
  611. bottom: "fire9/expand3x3"
  612. top: "fire9/expand3x3"
  613. }
  614. layer {
  615. name: "fire9/concat"
  616. type: "Concat"
  617. bottom: "fire9/expand1x1"
  618. bottom: "fire9/expand3x3"
  619. top: "fire9/concat"
  620. }
  621. layer {
  622. name: "drop9"
  623. type: "Dropout"
  624. bottom: "fire9/concat"
  625. top: "fire9/concat"
  626. dropout_param {
  627. dropout_ratio: 0.5
  628. }
  629. }
  630. layer {
  631. name: "conv10"
  632. type: "Convolution"
  633. bottom: "fire9/concat"
  634. top: "conv10"
  635. convolution_param {
  636. num_output: 1000
  637. pad: 1
  638. kernel_size: 1
  639. weight_filler {
  640. type: "gaussian"
  641. mean: 0.0
  642. std: 0.01
  643. }
  644. }
  645. }
  646. layer {
  647. name: "relu_conv10"
  648. type: "ReLU"
  649. bottom: "conv10"
  650. top: "conv10"
  651. }
  652. layer {
  653. name: "pool10"
  654. type: "Pooling"
  655. bottom: "conv10"
  656. top: "pool10"
  657. pooling_param {
  658. pool: AVE
  659. global_pooling: true
  660. }
  661. }
  662. layer {
  663. name: "loss"
  664. type: "SoftmaxWithLoss"
  665. bottom: "pool10"
  666. bottom: "label"
  667. top: "loss"
  668. include {
  669. phase: TRAIN
  670. }
  671. }
  672. layer {
  673. name: "accuracy"
  674. type: "Accuracy"
  675. bottom: "pool10"
  676. bottom: "label"
  677. top: "accuracy"
  678. include {
  679. phase: TEST
  680. }
  681. }
  682. layer {
  683. name: "accuracy_top5"
  684. type: "Accuracy"
  685. bottom: "pool10"
  686. bottom: "label"
  687. top: "accuracy_top5"
  688. include {
  689. phase: TEST
  690. }
  691. accuracy_param {
  692. top_k: 5
  693. }
  694. }
# (end of network definition — Pastebin footer chrome removed)