Guest User

Untitled

a guest
Jun 19th, 2018
78
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 11.18 KB | None | 0 0
# please cite:
# @article{SqueezeNet,
#   Author = {Forrest N. Iandola and Matthew W. Moskewicz and Khalid Ashraf and Song Han and William J. Dally and Kurt Keutzer},
#   Title = {SqueezeNet: AlexNet-level accuracy with 50x fewer parameters and $<$1MB model size},
#   Journal = {arXiv:1602.07360},
#   Year = {2016}
# }
name: "Squeezenet"
# TRAIN input: images + labels from an LMDB, batch of 32, with per-pixel
# mean subtraction via mean.binaryproto.
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TRAIN
  }
  transform_param {
    mean_file: "mean.binaryproto"
  }
  data_param {
    source: "train_lmdb"
    batch_size: 32
    backend: LMDB
  }
}
# TEST input: fed in-memory (e.g. pycaffe set_input_arrays), 25 x 3 x 227 x 227.
# NOTE(review): unlike TRAIN, no transform_param here, so no mean subtraction is
# applied at TEST time — confirm the caller pre-normalizes inputs.
# NOTE(review): layer is named "squeezenet" rather than "data"; kept as-is since
# external code may address this layer by name.
layer {
  name: "squeezenet"
  type: "MemoryData"
  top: "data"
  top: "label"
  include {
    phase: TEST
  }
  memory_data_param {
    batch_size: 25
    channels: 3
    height: 227
    width: 227
  }
}
# Stem: 3x3/2 convolution (64 filters) -> ReLU -> 3x3/2 max pool.
# The 3x3/2, 64-filter stem matches the SqueezeNet v1.1 variant (the v1.0
# paper architecture uses a 7x7/2, 96-filter conv1).
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 64
    kernel_size: 3
    stride: 2
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "relu_conv1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
# Fire module 2: squeeze to 16 channels (1x1), then expand with parallel
# 1x1 and 3x3 (pad 1) branches of 64 filters each; concat yields 128 channels.
layer {
  name: "fire2/squeeze1x1"
  type: "Convolution"
  bottom: "pool1"
  top: "fire2/squeeze1x1"
  convolution_param {
    num_output: 16
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire2/relu_squeeze1x1"
  type: "ReLU"
  bottom: "fire2/squeeze1x1"
  top: "fire2/squeeze1x1"
}
layer {
  name: "fire2/expand1x1"
  type: "Convolution"
  bottom: "fire2/squeeze1x1"
  top: "fire2/expand1x1"
  convolution_param {
    num_output: 64
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire2/relu_expand1x1"
  type: "ReLU"
  bottom: "fire2/expand1x1"
  top: "fire2/expand1x1"
}
layer {
  name: "fire2/expand3x3"
  type: "Convolution"
  bottom: "fire2/squeeze1x1"
  top: "fire2/expand3x3"
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire2/relu_expand3x3"
  type: "ReLU"
  bottom: "fire2/expand3x3"
  top: "fire2/expand3x3"
}
layer {
  name: "fire2/concat"
  type: "Concat"
  bottom: "fire2/expand1x1"
  bottom: "fire2/expand3x3"
  top: "fire2/concat"
}
# Fire module 3: same shape as fire2 (squeeze 16 -> expand 64 + 64 = 128),
# followed by a 3x3/2 max pool to halve spatial resolution.
layer {
  name: "fire3/squeeze1x1"
  type: "Convolution"
  bottom: "fire2/concat"
  top: "fire3/squeeze1x1"
  convolution_param {
    num_output: 16
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire3/relu_squeeze1x1"
  type: "ReLU"
  bottom: "fire3/squeeze1x1"
  top: "fire3/squeeze1x1"
}
layer {
  name: "fire3/expand1x1"
  type: "Convolution"
  bottom: "fire3/squeeze1x1"
  top: "fire3/expand1x1"
  convolution_param {
    num_output: 64
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire3/relu_expand1x1"
  type: "ReLU"
  bottom: "fire3/expand1x1"
  top: "fire3/expand1x1"
}
layer {
  name: "fire3/expand3x3"
  type: "Convolution"
  bottom: "fire3/squeeze1x1"
  top: "fire3/expand3x3"
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire3/relu_expand3x3"
  type: "ReLU"
  bottom: "fire3/expand3x3"
  top: "fire3/expand3x3"
}
layer {
  name: "fire3/concat"
  type: "Concat"
  bottom: "fire3/expand1x1"
  bottom: "fire3/expand3x3"
  top: "fire3/concat"
}
# Downsample after fire3 (v1.1-style early pooling placement).
layer {
  name: "pool3"
  type: "Pooling"
  bottom: "fire3/concat"
  top: "pool3"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
# Fire module 4: squeeze to 32 channels, expand 128 + 128 = 256 channels.
layer {
  name: "fire4/squeeze1x1"
  type: "Convolution"
  bottom: "pool3"
  top: "fire4/squeeze1x1"
  convolution_param {
    num_output: 32
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire4/relu_squeeze1x1"
  type: "ReLU"
  bottom: "fire4/squeeze1x1"
  top: "fire4/squeeze1x1"
}
layer {
  name: "fire4/expand1x1"
  type: "Convolution"
  bottom: "fire4/squeeze1x1"
  top: "fire4/expand1x1"
  convolution_param {
    num_output: 128
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire4/relu_expand1x1"
  type: "ReLU"
  bottom: "fire4/expand1x1"
  top: "fire4/expand1x1"
}
layer {
  name: "fire4/expand3x3"
  type: "Convolution"
  bottom: "fire4/squeeze1x1"
  top: "fire4/expand3x3"
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire4/relu_expand3x3"
  type: "ReLU"
  bottom: "fire4/expand3x3"
  top: "fire4/expand3x3"
}
layer {
  name: "fire4/concat"
  type: "Concat"
  bottom: "fire4/expand1x1"
  bottom: "fire4/expand3x3"
  top: "fire4/concat"
}
# Fire module 5: same shape as fire4 (squeeze 32 -> expand 128 + 128 = 256),
# followed by a 3x3/2 max pool.
layer {
  name: "fire5/squeeze1x1"
  type: "Convolution"
  bottom: "fire4/concat"
  top: "fire5/squeeze1x1"
  convolution_param {
    num_output: 32
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire5/relu_squeeze1x1"
  type: "ReLU"
  bottom: "fire5/squeeze1x1"
  top: "fire5/squeeze1x1"
}
layer {
  name: "fire5/expand1x1"
  type: "Convolution"
  bottom: "fire5/squeeze1x1"
  top: "fire5/expand1x1"
  convolution_param {
    num_output: 128
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire5/relu_expand1x1"
  type: "ReLU"
  bottom: "fire5/expand1x1"
  top: "fire5/expand1x1"
}
layer {
  name: "fire5/expand3x3"
  type: "Convolution"
  bottom: "fire5/squeeze1x1"
  top: "fire5/expand3x3"
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire5/relu_expand3x3"
  type: "ReLU"
  bottom: "fire5/expand3x3"
  top: "fire5/expand3x3"
}
layer {
  name: "fire5/concat"
  type: "Concat"
  bottom: "fire5/expand1x1"
  bottom: "fire5/expand3x3"
  top: "fire5/concat"
}
# Final spatial downsampling before the deeper fire modules.
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "fire5/concat"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
# Fire module 6: squeeze to 48 channels, expand 192 + 192 = 384 channels.
layer {
  name: "fire6/squeeze1x1"
  type: "Convolution"
  bottom: "pool5"
  top: "fire6/squeeze1x1"
  convolution_param {
    num_output: 48
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire6/relu_squeeze1x1"
  type: "ReLU"
  bottom: "fire6/squeeze1x1"
  top: "fire6/squeeze1x1"
}
layer {
  name: "fire6/expand1x1"
  type: "Convolution"
  bottom: "fire6/squeeze1x1"
  top: "fire6/expand1x1"
  convolution_param {
    num_output: 192
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire6/relu_expand1x1"
  type: "ReLU"
  bottom: "fire6/expand1x1"
  top: "fire6/expand1x1"
}
layer {
  name: "fire6/expand3x3"
  type: "Convolution"
  bottom: "fire6/squeeze1x1"
  top: "fire6/expand3x3"
  convolution_param {
    num_output: 192
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire6/relu_expand3x3"
  type: "ReLU"
  bottom: "fire6/expand3x3"
  top: "fire6/expand3x3"
}
layer {
  name: "fire6/concat"
  type: "Concat"
  bottom: "fire6/expand1x1"
  bottom: "fire6/expand3x3"
  top: "fire6/concat"
}
# Fire module 7: same shape as fire6 (squeeze 48 -> expand 192 + 192 = 384).
layer {
  name: "fire7/squeeze1x1"
  type: "Convolution"
  bottom: "fire6/concat"
  top: "fire7/squeeze1x1"
  convolution_param {
    num_output: 48
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire7/relu_squeeze1x1"
  type: "ReLU"
  bottom: "fire7/squeeze1x1"
  top: "fire7/squeeze1x1"
}
layer {
  name: "fire7/expand1x1"
  type: "Convolution"
  bottom: "fire7/squeeze1x1"
  top: "fire7/expand1x1"
  convolution_param {
    num_output: 192
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire7/relu_expand1x1"
  type: "ReLU"
  bottom: "fire7/expand1x1"
  top: "fire7/expand1x1"
}
layer {
  name: "fire7/expand3x3"
  type: "Convolution"
  bottom: "fire7/squeeze1x1"
  top: "fire7/expand3x3"
  convolution_param {
    num_output: 192
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire7/relu_expand3x3"
  type: "ReLU"
  bottom: "fire7/expand3x3"
  top: "fire7/expand3x3"
}
layer {
  name: "fire7/concat"
  type: "Concat"
  bottom: "fire7/expand1x1"
  bottom: "fire7/expand3x3"
  top: "fire7/concat"
}
# Fire module 8: squeeze to 64 channels, expand 256 + 256 = 512 channels.
layer {
  name: "fire8/squeeze1x1"
  type: "Convolution"
  bottom: "fire7/concat"
  top: "fire8/squeeze1x1"
  convolution_param {
    num_output: 64
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire8/relu_squeeze1x1"
  type: "ReLU"
  bottom: "fire8/squeeze1x1"
  top: "fire8/squeeze1x1"
}
layer {
  name: "fire8/expand1x1"
  type: "Convolution"
  bottom: "fire8/squeeze1x1"
  top: "fire8/expand1x1"
  convolution_param {
    num_output: 256
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire8/relu_expand1x1"
  type: "ReLU"
  bottom: "fire8/expand1x1"
  top: "fire8/expand1x1"
}
layer {
  name: "fire8/expand3x3"
  type: "Convolution"
  bottom: "fire8/squeeze1x1"
  top: "fire8/expand3x3"
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire8/relu_expand3x3"
  type: "ReLU"
  bottom: "fire8/expand3x3"
  top: "fire8/expand3x3"
}
layer {
  name: "fire8/concat"
  type: "Concat"
  bottom: "fire8/expand1x1"
  bottom: "fire8/expand3x3"
  top: "fire8/concat"
}
# Fire module 9: same shape as fire8 (squeeze 64 -> expand 256 + 256 = 512),
# followed by 50% dropout (in-place; Caffe applies dropout at TRAIN only).
layer {
  name: "fire9/squeeze1x1"
  type: "Convolution"
  bottom: "fire8/concat"
  top: "fire9/squeeze1x1"
  convolution_param {
    num_output: 64
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire9/relu_squeeze1x1"
  type: "ReLU"
  bottom: "fire9/squeeze1x1"
  top: "fire9/squeeze1x1"
}
layer {
  name: "fire9/expand1x1"
  type: "Convolution"
  bottom: "fire9/squeeze1x1"
  top: "fire9/expand1x1"
  convolution_param {
    num_output: 256
    kernel_size: 1
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire9/relu_expand1x1"
  type: "ReLU"
  bottom: "fire9/expand1x1"
  top: "fire9/expand1x1"
}
layer {
  name: "fire9/expand3x3"
  type: "Convolution"
  bottom: "fire9/squeeze1x1"
  top: "fire9/expand3x3"
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "fire9/relu_expand3x3"
  type: "ReLU"
  bottom: "fire9/expand3x3"
  top: "fire9/expand3x3"
}
layer {
  name: "fire9/concat"
  type: "Concat"
  bottom: "fire9/expand1x1"
  bottom: "fire9/expand3x3"
  top: "fire9/concat"
}
layer {
  name: "drop9"
  type: "Dropout"
  bottom: "fire9/concat"
  top: "fire9/concat"
  dropout_param {
    dropout_ratio: 0.5
  }
}
# Classifier head: 1x1 conv to 1000 class channels (Gaussian init, std 0.01,
# per the SqueezeNet reference), ReLU, then global average pooling so each
# class channel collapses to a single logit.
layer {
  name: "conv10"
  type: "Convolution"
  bottom: "fire9/concat"
  top: "conv10"
  convolution_param {
    num_output: 1000
    kernel_size: 1
    weight_filler {
      type: "gaussian"
      mean: 0.0
      std: 0.01
    }
  }
}
layer {
  name: "relu_conv10"
  type: "ReLU"
  bottom: "conv10"
  top: "conv10"
}
layer {
  name: "pool10"
  type: "Pooling"
  bottom: "conv10"
  top: "pool10"
  pooling_param {
    pool: AVE
    global_pooling: true
  }
}
# TRAIN: softmax cross-entropy loss against the label blob.
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "pool10"
  bottom: "label"
  top: "loss"
  include {
    phase: TRAIN
  }
}
# TEST: raw softmax probabilities only.
# NOTE(review): there is no Accuracy layer in the TEST phase, so evaluation
# metrics must be computed by the caller from the "losst" output.
layer {
  name: "losst"
  type: "Softmax"
  bottom: "pool10"
  top: "losst"
  include {
    phase: TEST
  }
}
Add Comment
Please, Sign In to add comment