name: "simple_net"

# Input layers declare blob shapes only; they carry no data source, so the
# "data" and "label" blobs must be filled externally (e.g. via pycaffe).
# TRAIN-phase data: batches of 100 single-channel 28x28 images (MNIST-sized).
layer {
  name: "data"
  type: "Input"
  top: "data"
  include {
    phase: TRAIN
  }
  input_param {
    shape {
      dim: 100
      dim: 1
      dim: 28
      dim: 28
    }
  }
}
# TEST-phase data: same shape as the TRAIN input.
layer {
  name: "data"
  type: "Input"
  top: "data"
  include {
    phase: TEST
  }
  input_param {
    shape {
      dim: 100
      dim: 1
      dim: 28
      dim: 28
    }
  }
}
# TRAIN-phase labels: one class index per image in the batch.
layer {
  name: "label"
  type: "Input"
  top: "label"
  include {
    phase: TRAIN
  }
  input_param {
    shape {
      dim: 100
    }
  }
}
# TEST-phase labels.
layer {
  name: "label"
  type: "Input"
  top: "label"
  include {
    phase: TEST
  }
  input_param {
    shape {
      dim: 100
    }
  }
}
# First convolution: 32 filters of size 5x5. The second param block applies
# to the bias, which learns at twice the base rate (a common Caffe convention).
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1.0
  }
  param {
    lr_mult: 2.0
  }
  convolution_param {
    num_output: 32
    kernel_size: 5
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}
# In-place ReLU after conv1.
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
# Second convolution: again 32 filters of size 5x5.
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "conv1"
  top: "conv2"
  param {
    lr_mult: 1.0
  }
  param {
    lr_mult: 2.0
  }
  convolution_param {
    num_output: 32
    kernel_size: 5
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
# Fully connected layer with 500 units.
layer {
  name: "fc1"
  type: "InnerProduct"
  bottom: "conv2"
  top: "fc1"
  param {
    lr_mult: 1.0
  }
  param {
    lr_mult: 2.0
  }
  inner_product_param {
    num_output: 500
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "fc1"
  top: "fc1"
}
# Output layer: one score per class (10 classes).
layer {
  name: "out"
  type: "InnerProduct"
  bottom: "fc1"
  top: "out"
  param {
    lr_mult: 1.0
  }
  param {
    lr_mult: 2.0
  }
  inner_product_param {
    num_output: 10
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}
# Softmax cross-entropy loss over the 10 class scores.
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "out"
  bottom: "label"
  top: "loss"
}
# Top-1 accuracy, reported only during testing.
layer {
  name: "accuracy"
  type: "Accuracy"
  bottom: "out"
  bottom: "label"
  top: "accuracy"
  include {
    phase: TEST
  }
}
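
Because the Input layers above declare shapes but no data source, nothing in this definition reads from disk; the caller has to fill the "data" and "label" blobs before each pass. A minimal pycaffe sketch of a TEST-phase forward pass, assuming the definition above is saved as simple_net.prototxt (the filename and the random stand-in data are assumptions, not part of the original paste):

import numpy as np
import caffe  # pycaffe interface

caffe.set_mode_cpu()

# Filename is an assumption: save the network definition above as this file.
net = caffe.Net('simple_net.prototxt', caffe.TEST)

# "Input" layers allocate blobs of the declared shape but never fill them,
# so we write into them directly. Random values stand in for real images.
net.blobs['data'].data[...] = np.random.rand(100, 1, 28, 28)
net.blobs['label'].data[...] = np.random.randint(0, 10, size=100)

out = net.forward()  # returns the un-consumed output blobs
print('loss:', out['loss'], 'accuracy:', out['accuracy'])

For actual training one would write a solver definition (base_lr, momentum, max_iter, etc.) pointing at this net, drive it with caffe.SGDSolver, and fill the TRAIN-phase blobs the same way before each solver.step(1); in practice the Input layers are often swapped for Data layers backed by an LMDB so Caffe handles batching itself.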