# Simplified LeNet: one 5x5 convolution, 2x2 max pooling, and two fully
# connected layers with a sigmoid in between, trained with softmax loss
# over 10 classes. Input is supplied through MemoryData layers.
name: "LeNetSimplifie2"
# Training input: in-memory batches of 50 single-channel 28x28 images.
layer {
  name: "data"
  type: "MemoryData"
  top: "data"
  top: "label"
  include {
    phase: TRAIN
  }
  transform_param {
    scale: 0.00390625  # 1/256, scales raw [0, 255] pixel values to roughly [0, 1)
  }
  memory_data_param {
    batch_size: 50
    channels: 1
    height: 28
    width: 28
  }
}
# Test input: same shape and scaling as the training layer.
layer {
  name: "test_inputdata"
  type: "MemoryData"
  top: "data"
  top: "label"
  include {
    phase: TEST
  }
  transform_param {
    scale: 0.00390625
  }
  memory_data_param {
    batch_size: 50
    channels: 1
    height: 28
    width: 28
  }
}
# Convolution: 6 feature maps, 5x5 kernel, stride 1.
layer {
  name: "conv1a"
  type: "Convolution"
  bottom: "data"
  top: "conv1a"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 2
  }
  convolution_param {
    num_output: 6
    kernel_size: 5
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}
# 2x2 max pooling, stride 2.
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1a"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
# First fully connected layer: 120 outputs.
layer {
  name: "ip1a"
  type: "InnerProduct"
  bottom: "pool1"
  top: "ip1a"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 2
  }
  inner_product_param {
    num_output: 120
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}
layer {
  name: "sigmoid"
  type: "Sigmoid"
  bottom: "ip1a"
  top: "sigmoid"
}
# Second fully connected layer: 10 class scores.
layer {
  name: "ip2"
  type: "InnerProduct"
  bottom: "sigmoid"
  top: "ip2"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 2
  }
  inner_product_param {
    num_output: 10
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}
# Accuracy is reported only during testing.
layer {
  name: "accuracy"
  type: "Accuracy"
  bottom: "ip2"
  bottom: "label"
  top: "accuracy"
  include {
    phase: TEST
  }
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "ip2"
  bottom: "label"
  top: "loss"
}