from keras import models
from keras import layers
from keras.layers import LeakyReLU

# Input image dimensions: placeholder values, replace with the real size used by your data
img_rows, img_cols = 28, 28

# Model
network = models.Sequential()

# First conv block: 32 3x3 filters, LeakyReLU activation, 2x2 max pooling
network.add(layers.Conv2D(32, (3, 3), activation=None,
                          input_shape=(img_rows, img_cols, 1),
                          padding='valid', use_bias=False))
network.add(LeakyReLU(alpha=0.1))
network.add(layers.MaxPooling2D(pool_size=(2, 2)))

# Second conv block
network.add(layers.Conv2D(32, (3, 3), activation=None,
                          padding='valid', use_bias=True))
network.add(LeakyReLU(alpha=0.1))
network.add(layers.MaxPooling2D(pool_size=(2, 2)))

# Classifier head: 64-unit dense layer and a 3-class softmax output
network.add(layers.Flatten())
network.add(layers.Dense(64, activation='relu'))
network.add(layers.Dense(3, activation='softmax'))

network.compile(optimizer='adam',
                loss='categorical_crossentropy',
                metrics=['accuracy'])

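A minimal call to train the model above might look like the sketch below. The array names (train_images, train_labels, valid_images, valid_labels), the epoch count, and the batch size of 84 (chosen only to mirror the Caffe data layers further down) are assumptions for illustration, not part of the original paste.

# Sketch only: train_images/train_labels and valid_images/valid_labels are
# assumed to be NumPy arrays of shape (n, img_rows, img_cols, 1) with
# one-hot labels of shape (n, 3).
history = network.fit(train_images, train_labels,
                      batch_size=84,
                      epochs=20,
                      validation_data=(valid_images, valid_labels))
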
# Caffe prototxt definition of the network, starting with the data layers
# for the TRAIN and TEST phases.
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TRAIN
  }
  data_param {
    source: "input/lmdb/train_lmdb"
    batch_size: 84
    backend: LMDB
  }
}
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  include {
    phase: TEST
  }
  data_param {
    source: "input/lmdb/valid_lmdb"
    batch_size: 84
    backend: LMDB
  }
}

layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 32
    pad: 0
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "lr1"
  type: "ReLU"
  bottom: "conv1"
  top: "lr1"
  relu_param {
    negative_slope: 0.1
  }
}
layer {
  name: "mp1"
  type: "Pooling"
  bottom: "lr1"
  top: "mp1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 1   # note: the Keras MaxPooling2D above uses its default stride of 2
    pad: 0
  }
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "mp1"
  top: "conv2"
  convolution_param {
    num_output: 32
    pad: 0
    kernel_size: 3
    stride: 1
  }
}
layer {
  name: "lr2"
  type: "ReLU"
  bottom: "conv2"
  top: "lr2"
  relu_param {
    negative_slope: 0.1
  }
}
layer {
  name: "mp2"
  type: "Pooling"
  bottom: "lr2"
  top: "mp2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 1   # same note as mp1
    pad: 0
  }
}
layer {
  name: "fl1"
  type: "Flatten"
  bottom: "mp2"
  top: "fl1"
}

layer {
  name: "fc1"
  type: "InnerProduct"
  bottom: "fl1"
  top: "fc1"
  inner_product_param {
    num_output: 64
    weight_filler {
      type: "gaussian"
      # type: "xavier"
      std: 0.001
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "fc1"
  top: "relu1"
}
layer {
  name: "fc2"
  type: "InnerProduct"
  bottom: "relu1"
  top: "fc2"
  inner_product_param {
    num_output: 3
    weight_filler {
      type: "gaussian"
      # type: "xavier"
      std: 0.001
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "softmax"
  type: "Softmax"
  bottom: "fc2"
  top: "softmax"
}
layer {
  name: "accuracy"
  type: "Accuracy"
  bottom: "softmax"
  bottom: "label"
  top: "accuracy"
  include {
    phase: TEST
  }
}
# Softmax followed by MultinomialLogisticLoss corresponds to the
# categorical cross-entropy loss used in the Keras model above.
layer {
  name: "loss"
  type: "MultinomialLogisticLoss"
  bottom: "softmax"
  bottom: "label"
  top: "loss"
}
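
To train the Caffe version, the layers above would be saved to a prototxt file and referenced from a solver definition. A minimal pycaffe sketch follows; the file names (train_val.prototxt, solver.prototxt) and the choice of the Adam solver (to match the Keras optimizer) are assumptions, not part of the original paste.

# Sketch only: assumes the layer definitions above are saved as
# "train_val.prototxt" and that "solver.prototxt" points at it
# (net: "train_val.prototxt", type: "Adam", base_lr, max_iter, ...).
import caffe

caffe.set_mode_cpu()                          # or caffe.set_mode_gpu()
solver = caffe.get_solver('solver.prototxt')  # loads the solver and the net it references
solver.solve()                                # run training; the TEST phase runs every test_interval iterations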