layer {
  name: "relu1"
  type: "ReLU"
  bottom: "pool1"
  top: "relu1"
}

I0319 09:41:09.484148 6909 solver.cpp:44] Initializing solver from parameters:
test_iter: 10
test_interval: 1000
base_lr: 0.001
display: 20
max_iter: 800
lr_policy: "step"
gamma: 0.1
momentum: 0.9
weight_decay: 0.04
stepsize: 200
snapshot: 10000
snapshot_prefix: "models/train"
solver_mode: GPU
net: "train_val.prototxt"
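
For reference, Caffe's "step" learning-rate policy computes lr = base_lr * gamma^floor(iter / stepsize), so with the settings above the rate should be 0.001 for iterations 0-199, 1e-04 for 200-399, 1e-05 for 400-599, and 1e-06 for 600-799 (max_iter: 800).
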
I0319 09:41:09.484392 6909 solver.cpp:87] Creating training net from net file: train_val.prototxt
I0319 09:41:09.485164 6909 net.cpp:294] The NetState phase (0) differed from the phase (1) specified by a rule in layer feed2
I0319 09:41:09.485183 6909 net.cpp:51] Initializing net from parameters:
name: "CaffeNet"
state {
  phase: TRAIN
}
layer {
  name: "feed"
  type: "HDF5Data"
  top: "data"
  top: "label"
  include {
    phase: TRAIN
  }
  hdf5_data_param {
    source: "train_h5_list.txt"
    batch_size: 50
  }
}
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 2
  }
  convolution_param {
    num_output: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
    }
    bias_filler {
      type: "constant"
    }
  }
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 1
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "pool1"
  top: "relu1"
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "relu1"
  top: "conv2"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 2
  }
  convolution_param {
    num_output: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "gaussian"
    }
    bias_filler {
      type: "constant"
    }
  }
}
layer {
  name: "ip2"
  type: "InnerProduct"
  bottom: "conv2"
  top: "ip2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  inner_product_param {
    num_output: 1
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "sig1"
  type: "Sigmoid"
  bottom: "ip2"
  top: "sig1"
}
layer {
  name: "loss"
  type: "EuclideanLoss"
  bottom: "sig1"
  bottom: "label"
  top: "loss"
}
I0319 09:41:09.485752 6909 layer_factory.hpp:77] Creating layer feed
I0319 09:41:09.485780 6909 net.cpp:84] Creating Layer feed
I0319 09:41:09.485792 6909 net.cpp:380] feed -> data
I0319 09:41:09.485819 6909 net.cpp:380] feed -> label
I0319 09:41:09.485836 6909 hdf5_data_layer.cpp:80] Loading list of HDF5 filenames from: train_h5_list.txt
I0319 09:41:09.485860 6909 hdf5_data_layer.cpp:94] Number of HDF5 files: 1
I0319 09:41:09.486469 6909 hdf5.cpp:32] Datatype class: H5T_FLOAT
I0319 09:41:09.500986 6909 net.cpp:122] Setting up feed
I0319 09:41:09.501011 6909 net.cpp:129] Top shape: 50 227 227 3 (7729350)
I0319 09:41:09.501027 6909 net.cpp:129] Top shape: 50 1 (50)
I0319 09:41:09.501039 6909 net.cpp:137] Memory required for data: 30917600
I0319 09:41:09.501051 6909 layer_factory.hpp:77] Creating layer conv1
I0319 09:41:09.501080 6909 net.cpp:84] Creating Layer conv1
I0319 09:41:09.501087 6909 net.cpp:406] conv1 <- data
I0319 09:41:09.501101 6909 net.cpp:380] conv1 -> conv1
I0319 09:41:09.880740 6909 net.cpp:122] Setting up conv1
I0319 09:41:09.880765 6909 net.cpp:129] Top shape: 50 1 225 1 (11250)
I0319 09:41:09.880781 6909 net.cpp:137] Memory required for data: 30962600
I0319 09:41:09.880808 6909 layer_factory.hpp:77] Creating layer pool1
I0319 09:41:09.880836 6909 net.cpp:84] Creating Layer pool1
I0319 09:41:09.880846 6909 net.cpp:406] pool1 <- conv1
I0319 09:41:09.880861 6909 net.cpp:380] pool1 -> pool1
I0319 09:41:09.880888 6909 net.cpp:122] Setting up pool1
I0319 09:41:09.880899 6909 net.cpp:129] Top shape: 50 1 224 0 (0)
I0319 09:41:09.880913 6909 net.cpp:137] Memory required for data: 30962600
I0319 09:41:09.880921 6909 layer_factory.hpp:77] Creating layer relu1
I0319 09:41:09.880934 6909 net.cpp:84] Creating Layer relu1
I0319 09:41:09.880941 6909 net.cpp:406] relu1 <- pool1
I0319 09:41:09.880952 6909 net.cpp:380] relu1 -> relu1
F0319 09:41:09.881192 6909 cudnn.hpp:80] Check failed: status == CUDNN_STATUS_SUCCESS (3 vs. 0) CUDNN_STATUS_BAD_PARAM
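
A plausible reading of the shapes above: the feed layer's data top is 50 227 227 3, which Caffe interprets as N=50, C=227, H=227, W=3, i.e. the HDF5 images appear to be stored channel-last (N x H x W x C) rather than in Caffe's N x C x H x W order. With kernel_size 3, conv1 reduces that width of 3 to 1 (top shape 50 1 225 1), pool1 with kernel_size 2 then reduces it to 0 (top shape 50 1 224 0, zero elements), and cuDNN rejects the empty blob for relu1 with CUDNN_STATUS_BAD_PARAM. A minimal Python sketch of one possible fix is to transpose the data to N x C x H x W before writing the HDF5 file; the array names and the train.h5 filename below are assumptions (only train_h5_list.txt appears in the log):

import h5py
import numpy as np

# Stand-ins for the real training arrays; shapes taken from the log
# (50 images of 227 x 227 x 3, one label value each).
images_nhwc = np.zeros((50, 227, 227, 3), dtype=np.float32)
labels = np.zeros((50, 1), dtype=np.float32)

# Reorder to Caffe's expected N x C x H x W layout.
images_nchw = images_nhwc.transpose(0, 3, 1, 2)

# "train.h5" is an assumed filename; it just has to match the path listed in train_h5_list.txt.
with h5py.File("train.h5", "w") as f:
    f.create_dataset("data", data=images_nchw)   # dataset names must match the HDF5Data layer's tops
    f.create_dataset("label", data=labels)

After regenerating the file, the feed layer should report Top shape: 50 3 227 227 and the downstream spatial sizes stay positive.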