      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu5"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  param {
    lr_mult: 0
    decay_mult: 1
  }
  param {
    lr_mult: 0
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "xavier"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "mfc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "mfc7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "xavier"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "mfc7"
  top: "mfc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "mfc7"
  top: "mfc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "mfc8"
  type: "InnerProduct"
  bottom: "mfc7"
  top: "mfc8"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 7
    weight_filler {
      type: "xavier"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "accuracy"
  type: "Accuracy"
  bottom: "mfc8"
  bottom: "label"
  top: "accuracy"
  include {
    phase: TEST
  }
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "mfc8"
  bottom: "label"
  top: "loss"
}
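
The definition above is the tail end of a CaffeNet-style train_val.prototxt adapted to a 7-class face task: fc6 keeps its original name but is frozen (both lr_mult values are 0), while fc7/fc8 are renamed mfc7/mfc8 so that, during finetuning, they are re-initialized from their xavier/constant fillers instead of receiving the pretrained weights. The log that follows comes from a run equivalent to `caffe train --solver ... --weights models/my_face_caffenet/bvlc_reference_caffenet.caffemodel`. A minimal pycaffe sketch of the same run follows; the solver path and CPU mode are assumptions, only the .caffemodel path appears in the log.

    import caffe

    caffe.set_mode_cpu()  # assumption: the log does not say whether CPU or GPU was used

    solver = caffe.SGDSolver('models/my_face_caffenet/solver.prototxt')  # assumed filename
    # `caffe train --weights ...` copies pretrained blobs by layer name into both the
    # train and the test net, which is why the upgrade / "Ignoring source layer"
    # messages appear twice further down in the log.
    weights = 'models/my_face_caffenet/bvlc_reference_caffenet.caffemodel'
    solver.net.copy_from(weights)
    solver.test_nets[0].copy_from(weights)
    solver.step(10)  # the log below covers roughly the first 10 iterations
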
- I0428 17:16:11.567231 7943 layer_factory.hpp:77] Creating layer data
- I0428 17:16:11.567540 7943 net.cpp:100] Creating Layer data
- I0428 17:16:11.567626 7943 net.cpp:408] data -> data
- I0428 17:16:11.567782 7943 net.cpp:408] data -> label
- I0428 17:16:11.567858 7943 data_transformer.cpp:25] Loading mean file from: /home/amir/Develop/caffe/data/face/imagenet_mean.binaryproto
- I0428 17:16:11.670225 7949 db_lmdb.cpp:35] Opened lmdb /home/amir/Develop/caffe/data/face/ilsvrc12_val_lmdb
- I0428 17:16:11.739864 7943 data_layer.cpp:41] output data size: 10,3,227,227
- I0428 17:16:11.781368 7943 net.cpp:150] Setting up data
- I0428 17:16:11.781474 7943 net.cpp:157] Top shape: 10 3 227 227 (1545870)
- I0428 17:16:11.781502 7943 net.cpp:157] Top shape: 10 (10)
- I0428 17:16:11.781517 7943 net.cpp:165] Memory required for data: 6183520
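
The "Memory required for data" counter is a running total of activation (top blob) storage only, at 4 bytes per float32 element. The first entry can be checked directly from the two shapes just printed:

    # "Memory required for data" counts top blobs only, 4 bytes per float32 element.
    data_blob  = 10 * 3 * 227 * 227   # 1,545,870 elements ("Top shape: 10 3 227 227")
    label_blob = 10                   # ("Top shape: 10")
    print((data_blob + label_blob) * 4)   # -> 6183520, matching the log line above
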
- I0428 17:16:11.781533 7943 layer_factory.hpp:77] Creating layer label_data_1_split
- I0428 17:16:11.781643 7943 net.cpp:100] Creating Layer label_data_1_split
- I0428 17:16:11.781692 7943 net.cpp:434] label_data_1_split <- label
- I0428 17:16:11.781716 7943 net.cpp:408] label_data_1_split -> label_data_1_split_0
- I0428 17:16:11.781747 7943 net.cpp:408] label_data_1_split -> label_data_1_split_1
- I0428 17:16:11.781777 7943 net.cpp:150] Setting up label_data_1_split
- I0428 17:16:11.781826 7943 net.cpp:157] Top shape: 10 (10)
- I0428 17:16:11.781846 7943 net.cpp:157] Top shape: 10 (10)
- I0428 17:16:11.781859 7943 net.cpp:165] Memory required for data: 6183600
- I0428 17:16:11.781873 7943 layer_factory.hpp:77] Creating layer conv1
- I0428 17:16:11.781908 7943 net.cpp:100] Creating Layer conv1
- I0428 17:16:11.782083 7943 net.cpp:434] conv1 <- data
- I0428 17:16:11.782114 7943 net.cpp:408] conv1 -> conv1
- I0428 17:16:11.782531 7943 net.cpp:150] Setting up conv1
- I0428 17:16:11.782594 7943 net.cpp:157] Top shape: 10 96 55 55 (2904000)
- I0428 17:16:11.782613 7943 net.cpp:165] Memory required for data: 17799600
- I0428 17:16:11.782639 7943 layer_factory.hpp:77] Creating layer relu1
- I0428 17:16:11.782660 7943 net.cpp:100] Creating Layer relu1
- I0428 17:16:11.782675 7943 net.cpp:434] relu1 <- conv1
- I0428 17:16:11.782691 7943 net.cpp:395] relu1 -> conv1 (in-place)
- I0428 17:16:11.782712 7943 net.cpp:150] Setting up relu1
- I0428 17:16:11.782728 7943 net.cpp:157] Top shape: 10 96 55 55 (2904000)
- I0428 17:16:11.782742 7943 net.cpp:165] Memory required for data: 29415600
- I0428 17:16:11.782753 7943 layer_factory.hpp:77] Creating layer pool1
- I0428 17:16:11.782773 7943 net.cpp:100] Creating Layer pool1
- I0428 17:16:11.782789 7943 net.cpp:434] pool1 <- conv1
- I0428 17:16:11.782805 7943 net.cpp:408] pool1 -> pool1
- I0428 17:16:11.782829 7943 net.cpp:150] Setting up pool1
- I0428 17:16:11.782846 7943 net.cpp:157] Top shape: 10 96 27 27 (699840)
- I0428 17:16:11.782860 7943 net.cpp:165] Memory required for data: 32214960
- I0428 17:16:11.782871 7943 layer_factory.hpp:77] Creating layer norm1
- I0428 17:16:11.782888 7943 net.cpp:100] Creating Layer norm1
- I0428 17:16:11.782902 7943 net.cpp:434] norm1 <- pool1
- I0428 17:16:11.782940 7943 net.cpp:408] norm1 -> norm1
- I0428 17:16:11.782975 7943 net.cpp:150] Setting up norm1
- I0428 17:16:11.782992 7943 net.cpp:157] Top shape: 10 96 27 27 (699840)
- I0428 17:16:11.783004 7943 net.cpp:165] Memory required for data: 35014320
- I0428 17:16:11.783016 7943 layer_factory.hpp:77] Creating layer conv2
- I0428 17:16:11.783040 7943 net.cpp:100] Creating Layer conv2
- I0428 17:16:11.783058 7943 net.cpp:434] conv2 <- norm1
- I0428 17:16:11.783076 7943 net.cpp:408] conv2 -> conv2
- I0428 17:16:11.787181 7943 net.cpp:150] Setting up conv2
- I0428 17:16:11.787331 7943 net.cpp:157] Top shape: 10 256 27 27 (1866240)
- I0428 17:16:11.787358 7943 net.cpp:165] Memory required for data: 42479280
- I0428 17:16:11.787452 7943 layer_factory.hpp:77] Creating layer relu2
- I0428 17:16:11.787498 7943 net.cpp:100] Creating Layer relu2
- I0428 17:16:11.787513 7943 net.cpp:434] relu2 <- conv2
- I0428 17:16:11.787525 7943 net.cpp:395] relu2 -> conv2 (in-place)
- I0428 17:16:11.787542 7943 net.cpp:150] Setting up relu2
- I0428 17:16:11.787557 7943 net.cpp:157] Top shape: 10 256 27 27 (1866240)
- I0428 17:16:11.787564 7943 net.cpp:165] Memory required for data: 49944240
- I0428 17:16:11.787571 7943 layer_factory.hpp:77] Creating layer pool2
- I0428 17:16:11.787590 7943 net.cpp:100] Creating Layer pool2
- I0428 17:16:11.787618 7943 net.cpp:434] pool2 <- conv2
- I0428 17:16:11.787631 7943 net.cpp:408] pool2 -> pool2
- I0428 17:16:11.787653 7943 net.cpp:150] Setting up pool2
- I0428 17:16:11.787667 7943 net.cpp:157] Top shape: 10 256 13 13 (432640)
- I0428 17:16:11.787674 7943 net.cpp:165] Memory required for data: 51674800
- I0428 17:16:11.787681 7943 layer_factory.hpp:77] Creating layer norm2
- I0428 17:16:11.787693 7943 net.cpp:100] Creating Layer norm2
- I0428 17:16:11.787701 7943 net.cpp:434] norm2 <- pool2
- I0428 17:16:11.787765 7943 net.cpp:408] norm2 -> norm2
- I0428 17:16:11.787786 7943 net.cpp:150] Setting up norm2
- I0428 17:16:11.787793 7943 net.cpp:157] Top shape: 10 256 13 13 (432640)
- I0428 17:16:11.787799 7943 net.cpp:165] Memory required for data: 53405360
- I0428 17:16:11.787806 7943 layer_factory.hpp:77] Creating layer conv3
- I0428 17:16:11.787824 7943 net.cpp:100] Creating Layer conv3
- I0428 17:16:11.787830 7943 net.cpp:434] conv3 <- norm2
- I0428 17:16:11.787847 7943 net.cpp:408] conv3 -> conv3
- I0428 17:16:11.796581 7943 net.cpp:150] Setting up conv3
- I0428 17:16:11.796624 7943 net.cpp:157] Top shape: 10 384 13 13 (648960)
- I0428 17:16:11.796632 7943 net.cpp:165] Memory required for data: 56001200
- I0428 17:16:11.796649 7943 layer_factory.hpp:77] Creating layer relu3
- I0428 17:16:11.796663 7943 net.cpp:100] Creating Layer relu3
- I0428 17:16:11.796671 7943 net.cpp:434] relu3 <- conv3
- I0428 17:16:11.796679 7943 net.cpp:395] relu3 -> conv3 (in-place)
- I0428 17:16:11.796692 7943 net.cpp:150] Setting up relu3
- I0428 17:16:11.796700 7943 net.cpp:157] Top shape: 10 384 13 13 (648960)
- I0428 17:16:11.796705 7943 net.cpp:165] Memory required for data: 58597040
- I0428 17:16:11.796710 7943 layer_factory.hpp:77] Creating layer conv4
- I0428 17:16:11.796725 7943 net.cpp:100] Creating Layer conv4
- I0428 17:16:11.796731 7943 net.cpp:434] conv4 <- conv3
- I0428 17:16:11.796741 7943 net.cpp:408] conv4 -> conv4
- I0428 17:16:11.806030 7943 net.cpp:150] Setting up conv4
- I0428 17:16:11.806108 7943 net.cpp:157] Top shape: 10 384 13 13 (648960)
- I0428 17:16:11.806124 7943 net.cpp:165] Memory required for data: 61192880
- I0428 17:16:11.806145 7943 layer_factory.hpp:77] Creating layer relu4
- I0428 17:16:11.806167 7943 net.cpp:100] Creating Layer relu4
- I0428 17:16:11.806182 7943 net.cpp:434] relu4 <- conv4
- I0428 17:16:11.806205 7943 net.cpp:395] relu4 -> conv4 (in-place)
- I0428 17:16:11.806227 7943 net.cpp:150] Setting up relu4
- I0428 17:16:11.806251 7943 net.cpp:157] Top shape: 10 384 13 13 (648960)
- I0428 17:16:11.806263 7943 net.cpp:165] Memory required for data: 63788720
- I0428 17:16:11.806275 7943 layer_factory.hpp:77] Creating layer conv5
- I0428 17:16:11.806299 7943 net.cpp:100] Creating Layer conv5
- I0428 17:16:11.806318 7943 net.cpp:434] conv5 <- conv4
- I0428 17:16:11.806335 7943 net.cpp:408] conv5 -> conv5
- I0428 17:16:11.811363 7943 net.cpp:150] Setting up conv5
- I0428 17:16:11.811441 7943 net.cpp:157] Top shape: 10 256 13 13 (432640)
- I0428 17:16:11.811452 7943 net.cpp:165] Memory required for data: 65519280
- I0428 17:16:11.811471 7943 layer_factory.hpp:77] Creating layer relu5
- I0428 17:16:11.811487 7943 net.cpp:100] Creating Layer relu5
- I0428 17:16:11.811496 7943 net.cpp:434] relu5 <- conv5
- I0428 17:16:11.811506 7943 net.cpp:395] relu5 -> conv5 (in-place)
- I0428 17:16:11.811520 7943 net.cpp:150] Setting up relu5
- I0428 17:16:11.811528 7943 net.cpp:157] Top shape: 10 256 13 13 (432640)
- I0428 17:16:11.811588 7943 net.cpp:165] Memory required for data: 67249840
- I0428 17:16:11.811599 7943 layer_factory.hpp:77] Creating layer pool5
- I0428 17:16:11.811614 7943 net.cpp:100] Creating Layer pool5
- I0428 17:16:11.811622 7943 net.cpp:434] pool5 <- conv5
- I0428 17:16:11.811635 7943 net.cpp:408] pool5 -> pool5
- I0428 17:16:11.811655 7943 net.cpp:150] Setting up pool5
- I0428 17:16:11.811666 7943 net.cpp:157] Top shape: 10 256 6 6 (92160)
- I0428 17:16:11.811671 7943 net.cpp:165] Memory required for data: 67618480
- I0428 17:16:11.811678 7943 layer_factory.hpp:77] Creating layer fc6
- I0428 17:16:11.811691 7943 net.cpp:100] Creating Layer fc6
- I0428 17:16:11.811697 7943 net.cpp:434] fc6 <- pool5
- I0428 17:16:11.811707 7943 net.cpp:408] fc6 -> fc6
- I0428 17:16:12.343034 7943 net.cpp:150] Setting up fc6
- I0428 17:16:12.343132 7943 net.cpp:157] Top shape: 10 4096 (40960)
- I0428 17:16:12.343147 7943 net.cpp:165] Memory required for data: 67782320
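
Note that this counter tracks activations, not parameters. fc6 alone holds far more parameter memory than all activations combined, which is consistent with the roughly half-second gap in the timestamps around its setup. A quick check, using the pool5 and fc6 shapes from the log:

    # fc6 parameters: pool5 provides 256*6*6 = 9216 inputs, fc6 has 4096 outputs.
    weights = 256 * 6 * 6 * 4096      # 37,748,736 weights
    biases  = 4096
    print((weights + biases) * 4)     # 151011328 bytes, roughly 150 MB for this single layer
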
- I0428 17:16:12.343168 7943 layer_factory.hpp:77] Creating layer relu6
- I0428 17:16:12.343186 7943 net.cpp:100] Creating Layer relu6
- I0428 17:16:12.343200 7943 net.cpp:434] relu6 <- fc6
- I0428 17:16:12.343219 7943 net.cpp:395] relu6 -> fc6 (in-place)
- I0428 17:16:12.343237 7943 net.cpp:150] Setting up relu6
- I0428 17:16:12.343248 7943 net.cpp:157] Top shape: 10 4096 (40960)
- I0428 17:16:12.343255 7943 net.cpp:165] Memory required for data: 67946160
- I0428 17:16:12.343263 7943 layer_factory.hpp:77] Creating layer drop6
- I0428 17:16:12.343274 7943 net.cpp:100] Creating Layer drop6
- I0428 17:16:12.343282 7943 net.cpp:434] drop6 <- fc6
- I0428 17:16:12.343291 7943 net.cpp:395] drop6 -> fc6 (in-place)
- I0428 17:16:12.343336 7943 net.cpp:150] Setting up drop6
- I0428 17:16:12.343348 7943 net.cpp:157] Top shape: 10 4096 (40960)
- I0428 17:16:12.343354 7943 net.cpp:165] Memory required for data: 68110000
- I0428 17:16:12.343361 7943 layer_factory.hpp:77] Creating layer mfc7
- I0428 17:16:12.343379 7943 net.cpp:100] Creating Layer mfc7
- I0428 17:16:12.343399 7943 net.cpp:434] mfc7 <- fc6
- I0428 17:16:12.343408 7943 net.cpp:408] mfc7 -> mfc7
- I0428 17:16:13.070076 7943 net.cpp:150] Setting up mfc7
- I0428 17:16:13.070158 7943 net.cpp:157] Top shape: 10 4096 (40960)
- I0428 17:16:13.070173 7943 net.cpp:165] Memory required for data: 68273840
- I0428 17:16:13.070190 7943 layer_factory.hpp:77] Creating layer relu7
- I0428 17:16:13.070209 7943 net.cpp:100] Creating Layer relu7
- I0428 17:16:13.070219 7943 net.cpp:434] relu7 <- mfc7
- I0428 17:16:13.070230 7943 net.cpp:395] relu7 -> mfc7 (in-place)
- I0428 17:16:13.070246 7943 net.cpp:150] Setting up relu7
- I0428 17:16:13.070256 7943 net.cpp:157] Top shape: 10 4096 (40960)
- I0428 17:16:13.070263 7943 net.cpp:165] Memory required for data: 68437680
- I0428 17:16:13.070271 7943 layer_factory.hpp:77] Creating layer drop7
- I0428 17:16:13.070286 7943 net.cpp:100] Creating Layer drop7
- I0428 17:16:13.070293 7943 net.cpp:434] drop7 <- mfc7
- I0428 17:16:13.070303 7943 net.cpp:395] drop7 -> mfc7 (in-place)
- I0428 17:16:13.070315 7943 net.cpp:150] Setting up drop7
- I0428 17:16:13.070323 7943 net.cpp:157] Top shape: 10 4096 (40960)
- I0428 17:16:13.070330 7943 net.cpp:165] Memory required for data: 68601520
- I0428 17:16:13.070336 7943 layer_factory.hpp:77] Creating layer mfc8
- I0428 17:16:13.070348 7943 net.cpp:100] Creating Layer mfc8
- I0428 17:16:13.070363 7943 net.cpp:434] mfc8 <- mfc7
- I0428 17:16:13.070400 7943 net.cpp:408] mfc8 -> mfc8
- I0428 17:16:13.070848 7943 net.cpp:150] Setting up mfc8
- I0428 17:16:13.070905 7943 net.cpp:157] Top shape: 10 7 (70)
- I0428 17:16:13.070916 7943 net.cpp:165] Memory required for data: 68601800
- I0428 17:16:13.070929 7943 layer_factory.hpp:77] Creating layer mfc8_mfc8_0_split
- I0428 17:16:13.070943 7943 net.cpp:100] Creating Layer mfc8_mfc8_0_split
- I0428 17:16:13.070952 7943 net.cpp:434] mfc8_mfc8_0_split <- mfc8
- I0428 17:16:13.070961 7943 net.cpp:408] mfc8_mfc8_0_split -> mfc8_mfc8_0_split_0
- I0428 17:16:13.070973 7943 net.cpp:408] mfc8_mfc8_0_split -> mfc8_mfc8_0_split_1
- I0428 17:16:13.070986 7943 net.cpp:150] Setting up mfc8_mfc8_0_split
- I0428 17:16:13.071044 7943 net.cpp:157] Top shape: 10 7 (70)
- I0428 17:16:13.071054 7943 net.cpp:157] Top shape: 10 7 (70)
- I0428 17:16:13.071061 7943 net.cpp:165] Memory required for data: 68602360
- I0428 17:16:13.071069 7943 layer_factory.hpp:77] Creating layer accuracy
- I0428 17:16:13.071087 7943 net.cpp:100] Creating Layer accuracy
- I0428 17:16:13.071115 7943 net.cpp:434] accuracy <- mfc8_mfc8_0_split_0
- I0428 17:16:13.071125 7943 net.cpp:434] accuracy <- label_data_1_split_0
- I0428 17:16:13.071135 7943 net.cpp:408] accuracy -> accuracy
- I0428 17:16:13.071151 7943 net.cpp:150] Setting up accuracy
- I0428 17:16:13.071164 7943 net.cpp:157] Top shape: (1)
- I0428 17:16:13.071171 7943 net.cpp:165] Memory required for data: 68602364
- I0428 17:16:13.071178 7943 layer_factory.hpp:77] Creating layer loss
- I0428 17:16:13.071190 7943 net.cpp:100] Creating Layer loss
- I0428 17:16:13.071202 7943 net.cpp:434] loss <- mfc8_mfc8_0_split_1
- I0428 17:16:13.071210 7943 net.cpp:434] loss <- label_data_1_split_1
- I0428 17:16:13.071219 7943 net.cpp:408] loss -> loss
- I0428 17:16:13.071231 7943 layer_factory.hpp:77] Creating layer loss
- I0428 17:16:13.071251 7943 net.cpp:150] Setting up loss
- I0428 17:16:13.071265 7943 net.cpp:157] Top shape: (1)
- I0428 17:16:13.071272 7943 net.cpp:160] with loss weight 1
- I0428 17:16:13.071287 7943 net.cpp:165] Memory required for data: 68602368
- I0428 17:16:13.071295 7943 net.cpp:226] loss needs backward computation.
- I0428 17:16:13.071301 7943 net.cpp:228] accuracy does not need backward computation.
- I0428 17:16:13.071310 7943 net.cpp:226] mfc8_mfc8_0_split needs backward computation.
- I0428 17:16:13.071316 7943 net.cpp:226] mfc8 needs backward computation.
- I0428 17:16:13.071323 7943 net.cpp:226] drop7 needs backward computation.
- I0428 17:16:13.071331 7943 net.cpp:226] relu7 needs backward computation.
- I0428 17:16:13.071337 7943 net.cpp:226] mfc7 needs backward computation.
- I0428 17:16:13.071346 7943 net.cpp:228] drop6 does not need backward computation.
- I0428 17:16:13.071352 7943 net.cpp:228] relu6 does not need backward computation.
- I0428 17:16:13.071359 7943 net.cpp:228] fc6 does not need backward computation.
- I0428 17:16:13.071383 7943 net.cpp:228] pool5 does not need backward computation.
- I0428 17:16:13.071394 7943 net.cpp:228] relu5 does not need backward computation.
- I0428 17:16:13.071401 7943 net.cpp:228] conv5 does not need backward computation.
- I0428 17:16:13.071408 7943 net.cpp:228] relu4 does not need backward computation.
- I0428 17:16:13.071415 7943 net.cpp:228] conv4 does not need backward computation.
- I0428 17:16:13.071432 7943 net.cpp:228] relu3 does not need backward computation.
- I0428 17:16:13.071439 7943 net.cpp:228] conv3 does not need backward computation.
- I0428 17:16:13.071445 7943 net.cpp:228] norm2 does not need backward computation.
- I0428 17:16:13.071451 7943 net.cpp:228] pool2 does not need backward computation.
- I0428 17:16:13.071457 7943 net.cpp:228] relu2 does not need backward computation.
- I0428 17:16:13.071465 7943 net.cpp:228] conv2 does not need backward computation.
- I0428 17:16:13.071470 7943 net.cpp:228] norm1 does not need backward computation.
- I0428 17:16:13.071476 7943 net.cpp:228] pool1 does not need backward computation.
- I0428 17:16:13.071482 7943 net.cpp:228] relu1 does not need backward computation.
- I0428 17:16:13.071490 7943 net.cpp:228] conv1 does not need backward computation.
- I0428 17:16:13.071496 7943 net.cpp:228] label_data_1_split does not need backward computation.
- I0428 17:16:13.071502 7943 net.cpp:228] data does not need backward computation.
- I0428 17:16:13.071508 7943 net.cpp:270] This network produces output accuracy
- I0428 17:16:13.071514 7943 net.cpp:270] This network produces output loss
- I0428 17:16:13.071568 7943 net.cpp:283] Network initialization done.
- I0428 17:16:13.071692 7943 solver.cpp:60] Solver scaffolding done.
- I0428 17:16:13.071741 7943 caffe.cpp:155] Finetuning from models/my_face_caffenet/bvlc_reference_caffenet.caffemodel
- I0428 17:16:16.748023 7943 upgrade_proto.cpp:44] Attempting to upgrade input file specified using deprecated transformation parameters: models/my_face_caffenet/bvlc_reference_caffenet.caffemodel
- I0428 17:16:16.748179 7943 upgrade_proto.cpp:47] Successfully upgraded file specified using deprecated data transformation parameters.
- W0428 17:16:16.748208 7943 upgrade_proto.cpp:49] Note that future Caffe releases will only support transform_param messages for transformation fields.
- I0428 17:16:16.748953 7943 upgrade_proto.cpp:53] Attempting to upgrade input file specified using deprecated V1LayerParameter: models/my_face_caffenet/bvlc_reference_caffenet.caffemodel
- I0428 17:16:17.498656 7943 upgrade_proto.cpp:61] Successfully upgraded file specified using deprecated V1LayerParameter
- I0428 17:16:17.586127 7943 net.cpp:761] Ignoring source layer fc7
- I0428 17:16:17.586278 7943 net.cpp:761] Ignoring source layer fc8
- I0428 17:16:21.014925 7943 upgrade_proto.cpp:44] Attempting to upgrade input file specified using deprecated transformation parameters: models/my_face_caffenet/bvlc_reference_caffenet.caffemodel
- I0428 17:16:21.015012 7943 upgrade_proto.cpp:47] Successfully upgraded file specified using deprecated data transformation parameters.
- W0428 17:16:21.015034 7943 upgrade_proto.cpp:49] Note that future Caffe releases will only support transform_param messages for transformation fields.
- I0428 17:16:21.026691 7943 upgrade_proto.cpp:53] Attempting to upgrade input file specified using deprecated V1LayerParameter: models/my_face_caffenet/bvlc_reference_caffenet.caffemodel
- I0428 17:16:21.451776 7943 upgrade_proto.cpp:61] Successfully upgraded file specified using deprecated V1LayerParameter
- I0428 17:16:21.510260 7943 net.cpp:761] Ignoring source layer fc7
- I0428 17:16:21.510344 7943 net.cpp:761] Ignoring source layer fc8
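
"Ignoring source layer fc7" / "fc8" is expected here: Caffe copies pretrained weights by layer name, so the renamed mfc7/mfc8 layers receive nothing from bvlc_reference_caffenet.caffemodel and keep their filler initialization, while conv1-conv5 and fc6 are copied (fc6 is then held fixed by its lr_mult: 0). The pair of messages appears twice because the weights are loaded into the train net and the test net separately. One way to confirm what was copied (the train_val.prototxt filename is an assumption):

    import caffe

    # Load the (assumed) train_val definition together with the pretrained weights
    # and list every parameterized layer with its blob shapes.
    net = caffe.Net('models/my_face_caffenet/train_val.prototxt',
                    'models/my_face_caffenet/bvlc_reference_caffenet.caffemodel',
                    caffe.TEST)
    for name, blobs in net.params.items():
        print(name, [b.data.shape for b in blobs])
    # conv1-conv5 and fc6 show the copied CaffeNet shapes; mfc7/mfc8 are present
    # but hold only their xavier/constant initial values.
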
- I0428 17:16:21.531802 7943 caffe.cpp:251] Starting Optimization
- I0428 17:16:21.576205 7943 solver.cpp:279] Solving CaffeNet
- I0428 17:16:21.576285 7943 solver.cpp:280] Learning Rate Policy: step
- I0428 17:16:21.688699 7943 solver.cpp:337] Iteration 0, Testing net (#0)
- I0428 17:16:28.050719 7943 solver.cpp:404] Test net output #0: accuracy = 0.125
- I0428 17:16:28.050822 7943 solver.cpp:404] Test net output #1: loss = 6.68885 (* 1 = 6.68885 loss)
- I0428 17:16:31.191115 7943 solver.cpp:228] Iteration 0, loss = 9.61163
- I0428 17:16:31.191215 7943 solver.cpp:244] Train net output #0: loss = 9.61163 (* 1 = 9.61163 loss)
- I0428 17:16:31.191251 7943 sgd_solver.cpp:106] Iteration 0, lr = 0.001
- I0428 17:16:34.564023 7943 solver.cpp:228] Iteration 1, loss = 10.4964
- I0428 17:16:34.564131 7943 solver.cpp:244] Train net output #0: loss = 10.4964 (* 1 = 10.4964 loss)
- I0428 17:16:34.564153 7943 sgd_solver.cpp:106] Iteration 1, lr = 0.001
- I0428 17:16:37.873615 7943 solver.cpp:228] Iteration 2, loss = 12.4448
- I0428 17:16:37.873734 7943 solver.cpp:244] Train net output #0: loss = 12.4448 (* 1 = 12.4448 loss)
- I0428 17:16:37.873764 7943 sgd_solver.cpp:106] Iteration 2, lr = 0.001
- I0428 17:16:41.167212 7943 solver.cpp:228] Iteration 3, loss = 9.48714
- I0428 17:16:41.189265 7943 solver.cpp:244] Train net output #0: loss = 9.48714 (* 1 = 9.48714 loss)
- I0428 17:16:41.189297 7943 sgd_solver.cpp:106] Iteration 3, lr = 0.001
- I0428 17:16:44.495828 7943 solver.cpp:228] Iteration 4, loss = 9.36762
- I0428 17:16:44.495931 7943 solver.cpp:244] Train net output #0: loss = 9.36762 (* 1 = 9.36762 loss)
- I0428 17:16:44.495954 7943 sgd_solver.cpp:106] Iteration 4, lr = 0.001
- I0428 17:16:47.821759 7943 solver.cpp:228] Iteration 5, loss = 11.5583
- I0428 17:16:47.821883 7943 solver.cpp:244] Train net output #0: loss = 11.5583 (* 1 = 11.5583 loss)
- I0428 17:16:47.821928 7943 sgd_solver.cpp:106] Iteration 5, lr = 0.001
- I0428 17:16:51.143823 7943 solver.cpp:228] Iteration 6, loss = 10.0145
- I0428 17:16:51.143944 7943 solver.cpp:244] Train net output #0: loss = 10.0145 (* 1 = 10.0145 loss)
- I0428 17:16:51.143975 7943 sgd_solver.cpp:106] Iteration 6, lr = 0.001
- I0428 17:16:54.446740 7943 solver.cpp:228] Iteration 7, loss = 10.547
- I0428 17:16:54.446843 7943 solver.cpp:244] Train net output #0: loss = 10.547 (* 1 = 10.547 loss)
- I0428 17:16:54.446866 7943 sgd_solver.cpp:106] Iteration 7, lr = 0.001
- I0428 17:16:57.786774 7943 solver.cpp:228] Iteration 8, loss = 12.769
- I0428 17:16:57.786885 7943 solver.cpp:244] Train net output #0: loss = 12.769 (* 1 = 12.769 loss)
- I0428 17:16:57.786909 7943 sgd_solver.cpp:106] Iteration 8, lr = 0.001
- I0428 17:17:01.098026 7943 solver.cpp:228] Iteration 9, loss = 11.7928
- I0428 17:17:01.098155 7943 solver.cpp:244] Train net output #0: loss = 11.7928 (* 1 = 11.7928 loss)
- I0428 17:17:01.098181 7943 sgd_solver.cpp:106] Iteration 9, lr = 0.001
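
For context on these first iterations: with 7 classes, chance accuracy is about 1/7 ≈ 0.143 (the test net reports 0.125 at iteration 0, i.e. roughly chance), and a uniform prediction would give a softmax loss of ln(7) ≈ 1.95. The train losses of about 9-12 above are therefore much higher than chance-level loss, which usually means the freshly initialized mfc7/mfc8 stack starts with large, confident-but-wrong logits; whether the loss trends downward over the following iterations is the thing to watch. Quick reference values:

    import math

    num_classes = 7
    print(1.0 / num_classes)       # ~0.143: chance accuracy (log reports 0.125 at iteration 0)
    print(math.log(num_classes))   # ~1.946: loss of a uniform prediction; the train loss above is ~9-12
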
- I0428 17:17:01.360605 7943 solver.cpp:454] Snapshotting to binary proto file /home/amir/Develop/caffe/models/my_face_caffenet/caffenet_train_iter_10.caffemodel
- *** Aborted at 1493414221 (unix time) try "date -d @1493414221" if you are using GNU date ***
- PC: @ 0x7fdbb0e76f0c (unknown)
- *** SIGSEGV (@0x0) received by PID 7943 (TID 0x7fdbb2779780) from PID 0; stack trace: ***
- @ 0x7fdbb0e2ccb0 (unknown)
- @ 0x7fdbb0e76f0c (unknown)
- @ 0x7fdbb0e786c0 (unknown)
- @ 0x7fdbb142fdad (unknown)
- @ 0x7fdbb142fea9 (unknown)
- @ 0x7fdbb2193547 google::protobuf::RepeatedField<>::Reserve()
- @ 0x7fdbb23046fb caffe::Blob<>::ToProto()
- @ 0x7fdbb21af7ac caffe::Layer<>::ToProto()
- @ 0x7fdbb22be8bf caffe::Net<>::ToProto()
- @ 0x7fdbb219dda6 caffe::Solver<>::SnapshotToBinaryProto()
- @ 0x7fdbb219dec0 caffe::Solver<>::Snapshot()
- @ 0x7fdbb219ee8a caffe::Solver<>::Step()
- @ 0x7fdbb219f619 caffe::Solver<>::Solve()
- @ 0x40821d train()
- @ 0x40589c main
- @ 0x7fdbb0e17f45 (unknown)
- @ 0x40610b (unknown)
- @ 0x0 (unknown)
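
The run dies while writing the first snapshot: the stack trace runs Solver::Snapshot -> SnapshotToBinaryProto -> Net::ToProto -> Blob::ToProto -> protobuf's RepeatedField::Reserve, i.e. the segfault happens while the weight blobs are being packed into a single in-memory protobuf message for caffenet_train_iter_10.caffemodel; training itself up to iteration 9 completed normally. Commonly reported causes for crashes at exactly this point include running low on memory during serialization (the whole model is briefly duplicated in RAM) and a protobuf header/library version mismatch in the Caffe build. A frequently suggested workaround is to snapshot in HDF5 format, which writes blobs individually instead of going through Blob::ToProto. A hedged sketch of flipping that switch in the solver (solver filename assumed; whether this helps depends on the actual cause):

    from caffe.proto import caffe_pb2
    from google.protobuf import text_format

    solver_file = 'models/my_face_caffenet/solver.prototxt'   # assumed path
    s = caffe_pb2.SolverParameter()
    with open(solver_file) as f:
        text_format.Merge(f.read(), s)

    # snapshot_format defaults to BINARYPROTO; HDF5 avoids the Blob::ToProto path
    # seen in the stack trace above.
    s.snapshot_format = caffe_pb2.SolverParameter.HDF5
    with open(solver_file, 'w') as f:
        f.write(text_format.MessageToString(s))
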