- ./caffe train --solver C:/Users/mahtab/Desktop/finetune_mitindoor67/solver.prototxt --weights C:/Users/mahtab/Desktop/finetune_mitindoor67/places205CNN_iter_300000_upgraded.caffemodel 2>&1 | tee -a C:/Users/mahtab/Desktop/finetune_mitindoor67/modify_caffenet.log
- ./caffe : I0412 04:12:22.239899 4704 caffe.cpp:212] Use CPU.
- At line:1 char:1
- + ./caffe train --solver C:/Users/mahtab/Desktop/finetune_mitindoor67/s ...
- + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- + CategoryInfo : NotSpecified: (I0412 04:12:22....p:212] Use CPU.:String) [], RemoteException
- + FullyQualifiedErrorId : NativeCommandError
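The RemoteException above is only PowerShell wrapping the first stderr line of a native command when 2>&1 is used; glog writes its INFO output to stderr, and the run itself continues normally below. The same job can also be driven from pycaffe, which avoids the PowerShell error records entirely. A minimal sketch, assuming the Python bindings are built and on PYTHONPATH (paths are the ones from the command above):

    import caffe

    caffe.set_mode_cpu()  # matches solver_mode: CPU in the solver dump below

    # Load the solver and copy the Places205-CNN weights, mirroring
    # `caffe train --solver ... --weights ...`
    solver = caffe.SGDSolver('C:/Users/mahtab/Desktop/finetune_mitindoor67/solver.prototxt')
    solver.net.copy_from('C:/Users/mahtab/Desktop/finetune_mitindoor67/places205CNN_iter_300000_upgraded.caffemodel')
    solver.solve()  # runs to max_iter, snapshotting per the solver settings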
- I0412 04:12:22.239899 4704 solver.cpp:44] Initializing solver from parameters:
- test_iter: 134
- test_interval: 268
- base_lr: 1e-07
- display: 50
- max_iter: 100000
- lr_policy: "step"
- gamma: 0.1
- momentum: 0.9
- weight_decay: 0.0005
- stepsize: 20000
- snapshot: 10000
- snapshot_prefix: "C:/Users/mahtab/Desktop/finetune_mitindoor67"
- solver_mode: CPU
- net: "C:/Users/mahtab/Desktop/finetune_mitindoor67/SceneRecognitionCaffeNet.prototxt"
- train_state {
- level: 0
- stage: ""
- }
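Given the batch sizes dumped later in the net (batch_size: 20 for TRAIN, 10 for TEST), these settings mean a test pass every 268 × 20 = 5,360 training images, with each test pass covering 134 × 10 = 1,340 validation images; snapshots are written every 10,000 iterations under the snapshot_prefix shown.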
- I0412 04:12:22.239899 4704 solver.cpp:87] Creating training net from net file:
- C:/Users/mahtab/Desktop/finetune_mitindoor67/SceneRecognitionCaffeNet.prototxt
- I0412 04:12:22.239899 4704 net.cpp:294] The NetState phase (0) differed from the phase (1)
- specified by a rule in layer MITindoor67
- I0412 04:12:22.239899 4704 net.cpp:294] The NetState phase (0) differed from the phase (1)
- specified by a rule in layer accuracy
- I0412 04:12:22.239899 4704 net.cpp:51] Initializing net from parameters:
- name: "SceneRecognitionCaffeNet"
- state {
- phase: TRAIN
- level: 0
- stage: ""
- }
- layer {
- name: "MITindoor67"
- type: "Data"
- top: "data"
- top: "label"
- include {
- phase: TRAIN
- }
- transform_param {
- scale: 0.8
- mirror: true
- crop_size: 227
- mean_file: "mean.binaryproto"
- }
- data_param {
- source: "mydataset256_train_lmdb"
- batch_size: 20
- backend: LMDB
- }
- }
- layer {
- name: "conv1"
- type: "Convolution"
- bottom: "data"
- top: "conv1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 96
- kernel_size: 11
- stride: 4
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
- }
- layer {
- name: "relu1"
- type: "ReLU"
- bottom: "conv1"
- top: "conv1"
- }
- layer {
- name: "pool1"
- type: "Pooling"
- bottom: "conv1"
- top: "pool1"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
- }
- layer {
- name: "norm1"
- type: "LRN"
- bottom: "pool1"
- top: "norm1"
- lrn_param {
- local_size: 5
- alpha: 0.0001
- beta: 0.75
- }
- }
- layer {
- name: "conv2"
- type: "Convolution"
- bottom: "norm1"
- top: "conv2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 256
- pad: 2
- kernel_size: 5
- group: 2
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1
- }
- }
- }
- layer {
- name: "relu2"
- type: "ReLU"
- bottom: "conv2"
- top: "conv2"
- }
- layer {
- name: "pool2"
- type: "Pooling"
- bottom: "conv2"
- top: "pool2"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
- }
- layer {
- name: "norm2"
- type: "LRN"
- bottom: "pool2"
- top: "norm2"
- lrn_param {
- local_size: 5
- alpha: 0.0001
- beta: 0.75
- }
- }
- layer {
- name: "conv3"
- type: "Convolution"
- bottom: "norm2"
- top: "conv3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 384
- pad: 1
- kernel_size: 3
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
- }
- layer {
- name: "relu3"
- type: "ReLU"
- bottom: "conv3"
- top: "conv3"
- }
- layer {
- name: "conv4"
- type: "Convolution"
- bottom: "conv3"
- top: "conv4"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 384
- pad: 1
- kernel_size: 3
- group: 2
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1
- }
- }
- }
- layer {
- name: "relu4"
- type: "ReLU"
- bottom: "conv4"
- top: "conv4"
- }
- layer {
- name: "conv5"
- type: "Convolution"
- bottom: "conv4"
- top: "conv5"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 256
- pad: 1
- kernel_size: 3
- group: 2
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1
- }
- }
- }
- layer {
- name: "relu5"
- type: "ReLU"
- bottom: "conv5"
- top: "conv5"
- }
- layer {
- name: "pool5"
- type: "Pooling"
- bottom: "conv5"
- top: "pool5"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
- }
- layer {
- name: "pool5_flatten"
- type: "Flatten"
- bottom: "pool5"
- top: "pool5_flatten"
- }
- layer {
- name: "fc6"
- type: "Convolution"
- bottom: "pool5_flatten"
- top: "fc6"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 4096
- pad: 0
- kernel_size: 7
- stride: 1
- }
- }
- layer {
- name: "relu6"
- type: "ReLU"
- bottom: "fc6"
- top: "fc6"
- }
- layer {
- name: "drop6"
- type: "Dropout"
- bottom: "fc6"
- top: "fc6"
- dropout_param {
- dropout_ratio: 0.5
- }
- }
- layer {
- name: "fc7"
- type: "Convolution"
- bottom: "fc6"
- top: "fc7"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 4096
- kernel_size: 1
- stride: 1
- }
- }
- layer {
- name: "relu7"
- type: "ReLU"
- bottom: "fc7"
- top: "fc7"
- }
- layer {
- name: "drop7"
- type: "Dropout"
- bottom: "fc7"
- top: "fc7"
- dropout_param {
- dropout_ratio: 0.5
- }
- }
- layer {
- name: "fc8_mitindoor67"
- type: "Convolution"
- bottom: "fc7"
- top: "fc8_mitindoor67"
- param {
- lr_mult: 10
- decay_mult: 0
- }
- param {
- lr_mult: 20
- decay_mult: 0
- }
- convolution_param {
- num_output: 67
- kernel_size: 1
- stride: 1
- }
- }
- layer {
- name: "loss"
- type: "SoftmaxWithLoss"
- bottom: "fc8_mitindoor67"
- bottom: "label"
- top: "loss"
- }
- I0412 04:12:22.239899 4704 layer_factory.cpp:58] Creating layer MITindoor67
- I0412 04:12:22.239899 4704 db_lmdb.cpp:40] Opened lmdb mydataset256_train_lmdb
- I0412 04:12:22.239899 4704 net.cpp:84] Creating Layer MITindoor67
- I0412 04:12:22.239899 4704 net.cpp:380] MITindoor67 -> data
- I0412 04:12:22.239899 4704 net.cpp:380] MITindoor67 -> label
- I0412 04:12:22.239899 4704 data_transformer.cpp:25] Loading mean file from: mean.binaryproto
- I0412 04:12:22.257618 4704 common.cpp:36] System entropy source not available, using fallback
- algorithm to generate seed instead.
- I0412 04:12:22.257618 4704 data_layer.cpp:45] output data size: 20,3,227,227
- I0412 04:12:22.270828 4704 net.cpp:122] Setting up MITindoor67
- I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 3 227 227 (3091740)
- I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 (20)
- I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 12367040
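"Memory required for data" is a running total of the top-blob sizes, counted once per layer output (in-place tops included) at 4 bytes per single-precision value. For the data layer this is (20 × 3 × 227 × 227 + 20) × 4 = (3,091,740 + 20) × 4 = 12,367,040 bytes, matching the two Top shapes just above; parameter blobs (the convolution weights) are not included in this counter.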
- I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer conv1
- I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer conv1
- I0412 04:12:22.270828 4704 net.cpp:406] conv1 <- data
- I0412 04:12:22.270828 4704 net.cpp:380] conv1 -> conv1
- I0412 04:12:22.270828 4704 net.cpp:122] Setting up conv1
- I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 96 55 55 (5808000)
- I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 35599040
- I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer relu1
- I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer relu1
- I0412 04:12:22.270828 4704 net.cpp:406] relu1 <- conv1
- I0412 04:12:22.270828 4704 net.cpp:367] relu1 -> conv1 (in-place)
- I0412 04:12:22.270828 4704 net.cpp:122] Setting up relu1
- I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 96 55 55 (5808000)
- I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 58831040
- I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer pool1
- I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer pool1
- I0412 04:12:22.270828 4704 net.cpp:406] pool1 <- conv1
- I0412 04:12:22.270828 4704 net.cpp:380] pool1 -> pool1
- I0412 04:12:22.270828 4704 net.cpp:122] Setting up pool1
- I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 96 27 27 (1399680)
- I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 64429760
- I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer norm1
- I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer norm1
- I0412 04:12:22.270828 4704 net.cpp:406] norm1 <- pool1
- I0412 04:12:22.270828 4704 net.cpp:380] norm1 -> norm1
- I0412 04:12:22.270828 4704 net.cpp:122] Setting up norm1
- I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 96 27 27 (1399680)
- I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 70028480
- I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer conv2
- I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer conv2
- I0412 04:12:22.270828 4704 net.cpp:406] conv2 <- norm1
- I0412 04:12:22.270828 4704 net.cpp:380] conv2 -> conv2
- I0412 04:12:22.270828 4704 net.cpp:122] Setting up conv2
- I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 256 27 27 (3732480)
- I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 84958400
- I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer relu2
- I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer relu2
- I0412 04:12:22.270828 4704 net.cpp:406] relu2 <- conv2
- I0412 04:12:22.270828 4704 net.cpp:367] relu2 -> conv2 (in-place)
- I0412 04:12:22.270828 4704 net.cpp:122] Setting up relu2
- I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 256 27 27 (3732480)
- I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 99888320
- I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer pool2
- I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer pool2
- I0412 04:12:22.270828 4704 net.cpp:406] pool2 <- conv2
- I0412 04:12:22.270828 4704 net.cpp:380] pool2 -> pool2
- I0412 04:12:22.270828 4704 net.cpp:122] Setting up pool2
- I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 256 13 13 (865280)
- I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 103349440
- I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer norm2
- I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer norm2
- I0412 04:12:22.270828 4704 net.cpp:406] norm2 <- pool2
- I0412 04:12:22.270828 4704 net.cpp:380] norm2 -> norm2
- I0412 04:12:22.270828 4704 net.cpp:122] Setting up norm2
- I0412 04:12:22.270828 4704 net.cpp:129] Top shape: 20 256 13 13 (865280)
- I0412 04:12:22.270828 4704 net.cpp:137] Memory required for data: 106810560
- I0412 04:12:22.270828 4704 layer_factory.cpp:58] Creating layer conv3
- I0412 04:12:22.270828 4704 net.cpp:84] Creating Layer conv3
- I0412 04:12:22.270828 4704 net.cpp:406] conv3 <- norm2
- I0412 04:12:22.270828 4704 net.cpp:380] conv3 -> conv3
- I0412 04:12:22.286504 4704 net.cpp:122] Setting up conv3
- I0412 04:12:22.286504 4704 net.cpp:129] Top shape: 20 384 13 13 (1297920)
- I0412 04:12:22.286504 4704 net.cpp:137] Memory required for data: 112002240
- I0412 04:12:22.286504 4704 layer_factory.cpp:58] Creating layer relu3
- I0412 04:12:22.286504 4704 net.cpp:84] Creating Layer relu3
- I0412 04:12:22.286504 4704 net.cpp:406] relu3 <- conv3
- I0412 04:12:22.286504 4704 net.cpp:367] relu3 -> conv3 (in-place)
- I0412 04:12:22.286504 4704 net.cpp:122] Setting up relu3
- I0412 04:12:22.286504 4704 net.cpp:129] Top shape: 20 384 13 13 (1297920)
- I0412 04:12:22.286504 4704 net.cpp:137] Memory required for data: 117193920
- I0412 04:12:22.286504 4704 layer_factory.cpp:58] Creating layer conv4
- I0412 04:12:22.286504 4704 net.cpp:84] Creating Layer conv4
- I0412 04:12:22.286504 4704 net.cpp:406] conv4 <- conv3
- I0412 04:12:22.286504 4704 net.cpp:380] conv4 -> conv4
- I0412 04:12:22.286504 4704 net.cpp:122] Setting up conv4
- I0412 04:12:22.286504 4704 net.cpp:129] Top shape: 20 384 13 13 (1297920)
- I0412 04:12:22.286504 4704 net.cpp:137] Memory required for data: 122385600
- I0412 04:12:22.286504 4704 layer_factory.cpp:58] Creating layer relu4
- I0412 04:12:22.286504 4704 net.cpp:84] Creating Layer relu4
- I0412 04:12:22.286504 4704 net.cpp:406] relu4 <- conv4
- I0412 04:12:22.286504 4704 net.cpp:367] relu4 -> conv4 (in-place)
- I0412 04:12:22.286504 4704 net.cpp:122] Setting up relu4
- I0412 04:12:22.286504 4704 net.cpp:129] Top shape: 20 384 13 13 (1297920)
- I0412 04:12:22.286504 4704 net.cpp:137] Memory required for data: 127577280
- I0412 04:12:22.286504 4704 layer_factory.cpp:58] Creating layer conv5
- I0412 04:12:22.286504 4704 net.cpp:84] Creating Layer conv5
- I0412 04:12:22.286504 4704 net.cpp:406] conv5 <- conv4
- I0412 04:12:22.286504 4704 net.cpp:380] conv5 -> conv5
- I0412 04:12:22.302098 4704 net.cpp:122] Setting up conv5
- I0412 04:12:22.302098 4704 net.cpp:129] Top shape: 20 256 13 13 (865280)
- I0412 04:12:22.302098 4704 net.cpp:137] Memory required for data: 131038400
- I0412 04:12:22.302098 4704 layer_factory.cpp:58] Creating layer relu5
- I0412 04:12:22.302098 4704 net.cpp:84] Creating Layer relu5
- I0412 04:12:22.302098 4704 net.cpp:406] relu5 <- conv5
- I0412 04:12:22.302098 4704 net.cpp:367] relu5 -> conv5 (in-place)
- I0412 04:12:22.302098 4704 net.cpp:122] Setting up relu5
- I0412 04:12:22.302098 4704 net.cpp:129] Top shape: 20 256 13 13 (865280)
- I0412 04:12:22.302098 4704 net.cpp:137] Memory required for data: 134499520
- I0412 04:12:22.302098 4704 layer_factory.cpp:58] Creating layer pool5
- I0412 04:12:22.302098 4704 net.cpp:84] Creating Layer pool5
- I0412 04:12:22.302098 4704 net.cpp:406] pool5 <- conv5
- I0412 04:12:22.302098 4704 net.cpp:380] pool5 -> pool5
- I0412 04:12:22.302098 4704 net.cpp:122] Setting up pool5
- I0412 04:12:22.302098 4704 net.cpp:129] Top shape: 20 256 6 6 (184320)
- I0412 04:12:22.302098 4704 net.cpp:137] Memory required for data: 135236800
- I0412 04:12:22.302098 4704 layer_factory.cpp:58] Creating layer pool5_flatten
- I0412 04:12:22.302098 4704 net.cpp:84] Creating Layer pool5_flatten
- I0412 04:12:22.302098 4704 net.cpp:406] pool5_flatten <- pool5
- I0412 04:12:22.302098 4704 net.cpp:380] pool5_flatten -> pool5_flatten
- I0412 04:12:22.302098 4704 net.cpp:122] Setting up pool5_flatten
- I0412 04:12:22.302098 4704 net.cpp:129] Top shape: 20 9216 (184320)
- I0412 04:12:22.302098 4704 net.cpp:137] Memory required for data: 135974080
- I0412 04:12:22.302098 4704 layer_factory.cpp:58] Creating layer fc6
- I0412 04:12:22.302098 4704 net.cpp:84] Creating Layer fc6
- I0412 04:12:22.302098 4704 net.cpp:406] fc6 <- pool5_flatten
- I0412 04:12:22.302098 4704 net.cpp:380] fc6 -> fc6
- I0412 04:12:22.333617 4704 net.cpp:122] Setting up fc6
- I0412 04:12:22.333617 4704 net.cpp:129] Top shape: 20 4096 (81920)
- I0412 04:12:22.333617 4704 net.cpp:137] Memory required for data: 136301760
- I0412 04:12:22.333617 4704 layer_factory.cpp:58] Creating layer relu6
- I0412 04:12:22.333617 4704 net.cpp:84] Creating Layer relu6
- I0412 04:12:22.333617 4704 net.cpp:406] relu6 <- fc6
- I0412 04:12:22.333617 4704 net.cpp:367] relu6 -> fc6 (in-place)
- I0412 04:12:22.333617 4704 net.cpp:122] Setting up relu6
- I0412 04:12:22.333617 4704 net.cpp:129] Top shape: 20 4096 (81920)
- I0412 04:12:22.333617 4704 net.cpp:137] Memory required for data: 136629440
- I0412 04:12:22.333617 4704 layer_factory.cpp:58] Creating layer drop6
- I0412 04:12:22.333617 4704 net.cpp:84] Creating Layer drop6
- I0412 04:12:22.333617 4704 net.cpp:406] drop6 <- fc6
- I0412 04:12:22.333617 4704 net.cpp:367] drop6 -> fc6 (in-place)
- I0412 04:12:22.333617 4704 net.cpp:122] Setting up drop6
- I0412 04:12:22.333617 4704 net.cpp:129] Top shape: 20 4096 (81920)
- I0412 04:12:22.333617 4704 net.cpp:137] Memory required for data: 136957120
- I0412 04:12:22.333617 4704 layer_factory.cpp:58] Creating layer fc7
- I0412 04:12:22.333617 4704 net.cpp:84] Creating Layer fc7
- I0412 04:12:22.333617 4704 net.cpp:406] fc7 <- fc6
- I0412 04:12:22.333617 4704 net.cpp:380] fc7 -> fc7
- I0412 04:12:22.364234 4704 net.cpp:122] Setting up fc7
- I0412 04:12:22.364234 4704 net.cpp:129] Top shape: 20 4096 (81920)
- I0412 04:12:22.364234 4704 net.cpp:137] Memory required for data: 137284800
- I0412 04:12:22.364234 4704 layer_factory.cpp:58] Creating layer relu7
- I0412 04:12:22.364234 4704 net.cpp:84] Creating Layer relu7
- I0412 04:12:22.364234 4704 net.cpp:406] relu7 <- fc7
- I0412 04:12:22.364234 4704 net.cpp:367] relu7 -> fc7 (in-place)
- I0412 04:12:22.364234 4704 net.cpp:122] Setting up relu7
- I0412 04:12:22.364234 4704 net.cpp:129] Top shape: 20 4096 (81920)
- I0412 04:12:22.364234 4704 net.cpp:137] Memory required for data: 137612480
- I0412 04:12:22.364234 4704 layer_factory.cpp:58] Creating layer drop7
- I0412 04:12:22.364234 4704 net.cpp:84] Creating Layer drop7
- I0412 04:12:22.364234 4704 net.cpp:406] drop7 <- fc7
- I0412 04:12:22.364234 4704 net.cpp:367] drop7 -> fc7 (in-place)
- I0412 04:12:22.364234 4704 net.cpp:122] Setting up drop7
- I0412 04:12:22.364234 4704 net.cpp:129] Top shape: 20 4096 (81920)
- I0412 04:12:22.364234 4704 net.cpp:137] Memory required for data: 137940160
- I0412 04:12:22.364234 4704 layer_factory.cpp:58] Creating layer fc8_mitindoor67
- I0412 04:12:22.364234 4704 net.cpp:84] Creating Layer fc8_mitindoor67
- I0412 04:12:22.364234 4704 net.cpp:406] fc8_mitindoor67 <- fc7
- I0412 04:12:22.364234 4704 net.cpp:380] fc8_mitindoor67 -> fc8_mitindoor67
- I0412 04:12:22.364234 4704 net.cpp:122] Setting up fc8_mitindoor67
- I0412 04:12:22.364234 4704 net.cpp:129] Top shape: 20 67 (1340)
- I0412 04:12:22.364234 4704 net.cpp:137] Memory required for data: 137945520
- I0412 04:12:22.364234 4704 layer_factory.cpp:58] Creating layer loss
- I0412 04:12:22.364234 4704 net.cpp:84] Creating Layer loss
- I0412 04:12:22.364234 4704 net.cpp:406] loss <- fc8_mitindoor67
- I0412 04:12:22.364234 4704 net.cpp:406] loss <- label
- I0412 04:12:22.364234 4704 net.cpp:380] loss -> loss
- I0412 04:12:22.364234 4704 layer_factory.cpp:58] Creating layer loss
- I0412 04:12:22.364234 4704 net.cpp:122] Setting up loss
- I0412 04:12:22.364234 4704 net.cpp:129] Top shape: (1)
- I0412 04:12:22.364234 4704 net.cpp:132] with loss weight 1
- I0412 04:12:22.364234 4704 net.cpp:137] Memory required for data: 137945524
- I0412 04:12:22.364234 4704 net.cpp:198] loss needs backward computation.
- I0412 04:12:22.364234 4704 net.cpp:198] fc8_mitindoor67 needs backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] drop7 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] relu7 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] fc7 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] drop6 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] relu6 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] fc6 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] pool5_flatten does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] pool5 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] relu5 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] conv5 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] relu4 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] conv4 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] relu3 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] conv3 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] norm2 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] pool2 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] relu2 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] conv2 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] norm1 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] pool1 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] relu1 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] conv1 does not need backward computation.
- I0412 04:12:22.364234 4704 net.cpp:200] MITindoor67 does not need backward computation.
- I0412 04:12:22.536056 4704 net.cpp:242] This network produces output loss
- I0412 04:12:22.536056 4704 net.cpp:255] Network initialization done.
- I0412 04:12:22.536056 4704 solver.cpp:172] Creating test net (#0) specified by net file:
- C:/Users/mahtab/Desktop/finetune_mitindoor67/SceneRecognitionCaffeNet.prototxt
- I0412 04:12:22.536056 4704 net.cpp:294] The NetState phase (1) differed from the phase (0)
- specified by a rule in layer MITindoor67
- I0412 04:12:22.536056 4704 net.cpp:51] Initializing net from parameters:
- name: "SceneRecognitionCaffeNet"
- state {
- phase: TEST
- }
- layer {
- name: "MITindoor67"
- type: "Data"
- top: "data"
- top: "label"
- include {
- phase: TEST
- }
- transform_param {
- mirror: false
- crop_size: 227
- mean_file: "mean.binaryproto"
- }
- data_param {
- source: "mydataset256_test_lmdb"
- batch_size: 10
- backend: LMDB
- }
- }
- layer {
- name: "conv1"
- type: "Convolution"
- bottom: "data"
- top: "conv1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 96
- kernel_size: 11
- stride: 4
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
- }
- layer {
- name: "relu1"
- type: "ReLU"
- bottom: "conv1"
- top: "conv1"
- }
- layer {
- name: "pool1"
- type: "Pooling"
- bottom: "conv1"
- top: "pool1"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
- }
- layer {
- name: "norm1"
- type: "LRN"
- bottom: "pool1"
- top: "norm1"
- lrn_param {
- local_size: 5
- alpha: 0.0001
- beta: 0.75
- }
- }
- layer {
- name: "conv2"
- type: "Convolution"
- bottom: "norm1"
- top: "conv2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 256
- pad: 2
- kernel_size: 5
- group: 2
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1
- }
- }
- }
- layer {
- name: "relu2"
- type: "ReLU"
- bottom: "conv2"
- top: "conv2"
- }
- layer {
- name: "pool2"
- type: "Pooling"
- bottom: "conv2"
- top: "pool2"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
- }
- layer {
- name: "norm2"
- type: "LRN"
- bottom: "pool2"
- top: "norm2"
- lrn_param {
- local_size: 5
- alpha: 0.0001
- beta: 0.75
- }
- }
- layer {
- name: "conv3"
- type: "Convolution"
- bottom: "norm2"
- top: "conv3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 384
- pad: 1
- kernel_size: 3
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
- }
- layer {
- name: "relu3"
- type: "ReLU"
- bottom: "conv3"
- top: "conv3"
- }
- layer {
- name: "conv4"
- type: "Convolution"
- bottom: "conv3"
- top: "conv4"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 384
- pad: 1
- kernel_size: 3
- group: 2
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1
- }
- }
- }
- layer {
- name: "relu4"
- type: "ReLU"
- bottom: "conv4"
- top: "conv4"
- }
- layer {
- name: "conv5"
- type: "Convolution"
- bottom: "conv4"
- top: "conv5"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 256
- pad: 1
- kernel_size: 3
- group: 2
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1
- }
- }
- }
- layer {
- name: "relu5"
- type: "ReLU"
- bottom: "conv5"
- top: "conv5"
- }
- layer {
- name: "pool5"
- type: "Pooling"
- bottom: "conv5"
- top: "pool5"
- pooling_param {
- pool: MAX
- kernel_size: 3
- stride: 2
- }
- }
- layer {
- name: "pool5_flatten"
- type: "Flatten"
- bottom: "pool5"
- top: "pool5_flatten"
- }
- layer {
- name: "fc6"
- type: "Convolution"
- bottom: "pool5_flatten"
- top: "fc6"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 4096
- pad: 0
- kernel_size: 7
- stride: 1
- }
- }
- layer {
- name: "relu6"
- type: "ReLU"
- bottom: "fc6"
- top: "fc6"
- }
- layer {
- name: "drop6"
- type: "Dropout"
- bottom: "fc6"
- top: "fc6"
- dropout_param {
- dropout_ratio: 0.5
- }
- }
- layer {
- name: "fc7"
- type: "Convolution"
- bottom: "fc6"
- top: "fc7"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- convolution_param {
- num_output: 4096
- kernel_size: 1
- stride: 1
- }
- }
- layer {
- name: "relu7"
- type: "ReLU"
- bottom: "fc7"
- top: "fc7"
- }
- layer {
- name: "drop7"
- type: "Dropout"
- bottom: "fc7"
- top: "fc7"
- dropout_param {
- dropout_ratio: 0.5
- }
- }
- layer {
- name: "fc8_mitindoor67"
- type: "Convolution"
- bottom: "fc7"
- top: "fc8_mitindoor67"
- param {
- lr_mult: 10
- decay_mult: 0
- }
- param {
- lr_mult: 20
- decay_mult: 0
- }
- convolution_param {
- num_output: 67
- kernel_size: 1
- stride: 1
- }
- }
- layer {
- name: "loss"
- type: "SoftmaxWithLoss"
- bottom: "fc8_mitindoor67"
- bottom: "label"
- top: "loss"
- }
- layer {
- name: "accuracy"
- type: "Accuracy"
- bottom: "fc8_mitindoor67"
- bottom: "label"
- top: "accuracy"
- include {
- phase: TEST
- }
- }
- I0412 04:12:22.598542 4704 layer_factory.cpp:58] Creating layer MITindoor67
- I0412 04:12:22.598542 4704 db_lmdb.cpp:40] Opened lmdb mydataset256_test_lmdb
- I0412 04:12:22.598542 4704 net.cpp:84] Creating Layer MITindoor67
- I0412 04:12:22.598542 4704 net.cpp:380] MITindoor67 -> data
- I0412 04:12:22.598542 4704 net.cpp:380] MITindoor67 -> label
- I0412 04:12:22.598542 4704 data_transformer.cpp:25] Loading mean file from: mean.binaryproto
- I0412 04:12:22.598542 4704 data_layer.cpp:45] output data size: 10,3,227,227
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up MITindoor67
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 3 227 227 (1545870)
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 (10)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 6183520
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer label_MITindoor67_1_split
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer label_MITindoor67_1_split
- I0412 04:12:22.614207 4704 net.cpp:406] label_MITindoor67_1_split <- label
- I0412 04:12:22.614207 4704 net.cpp:380] label_MITindoor67_1_split -> label_MITindoor67_1_split_0
- I0412 04:12:22.614207 4704 net.cpp:380] label_MITindoor67_1_split -> label_MITindoor67_1_split_1
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up label_MITindoor67_1_split
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 (10)
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 (10)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 6183600
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer conv1
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer conv1
- I0412 04:12:22.614207 4704 net.cpp:406] conv1 <- data
- I0412 04:12:22.614207 4704 net.cpp:380] conv1 -> conv1
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up conv1
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 96 55 55 (2904000)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 17799600
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer relu1
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer relu1
- I0412 04:12:22.614207 4704 net.cpp:406] relu1 <- conv1
- I0412 04:12:22.614207 4704 net.cpp:367] relu1 -> conv1 (in-place)
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up relu1
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 96 55 55 (2904000)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 29415600
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer pool1
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer pool1
- I0412 04:12:22.614207 4704 net.cpp:406] pool1 <- conv1
- I0412 04:12:22.614207 4704 net.cpp:380] pool1 -> pool1
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up pool1
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 96 27 27 (699840)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 32214960
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer norm1
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer norm1
- I0412 04:12:22.614207 4704 net.cpp:406] norm1 <- pool1
- I0412 04:12:22.614207 4704 net.cpp:380] norm1 -> norm1
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up norm1
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 96 27 27 (699840)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 35014320
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer conv2
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer conv2
- I0412 04:12:22.614207 4704 net.cpp:406] conv2 <- norm1
- I0412 04:12:22.614207 4704 net.cpp:380] conv2 -> conv2
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up conv2
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 256 27 27 (1866240)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 42479280
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer relu2
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer relu2
- I0412 04:12:22.614207 4704 net.cpp:406] relu2 <- conv2
- I0412 04:12:22.614207 4704 net.cpp:367] relu2 -> conv2 (in-place)
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up relu2
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 256 27 27 (1866240)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 49944240
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer pool2
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer pool2
- I0412 04:12:22.614207 4704 net.cpp:406] pool2 <- conv2
- I0412 04:12:22.614207 4704 net.cpp:380] pool2 -> pool2
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up pool2
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 256 13 13 (432640)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 51674800
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer norm2
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer norm2
- I0412 04:12:22.614207 4704 net.cpp:406] norm2 <- pool2
- I0412 04:12:22.614207 4704 net.cpp:380] norm2 -> norm2
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up norm2
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 256 13 13 (432640)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 53405360
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer conv3
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer conv3
- I0412 04:12:22.614207 4704 net.cpp:406] conv3 <- norm2
- I0412 04:12:22.614207 4704 net.cpp:380] conv3 -> conv3
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up conv3
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 384 13 13 (648960)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 56001200
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer relu3
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer relu3
- I0412 04:12:22.614207 4704 net.cpp:406] relu3 <- conv3
- I0412 04:12:22.614207 4704 net.cpp:367] relu3 -> conv3 (in-place)
- I0412 04:12:22.614207 4704 net.cpp:122] Setting up relu3
- I0412 04:12:22.614207 4704 net.cpp:129] Top shape: 10 384 13 13 (648960)
- I0412 04:12:22.614207 4704 net.cpp:137] Memory required for data: 58597040
- I0412 04:12:22.614207 4704 layer_factory.cpp:58] Creating layer conv4
- I0412 04:12:22.614207 4704 net.cpp:84] Creating Layer conv4
- I0412 04:12:22.614207 4704 net.cpp:406] conv4 <- conv3
- I0412 04:12:22.614207 4704 net.cpp:380] conv4 -> conv4
- I0412 04:12:22.629875 4704 net.cpp:122] Setting up conv4
- I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 384 13 13 (648960)
- I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 61192880
- I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer relu4
- I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer relu4
- I0412 04:12:22.629875 4704 net.cpp:406] relu4 <- conv4
- I0412 04:12:22.629875 4704 net.cpp:367] relu4 -> conv4 (in-place)
- I0412 04:12:22.629875 4704 net.cpp:122] Setting up relu4
- I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 384 13 13 (648960)
- I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 63788720
- I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer conv5
- I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer conv5
- I0412 04:12:22.629875 4704 net.cpp:406] conv5 <- conv4
- I0412 04:12:22.629875 4704 net.cpp:380] conv5 -> conv5
- I0412 04:12:22.629875 4704 net.cpp:122] Setting up conv5
- I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 256 13 13 (432640)
- I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 65519280
- I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer relu5
- I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer relu5
- I0412 04:12:22.629875 4704 net.cpp:406] relu5 <- conv5
- I0412 04:12:22.629875 4704 net.cpp:367] relu5 -> conv5 (in-place)
- I0412 04:12:22.629875 4704 net.cpp:122] Setting up relu5
- I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 256 13 13 (432640)
- I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 67249840
- I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer pool5
- I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer pool5
- I0412 04:12:22.629875 4704 net.cpp:406] pool5 <- conv5
- I0412 04:12:22.629875 4704 net.cpp:380] pool5 -> pool5
- I0412 04:12:22.629875 4704 net.cpp:122] Setting up pool5
- I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 256 6 6 (92160)
- I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 67618480
- I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer pool5_flatten
- I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer pool5_flatten
- I0412 04:12:22.629875 4704 net.cpp:406] pool5_flatten <- pool5
- I0412 04:12:22.629875 4704 net.cpp:380] pool5_flatten -> pool5_flatten
- I0412 04:12:22.629875 4704 net.cpp:122] Setting up pool5_flatten
- I0412 04:12:22.629875 4704 net.cpp:129] Top shape: 10 9216 (92160)
- I0412 04:12:22.629875 4704 net.cpp:137] Memory required for data: 67987120
- I0412 04:12:22.629875 4704 layer_factory.cpp:58] Creating layer fc6
- I0412 04:12:22.629875 4704 net.cpp:84] Creating Layer fc6
- I0412 04:12:22.629875 4704 net.cpp:406] fc6 <- pool5_flatten
- I0412 04:12:22.629875 4704 net.cpp:380] fc6 -> fc6
- I0412 04:12:22.677767 4704 net.cpp:122] Setting up fc6
- I0412 04:12:22.677767 4704 net.cpp:129] Top shape: 10 4096 (40960)
- I0412 04:12:22.677767 4704 net.cpp:137] Memory required for data: 68150960
- I0412 04:12:22.677767 4704 layer_factory.cpp:58] Creating layer relu6
- I0412 04:12:22.677767 4704 net.cpp:84] Creating Layer relu6
- I0412 04:12:22.677767 4704 net.cpp:406] relu6 <- fc6
- I0412 04:12:22.677767 4704 net.cpp:367] relu6 -> fc6 (in-place)
- I0412 04:12:22.677767 4704 net.cpp:122] Setting up relu6
- I0412 04:12:22.677767 4704 net.cpp:129] Top shape: 10 4096 (40960)
- I0412 04:12:22.677767 4704 net.cpp:137] Memory required for data: 68314800
- I0412 04:12:22.677767 4704 layer_factory.cpp:58] Creating layer drop6
- I0412 04:12:22.677767 4704 net.cpp:84] Creating Layer drop6
- I0412 04:12:22.677767 4704 net.cpp:406] drop6 <- fc6
- I0412 04:12:22.677767 4704 net.cpp:367] drop6 -> fc6 (in-place)
- I0412 04:12:22.677767 4704 net.cpp:122] Setting up drop6
- I0412 04:12:22.677767 4704 net.cpp:129] Top shape: 10 4096 (40960)
- I0412 04:12:22.677767 4704 net.cpp:137] Memory required for data: 68478640
- I0412 04:12:22.677767 4704 layer_factory.cpp:58] Creating layer fc7
- I0412 04:12:22.677767 4704 net.cpp:84] Creating Layer fc7
- I0412 04:12:22.677767 4704 net.cpp:406] fc7 <- fc6
- I0412 04:12:22.677767 4704 net.cpp:380] fc7 -> fc7
- I0412 04:12:22.692293 4704 net.cpp:122] Setting up fc7
- I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 4096 (40960)
- I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68642480
- I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer relu7
- I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer relu7
- I0412 04:12:22.692293 4704 net.cpp:406] relu7 <- fc7
- I0412 04:12:22.692293 4704 net.cpp:367] relu7 -> fc7 (in-place)
- I0412 04:12:22.692293 4704 net.cpp:122] Setting up relu7
- I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 4096 (40960)
- I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68806320
- I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer drop7
- I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer drop7
- I0412 04:12:22.692293 4704 net.cpp:406] drop7 <- fc7
- I0412 04:12:22.692293 4704 net.cpp:367] drop7 -> fc7 (in-place)
- I0412 04:12:22.692293 4704 net.cpp:122] Setting up drop7
- I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 4096 (40960)
- I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68970160
- I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer fc8_mitindoor67
- I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer fc8_mitindoor67
- I0412 04:12:22.692293 4704 net.cpp:406] fc8_mitindoor67 <- fc7
- I0412 04:12:22.692293 4704 net.cpp:380] fc8_mitindoor67 -> fc8_mitindoor67
- I0412 04:12:22.692293 4704 net.cpp:122] Setting up fc8_mitindoor67
- I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 67 (670)
- I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68972840
- I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer
- fc8_mitindoor67_fc8_mitindoor67_0_split
- I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer fc8_mitindoor67_fc8_mitindoor67_0_split
- I0412 04:12:22.692293 4704 net.cpp:406] fc8_mitindoor67_fc8_mitindoor67_0_split <-
- fc8_mitindoor67
- I0412 04:12:22.692293 4704 net.cpp:380] fc8_mitindoor67_fc8_mitindoor67_0_split ->
- fc8_mitindoor67_fc8_mitindoor67_0_split_0
- I0412 04:12:22.692293 4704 net.cpp:380] fc8_mitindoor67_fc8_mitindoor67_0_split ->
- fc8_mitindoor67_fc8_mitindoor67_0_split_1
- I0412 04:12:22.692293 4704 net.cpp:122] Setting up fc8_mitindoor67_fc8_mitindoor67_0_split
- I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 67 (670)
- I0412 04:12:22.692293 4704 net.cpp:129] Top shape: 10 67 (670)
- I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68978200
- I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer loss
- I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer loss
- I0412 04:12:22.692293 4704 net.cpp:406] loss <- fc8_mitindoor67_fc8_mitindoor67_0_split_0
- I0412 04:12:22.692293 4704 net.cpp:406] loss <- label_MITindoor67_1_split_0
- I0412 04:12:22.692293 4704 net.cpp:380] loss -> loss
- I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer loss
- I0412 04:12:22.692293 4704 net.cpp:122] Setting up loss
- I0412 04:12:22.692293 4704 net.cpp:129] Top shape: (1)
- I0412 04:12:22.692293 4704 net.cpp:132] with loss weight 1
- I0412 04:12:22.692293 4704 net.cpp:137] Memory required for data: 68978204
- I0412 04:12:22.692293 4704 layer_factory.cpp:58] Creating layer accuracy
- I0412 04:12:22.692293 4704 net.cpp:84] Creating Layer accuracy
- I0412 04:12:22.692293 4704 net.cpp:406] accuracy <- fc8_mitindoor67_fc8_mitindoor67_0_split_1
- I0412 04:12:22.692293 4704 net.cpp:406] accuracy <- label_MITindoor67_1_split_1
- I0412 04:12:22.692293 4704 net.cpp:380] accuracy -> accuracy
- I0412 04:12:22.692293 4704 net.cpp:122] Setting up accuracy
- I0412 04:12:22.707957 4704 net.cpp:129] Top shape: (1)
- I0412 04:12:22.707957 4704 net.cpp:137] Memory required for data: 68978208
- I0412 04:12:22.707957 4704 net.cpp:200] accuracy does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:198] loss needs backward computation.
- I0412 04:12:22.707957 4704 net.cpp:198] fc8_mitindoor67_fc8_mitindoor67_0_split needs backward
- computation.
- I0412 04:12:22.707957 4704 net.cpp:198] fc8_mitindoor67 needs backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] drop7 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] relu7 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] fc7 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] drop6 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] relu6 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] fc6 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] pool5_flatten does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] pool5 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] relu5 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] conv5 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] relu4 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] conv4 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] relu3 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] conv3 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] norm2 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] pool2 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] relu2 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] conv2 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] norm1 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] pool1 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] relu1 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] conv1 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:200] label_MITindoor67_1_split does not need backward
- computation.
- I0412 04:12:22.707957 4704 net.cpp:200] MITindoor67 does not need backward computation.
- I0412 04:12:22.707957 4704 net.cpp:242] This network produces output accuracy
- I0412 04:12:22.707957 4704 net.cpp:242] This network produces output loss
- I0412 04:12:22.707957 4704 net.cpp:255] Network initialization done.
- I0412 04:12:22.707957 4704 solver.cpp:56] Solver scaffolding done.
- I0412 04:12:22.707957 4704 caffe.cpp:155] Finetuning from
- C:/Users/mahtab/Desktop/finetune_mitindoor67/places205CNN_iter_300000_upgraded.caffemodel
- I0412 04:12:22.895382 4704 upgrade_proto.cpp:53] Attempting to upgrade input file specified
- using deprecated V1LayerParameter:
- C:/Users/mahtab/Desktop/finetune_mitindoor67/places205CNN_iter_300000_upgraded.caffemodel
- I0412 04:12:23.145401 4704 upgrade_proto.cpp:61] Successfully upgraded file specified using
- deprecated V1LayerParameter
- I0412 04:12:23.161070 4704 net.cpp:744] Ignoring source layer data
- I0412 04:12:23.192430 4704 net.cpp:744] Ignoring source layer fc8
- I0412 04:12:23.427315 4704 upgrade_proto.cpp:53] Attempting to upgrade input file specified
- using deprecated V1LayerParameter:
- C:/Users/mahtab/Desktop/finetune_mitindoor67/places205CNN_iter_300000_upgraded.caffemodel
- I0412 04:12:23.630440 4704 upgrade_proto.cpp:61] Successfully upgraded file specified using
- deprecated V1LayerParameter
- I0412 04:12:23.646035 4704 net.cpp:744] Ignoring source layer data
- I0412 04:12:23.677067 4704 net.cpp:744] Ignoring source layer fc8
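Weights are copied by layer name, so the source model's data layer and its 205-way classifier fc8 are skipped; the renamed fc8_mitindoor67 (67 outputs, the only layer with a nonzero lr_mult) is therefore initialized fresh rather than copied from Places205-CNN. Once a snapshot is written (snapshot: 10000 with the prefix above yields names of the form <snapshot_prefix>_iter_<N>.caffemodel), it can be reloaded from pycaffe; a rough sketch, where the snapshot filename is inferred from the prefix rather than taken from this log:

    import caffe

    caffe.set_mode_cpu()
    net = caffe.Net(
        'C:/Users/mahtab/Desktop/finetune_mitindoor67/SceneRecognitionCaffeNet.prototxt',
        'C:/Users/mahtab/Desktop/finetune_mitindoor67_iter_10000.caffemodel',  # assumed snapshot name
        caffe.TEST)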
- I0412 04:12:23.708406 4704 caffe.cpp:249] Starting Optimization
- I0412 04:12:23.708406 4704 solver.cpp:272] Solving SceneRecognitionCaffeNet
- I0412 04:12:23.708406 4704 solver.cpp:273] Learning Rate Policy: step
- I0412 04:12:23.770778 4704 solver.cpp:330] Iteration 0, Testing net (#0)
- I0412 04:13:47.208680 4232 data_layer.cpp:73] Restarting data prefetching from start.
- I0412 04:13:49.770946 4704 solver.cpp:397] Test net output #0: accuracy = 0
- I0412 04:13:49.770946 4704 solver.cpp:397] Test net output #1: loss = 4.20469 (* 1 = 4.20469
- loss)
- I0412 04:13:51.082967 4704 solver.cpp:218] Iteration 0 (0 iter/s, 87.374s/50 iters), loss = 4.20469
- I0412 04:13:51.082967 4704 solver.cpp:237] Train net output #0: loss = 4.20469 (* 1 = 4.20469 loss)
- I0412 04:13:51.082967 4704 sgd_solver.cpp:105] Iteration 0, lr = 1e-07
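With lr_policy: "step", the effective rate is base_lr × gamma^floor(iter / stepsize) = 1e-07 × 0.1^floor(iter / 20000), so the learning rate stays at 1e-07 for this whole excerpt and would first drop, to 1e-08, at iteration 20000.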
- I0412 04:14:58.489819 4704 solver.cpp:218] Iteration 50 (0.741774 iter/s, 67.406s/50 iters), loss = 4.04563
- I0412 04:14:58.489819 4704 solver.cpp:237] Train net output #0: loss = 4.04563 (* 1 = 4.04563 loss)
- I0412 04:14:58.489819 4704 sgd_solver.cpp:105] Iteration 50, lr = 1e-07
- I0412 04:16:05.783648 4704 solver.cpp:218] Iteration 100 (0.743019 iter/s, 67.293s/50 iters), loss = 3.89188
- I0412 04:16:05.783648 4704 solver.cpp:237] Train net output #0: loss = 3.89188 (* 1 = 3.89188 loss)
- I0412 04:16:05.783648 4704 sgd_solver.cpp:105] Iteration 100, lr = 1e-07
- I0412 04:17:13.099241 4704 solver.cpp:218] Iteration 150 (0.742776 iter/s, 67.315s/50 iters), loss = 3.69574
- I0412 04:17:13.099241 4704 solver.cpp:237] Train net output #0: loss = 3.69574 (* 1 = 3.69574 loss)
- I0412 04:17:13.099241 4704 sgd_solver.cpp:105] Iteration 150, lr = 1e-07
- I0412 04:18:20.946854 4704 solver.cpp:218] Iteration 200 (0.736952 iter/s, 67.847s/50 iters), loss = 3.53715
- I0412 04:18:20.946854 4704 solver.cpp:237] Train net output #0: loss = 3.53715 (* 1 = 3.53715 loss)
- I0412 04:18:20.946854 4704 sgd_solver.cpp:105] Iteration 200, lr = 1e-07
- I0412 04:19:28.125959 4704 solver.cpp:218] Iteration 250 (0.74428 iter/s, 67.179s/50 iters), loss = 3.3488
- I0412 04:19:28.125959 4704 solver.cpp:237] Train net output #0: loss = 3.3488 (* 1 = 3.3488 loss)
- I0412 04:19:28.125959 4704 sgd_solver.cpp:105] Iteration 250, lr = 1e-07
- I0412 04:19:51.017532 4704 solver.cpp:330] Iteration 268, Testing net (#0)
- I0412 04:21:16.677172 4704 solver.cpp:397] Test net output #0: accuracy = 1
- I0412 04:21:16.677172 4704 solver.cpp:397] Test net output #1: loss = 3.3788 (* 1 = 3.3788 loss)
- I0412 04:22:01.020563 4704 solver.cpp:218] Iteration 300 (0.327024 iter/s, 152.894s/50 iters), loss = 3.14588
- I0412 04:22:01.020563 4704 solver.cpp:237] Train net output #0: loss = 3.14588 (* 1 = 3.14588 loss)
- I0412 04:22:01.020563 4704 sgd_solver.cpp:105] Iteration 300, lr = 1e-07
- I0412 04:23:08.178277 4704 solver.cpp:218] Iteration 350 (0.744524 iter/s, 67.157s/50 iters), loss = 3.00576
- I0412 04:23:08.178277 4704 solver.cpp:237] Train net output #0: loss = 3.00576 (* 1 = 3.00576 loss)
- I0412 04:23:08.178277 4704 sgd_solver.cpp:105] Iteration 350, lr = 1e-07
- I0412 04:24:15.349581 4704 solver.cpp:218] Iteration 400 (0.744369 iter/s, 67.171s/50 iters), loss = 2.83526
- I0412 04:24:15.349581 4704 solver.cpp:237] Train net output #0: loss = 2.83526 (* 1 = 2.83526 loss)
- I0412 04:24:15.349581 4704 sgd_solver.cpp:105] Iteration 400, lr = 1e-07
- I0412 04:25:22.693616 4704 solver.cpp:218] Iteration 450 (0.742457 iter/s, 67.344s/50 iters), loss = 2.59777
- I0412 04:25:22.693616 4704 solver.cpp:237] Train net output #0: loss = 2.59777 (* 1 = 2.59777 loss)
- I0412 04:25:22.693616 4704 sgd_solver.cpp:105] Iteration 450, lr = 1e-07
- I0412 04:26:29.989542 4704 solver.cpp:218] Iteration 500 (0.742997 iter/s, 67.295s/50 iters), loss = 2.58771
- I0412 04:26:29.989542 4704 solver.cpp:237] Train net output #0: loss = 2.58771 (* 1 = 2.58771 loss)
- I0412 04:26:29.989542 4704 sgd_solver.cpp:105] Iteration 500, lr = 1e-07
- I0412 04:27:17.091591 4704 solver.cpp:330] Iteration 536, Testing net (#0)
- I0412 04:28:42.646239 4704 solver.cpp:397] Test net output #0: accuracy = 1
- I0412 04:28:42.646239 4704 solver.cpp:397] Test net output #1: loss = 2.58355 (* 1 = 2.58355 loss)
- I0412 04:29:02.723795 4704 solver.cpp:218] Iteration 550 (0.327367 iter/s, 152.734s/50 iters), loss = 2.41916
- I0412 04:29:02.723795 4704 solver.cpp:237] Train net output #0: loss = 2.41916 (* 1 = 2.41916 loss)
- I0412 04:29:02.723795 4704 sgd_solver.cpp:105] Iteration 550, lr = 1e-07
- I0412 04:30:10.005543 4704 solver.cpp:218] Iteration 600 (0.743152 iter/s, 67.281s/50 iters), loss = 2.17474
- I0412 04:30:10.005543 4704 solver.cpp:237] Train net output #0: loss = 2.17474 (* 1 = 2.17474 loss)
- I0412 04:30:10.005543 4704 sgd_solver.cpp:105] Iteration 600, lr = 1e-07
- I0412 04:31:17.192559 4704 solver.cpp:218] Iteration 650 (0.744192 iter/s, 67.187s/50 iters), loss = 2.06964
- I0412 04:31:17.192559 4704 solver.cpp:237] Train net output #0: loss = 2.06964 (* 1 = 2.06964 loss)
- I0412 04:31:17.192559 4704 sgd_solver.cpp:105] Iteration 650, lr = 1e-07
- I0412 04:32:24.360908 4704 solver.cpp:218] Iteration 700 (0.744402 iter/s, 67.168s/50 iters), loss = 1.89071
- I0412 04:32:24.360908 4704 solver.cpp:237] Train net output #0: loss = 1.89071 (* 1 = 1.89071 loss)
- I0412 04:32:24.360908 4704 sgd_solver.cpp:105] Iteration 700, lr = 1e-07
- I0412 04:33:31.505738 4704 solver.cpp:218] Iteration 750 (0.744668 iter/s, 67.144s/50 iters), loss = 1.87087
- I0412 04:33:31.505738 4704 solver.cpp:237] Train net output #0: loss = 1.87087 (* 1 = 1.87087 loss)
- I0412 04:33:31.505738 4704 sgd_solver.cpp:105] Iteration 750, lr = 1e-07
- I0412 04:34:38.818370 4704 solver.cpp:218] Iteration 800 (0.74281 iter/s, 67.312s/50 iters), loss = 1.69662
- I0412 04:34:38.818370 4704 solver.cpp:237] Train net output #0: loss = 1.69662 (* 1 = 1.69662 loss)
- I0412 04:34:38.818370 4704 sgd_solver.cpp:105] Iteration 800, lr = 1e-07
- I0412 04:34:42.958595 4704 solver.cpp:330] Iteration 804, Testing net (#0)
- I0412 04:36:08.349354 4704 solver.cpp:397] Test net output #0: accuracy = 1
- I0412 04:36:08.365051 4704 solver.cpp:397] Test net output #1: loss = 1.90856 (* 1 = 1.90856 loss)
- I0412 04:37:11.380419 4704 solver.cpp:218] Iteration 850 (0.327736 iter/s, 152.562s/50 iters), loss = 1.68572
- I0412 04:37:11.380419 4704 solver.cpp:237] Train net output #0: loss = 1.68572 (* 1 = 1.68572 loss)
- I0412 04:37:11.380419 4704 sgd_solver.cpp:105] Iteration 850, lr = 1e-07
- I0412 04:38:18.349567 4704 solver.cpp:218] Iteration 900 (0.746614 iter/s, 66.969s/50 iters), loss = 1.5322
- I0412 04:38:18.349567 4704 solver.cpp:237] Train net output #0: loss = 1.5322 (* 1 = 1.5322 loss)
- I0412 04:38:18.349567 4704 sgd_solver.cpp:105] Iteration 900, lr = 1e-07
- I0412 04:39:25.224520 4704 solver.cpp:218] Iteration 950 (0.747675 iter/s, 66.874s/50 iters), loss = 1.35434
- I0412 04:39:25.224520 4704 solver.cpp:237] Train net output #0: loss = 1.35434 (* 1 = 1.35434 loss)
- I0412 04:39:25.224520 4704 sgd_solver.cpp:105] Iteration 950, lr = 1e-07
- I0412 04:40:32.130380 4704 solver.cpp:218] Iteration 1000 (0.747328 iter/s, 66.905s/50 iters), loss = 1.30102
- I0412 04:40:32.130380 4704 solver.cpp:237] Train net output #0: loss = 1.30102 (* 1 = 1.30102 loss)
- I0412 04:40:32.130380 4704 sgd_solver.cpp:105] Iteration 1000, lr = 1e-07
- I0412 04:41:40.880671 4704 solver.cpp:218] Iteration 1050 (0.727273 iter/s, 68.75s/50 iters), loss = 1.24691
- I0412 04:41:40.880671 4704 solver.cpp:237] Train net output #0: loss = 1.24691 (* 1 = 1.24691 loss)
- I0412 04:41:40.880671 4704 sgd_solver.cpp:105] Iteration 1050, lr = 1e-07
- I0412 04:42:09.927634 4704 solver.cpp:330] Iteration 1072, Testing net (#0)
- I0412 04:43:38.349730 4704 solver.cpp:397] Test net output #0: accuracy = 1
- I0412 04:43:38.349730 4704 solver.cpp:397] Test net output #1: loss = 1.40134 (* 1 = 1.40134 loss)
- I0412 04:44:18.818953 4704 solver.cpp:218] Iteration 1100 (0.31658 iter/s, 157.938s/50 iters), loss = 1.18656
- I0412 04:44:18.818953 4704 solver.cpp:237] Train net output #0: loss = 1.18656 (* 1 = 1.18656 loss)
- I0412 04:44:18.818953 4704 sgd_solver.cpp:105] Iteration 1100, lr = 1e-07
- I0412 04:45:29.631006 4704 solver.cpp:218] Iteration 1150 (0.706095 iter/s, 70.812s/50 iters), loss = 1.06573
- I0412 04:45:29.631006 4704 solver.cpp:237] Train net output #0: loss = 1.06573 (* 1 = 1.06573 loss)
- I0412 04:45:29.631006 4704 sgd_solver.cpp:105] Iteration 1150, lr = 1e-07
- I0412 04:46:36.709095 4704 solver.cpp:218] Iteration 1200 (0.745401 iter/s, 67.078s/50 iters), loss = 1.0079
- I0412 04:46:36.709095 4704 solver.cpp:237] Train net output #0: loss = 1.0079 (* 1 = 1.0079 loss)
- I0412 04:46:36.709095 4704 sgd_solver.cpp:105] Iteration 1200, lr = 1e-07
- I0412 04:47:43.599740 4704 solver.cpp:218] Iteration 1250 (0.747496 iter/s, 66.89s/50 iters), loss = 0.896948
- I0412 04:47:43.599740 4704 solver.cpp:237] Train net output #0: loss = 0.896948 (* 1 = 0.896948 loss)
- I0412 04:47:43.599740 4704 sgd_solver.cpp:105] Iteration 1250, lr = 1e-07
- I0412 04:48:50.458986 4704 solver.cpp:218] Iteration 1300 (0.747842 iter/s, 66.859s/50 iters), loss = 0.760583
- I0412 04:48:50.458986 4704 solver.cpp:237] Train net output #0: loss = 0.760583 (* 1 = 0.760583 loss)
- I0412 04:48:50.458986 4704 sgd_solver.cpp:105] Iteration 1300, lr = 1e-07
- I0412 04:49:42.693409 4704 solver.cpp:330] Iteration 1340, Testing net (#0)
- I0412 04:51:07.927647 4704 solver.cpp:397] Test net output #0: accuracy = 1
- I0412 04:51:07.927647 4704 solver.cpp:397] Test net output #1: loss = 1.05483 (* 1 = 1.05483 loss)
- I0412 04:51:22.568167 4704 solver.cpp:218] Iteration 1350 (0.328746 iter/s, 152.093s/50 iters), loss = 0.775195
- I0412 04:51:22.568167 4704 solver.cpp:237] Train net output #0: loss = 0.775195 (* 1 = 0.775195 loss)
- I0412 04:51:22.568167 4704 sgd_solver.cpp:105] Iteration 1350, lr = 1e-07
- I0412 04:52:29.552525 4704 solver.cpp:218] Iteration 1400 (0.746447 iter/s, 66.984s/50 iters), loss = 0.718551
- I0412 04:52:29.552525 4704 solver.cpp:237] Train net output #0: loss = 0.718551 (* 1 = 0.718551 loss)
- I0412 04:52:29.552525 4704 sgd_solver.cpp:105] Iteration 1400, lr = 1e-07
- I0412 04:53:37.411788 4704 solver.cpp:218] Iteration 1450 (0.736822 iter/s, 67.859s/50 iters), loss = 0.691536
- I0412 04:53:37.411788 4704 solver.cpp:237] Train net output #0: loss = 0.691536 (* 1 = 0.691536 loss)
- I0412 04:53:37.411788 4704 sgd_solver.cpp:105] Iteration 1450, lr = 1e-07
- I0412 04:54:44.599256 4704 solver.cpp:218] Iteration 1500 (0.744192 iter/s, 67.187s/50 iters), loss = 0.834102
- I0412 04:54:44.599256 4704 solver.cpp:237] Train net output #0: loss = 0.834102 (* 1 = 0.834102 loss)
- I0412 04:54:44.599256 4704 sgd_solver.cpp:105] Iteration 1500, lr = 1e-07
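Because the whole run is tee'd to modify_caffenet.log, the training curve can be recovered from the file afterwards. A minimal sketch that pulls the (iteration, loss) pairs out of the solver.cpp:218 lines shown above:

    import re

    # Matches lines like:
    # "... solver.cpp:218] Iteration 50 (0.741774 iter/s, 67.406s/50 iters), loss = 4.04563"
    pat = re.compile(r'solver\.cpp:218\] Iteration (\d+) .*loss = ([0-9.]+)')

    with open('C:/Users/mahtab/Desktop/finetune_mitindoor67/modify_caffenet.log') as log:
        for line in log:
            m = pat.search(line)
            if m:
                print(int(m.group(1)), float(m.group(2)))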