# WRN_50_2 — Caffe train/val network definition (wide ResNet-style bottleneck net).
# NOTE(review): recovered from a web paste; the leading "- " on every line below
# is a paste artifact and must be stripped before Caffe can parse this file.
- name: "WRN_50_2"
- layer {
- name: "Data1"
- type: "Data"
- top: "Data1"
- top: "Data2"
- include {
- phase: TRAIN
- }
- transform_param {
- mirror: true
- crop_size: 151
- mean_value: 104
- mean_value: 117
- mean_value: 123
- }
- data_param {
- source: "/PATH/TO/train_imagenet_156_lmdb"
- batch_size: 64
- backend: LMDB
- }
- }
- layer {
- name: "Data1"
- type: "Data"
- top: "Data1"
- top: "Data2"
- include {
- phase: TEST
- }
- transform_param {
- mirror: false  # no random mirroring at eval time (was "true" — likely copied from the TRAIN layer)
- crop_size: 151
- mean_value: 104
- mean_value: 117
- mean_value: 123
- }
- data_param {
- source: "/PATH/TO/val_imagenet_156_lmdb"
- batch_size: 64
- backend: LMDB
- }
- }
- layer {
- name: "Convolution1"
- type: "Convolution"
- bottom: "Data1"
- top: "Convolution1"
- convolution_param {
- num_output: 64
- pad: 3
- kernel_size: 7
- stride: 2
- bias_term: false
- }
- }
- layer {
- name: "BatchNorm1"
- type: "BatchNorm"
- bottom: "Convolution1"
- top: "Convolution1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- # NOTE(review): use_global_stats: false forces mini-batch statistics even in
- # the TEST phase, which degrades evaluation. Caffe selects the correct mode
- # per phase when this field is left unset — confirm, and consider removing it
- # here and in every BatchNorm layer below.
- use_global_stats: false
- }
- }
- layer {
- name: "Scale1"
- type: "Scale"
- bottom: "Convolution1"
- top: "Convolution1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU1"
- type: "ReLU"
- bottom: "Convolution1"
- top: "Convolution1"
- }
- layer {
- name: "Pooling1"
- type: "Pooling"
- bottom: "Convolution1"
- # NOTE(review): this layer is declared in-place (top == bottom) but changes the
- # spatial size (stride 2); Caffe's Pooling layer does not support in-place
- # operation. Give the top a distinct name (e.g. "Pooling1") and update the
- # bottoms of Convolution2_1_1 and Convolution2_1 to match — confirm.
- top: "Convolution1"
- pooling_param {
- kernel_size: 3
- stride: 2
- pad: 1
- pool: MAX
- }
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution2_1_1"
- type: "Convolution"
- bottom: "Convolution1"
- top: "Convolution2_1_1"
- convolution_param {
- num_output: 128
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm2_1_1"
- type: "BatchNorm"
- bottom: "Convolution2_1_1"
- top: "Convolution2_1_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale2_1_1"
- type: "Scale"
- bottom: "Convolution2_1_1"
- top: "Convolution2_1_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU2_1_1"
- type: "ReLU"
- bottom: "Convolution2_1_1"
- top: "Convolution2_1_1"
- }
- layer {
- name: "Convolution2_1_2"
- type: "Convolution"
- bottom: "Convolution2_1_1"
- top: "Convolution2_1_2"
- convolution_param {
- num_output: 128
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- }
- }
- layer {
- name: "BatchNorm2_1_2"
- type: "BatchNorm"
- bottom: "Convolution2_1_2"
- top: "Convolution2_1_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale2_1_2"
- type: "Scale"
- bottom: "Convolution2_1_2"
- top: "Convolution2_1_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU2_1_2"
- type: "ReLU"
- bottom: "Convolution2_1_2"
- top: "Convolution2_1_2"
- }
- layer {
- name: "Convolution2_1_3"
- type: "Convolution"
- bottom: "Convolution2_1_2"
- top: "Convolution2_1_3"
- convolution_param {
- num_output: 256
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm2_1_3"
- type: "BatchNorm"
- bottom: "Convolution2_1_3"
- top: "Convolution2_1_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale2_1_3"
- type: "Scale"
- bottom: "Convolution2_1_3"
- top: "Convolution2_1_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Convolution2_1"
- type: "Convolution"
- bottom: "Convolution1"
- top: "Convolution2_1"
- convolution_param {
- num_output: 256
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm2_1"
- type: "BatchNorm"
- bottom: "Convolution2_1"
- top: "Convolution2_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale2_1"
- type: "Scale"
- bottom: "Convolution2_1"
- top: "Convolution2_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum2_1"
- type: "Eltwise"
- bottom: "Convolution2_1"
- bottom: "Convolution2_1_3"
- top: "Sum2_1"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU2_1"
- type: "ReLU"
- bottom: "Sum2_1"
- top: "Sum2_1"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution2_2_1"
- type: "Convolution"
- bottom: "Sum2_1"
- top: "Convolution2_2_1"
- convolution_param {
- num_output: 128
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm2_2_1"
- type: "BatchNorm"
- bottom: "Convolution2_2_1"
- top: "Convolution2_2_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale2_2_1"
- type: "Scale"
- bottom: "Convolution2_2_1"
- top: "Convolution2_2_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU2_2_1"
- type: "ReLU"
- bottom: "Convolution2_2_1"
- top: "Convolution2_2_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution2_2_2"
- type: "Convolution"
- bottom: "Convolution2_2_1"
- top: "Convolution2_2_2"
- convolution_param {
- num_output: 128
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- }
- }
- layer {
- name: "BatchNorm2_2_2"
- type: "BatchNorm"
- bottom: "Convolution2_2_2"
- top: "Convolution2_2_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale2_2_2"
- type: "Scale"
- bottom: "Convolution2_2_2"
- top: "Convolution2_2_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU2_2_2"
- type: "ReLU"
- bottom: "Convolution2_2_2"
- top: "Convolution2_2_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution2_2_3"
- type: "Convolution"
- bottom: "Convolution2_2_2"
- top: "Convolution2_2_3"
- convolution_param {
- num_output: 256
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm2_2_3"
- type: "BatchNorm"
- bottom: "Convolution2_2_3"
- top: "Convolution2_2_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale2_2_3"
- type: "Scale"
- bottom: "Convolution2_2_3"
- top: "Convolution2_2_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum2_2"
- type: "Eltwise"
- bottom: "Sum2_1"
- bottom: "Convolution2_2_3"
- top: "Sum2_2"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU2_2"
- type: "ReLU"
- bottom: "Sum2_2"
- top: "Sum2_2"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution2_3_1"
- type: "Convolution"
- bottom: "Sum2_2"
- top: "Convolution2_3_1"
- convolution_param {
- num_output: 128
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm2_3_1"
- type: "BatchNorm"
- bottom: "Convolution2_3_1"
- top: "Convolution2_3_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale2_3_1"
- type: "Scale"
- bottom: "Convolution2_3_1"
- top: "Convolution2_3_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU2_3_1"
- type: "ReLU"
- bottom: "Convolution2_3_1"
- top: "Convolution2_3_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution2_3_2"
- type: "Convolution"
- bottom: "Convolution2_3_1"
- top: "Convolution2_3_2"
- convolution_param {
- num_output: 128
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- }
- }
- layer {
- name: "BatchNorm2_3_2"
- type: "BatchNorm"
- bottom: "Convolution2_3_2"
- top: "Convolution2_3_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale2_3_2"
- type: "Scale"
- bottom: "Convolution2_3_2"
- top: "Convolution2_3_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU2_3_2"
- type: "ReLU"
- bottom: "Convolution2_3_2"
- top: "Convolution2_3_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution2_3_3"
- type: "Convolution"
- bottom: "Convolution2_3_2"
- top: "Convolution2_3_3"
- convolution_param {
- num_output: 256
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm2_3_3"
- type: "BatchNorm"
- bottom: "Convolution2_3_3"
- top: "Convolution2_3_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale2_3_3"
- type: "Scale"
- bottom: "Convolution2_3_3"
- top: "Convolution2_3_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum2_3"
- type: "Eltwise"
- bottom: "Sum2_2"
- bottom: "Convolution2_3_3"
- top: "Sum2_3"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU2_3"
- type: "ReLU"
- bottom: "Sum2_3"
- top: "Sum2_3"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution3_1_1"
- type: "Convolution"
- bottom: "Sum2_3"
- top: "Convolution3_1_1"
- convolution_param {
- num_output: 256
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm3_1_1"
- type: "BatchNorm"
- bottom: "Convolution3_1_1"
- top: "Convolution3_1_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_1_1"
- type: "Scale"
- bottom: "Convolution3_1_1"
- top: "Convolution3_1_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU3_1_1"
- type: "ReLU"
- bottom: "Convolution3_1_1"
- top: "Convolution3_1_1"
- }
- layer {
- name: "Convolution3_1_2"
- type: "Convolution"
- bottom: "Convolution3_1_1"
- top: "Convolution3_1_2"
- convolution_param {
- num_output: 256
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 2
- }
- }
- layer {
- name: "BatchNorm3_1_2"
- type: "BatchNorm"
- bottom: "Convolution3_1_2"
- top: "Convolution3_1_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_1_2"
- type: "Scale"
- bottom: "Convolution3_1_2"
- top: "Convolution3_1_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU3_1_2"
- type: "ReLU"
- bottom: "Convolution3_1_2"
- top: "Convolution3_1_2"
- }
- layer {
- name: "Convolution3_1_3"
- type: "Convolution"
- bottom: "Convolution3_1_2"
- top: "Convolution3_1_3"
- convolution_param {
- num_output: 512
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm3_1_3"
- type: "BatchNorm"
- bottom: "Convolution3_1_3"
- top: "Convolution3_1_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_1_3"
- type: "Scale"
- bottom: "Convolution3_1_3"
- top: "Convolution3_1_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Convolution3_1"
- type: "Convolution"
- bottom: "Sum2_3"
- top: "Convolution3_1"
- convolution_param {
- num_output: 512
- bias_term: false
- kernel_size: 1
- stride: 2
- }
- }
- layer {
- name: "BatchNorm3_1"
- type: "BatchNorm"
- bottom: "Convolution3_1"
- top: "Convolution3_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_1"
- type: "Scale"
- bottom: "Convolution3_1"
- top: "Convolution3_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum3_1"
- type: "Eltwise"
- bottom: "Convolution3_1"
- bottom: "Convolution3_1_3"
- top: "Sum3_1"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU3_1"
- type: "ReLU"
- bottom: "Sum3_1"
- top: "Sum3_1"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution3_2_1"
- type: "Convolution"
- bottom: "Sum3_1"
- top: "Convolution3_2_1"
- convolution_param {
- num_output: 256
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm3_2_1"
- type: "BatchNorm"
- bottom: "Convolution3_2_1"
- top: "Convolution3_2_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_2_1"
- type: "Scale"
- bottom: "Convolution3_2_1"
- top: "Convolution3_2_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU3_2_1"
- type: "ReLU"
- bottom: "Convolution3_2_1"
- top: "Convolution3_2_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution3_2_2"
- type: "Convolution"
- bottom: "Convolution3_2_1"
- top: "Convolution3_2_2"
- convolution_param {
- num_output: 256
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- }
- }
- layer {
- name: "BatchNorm3_2_2"
- type: "BatchNorm"
- bottom: "Convolution3_2_2"
- top: "Convolution3_2_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_2_2"
- type: "Scale"
- bottom: "Convolution3_2_2"
- top: "Convolution3_2_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU3_2_2"
- type: "ReLU"
- bottom: "Convolution3_2_2"
- top: "Convolution3_2_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution3_2_3"
- type: "Convolution"
- bottom: "Convolution3_2_2"
- top: "Convolution3_2_3"
- convolution_param {
- num_output: 512
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm3_2_3"
- type: "BatchNorm"
- bottom: "Convolution3_2_3"
- top: "Convolution3_2_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_2_3"
- type: "Scale"
- bottom: "Convolution3_2_3"
- top: "Convolution3_2_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum3_2"
- type: "Eltwise"
- bottom: "Sum3_1"
- bottom: "Convolution3_2_3"
- top: "Sum3_2"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU3_2"
- type: "ReLU"
- bottom: "Sum3_2"
- top: "Sum3_2"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution3_3_1"
- type: "Convolution"
- bottom: "Sum3_2"
- top: "Convolution3_3_1"
- convolution_param {
- num_output: 256
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm3_3_1"
- type: "BatchNorm"
- bottom: "Convolution3_3_1"
- top: "Convolution3_3_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_3_1"
- type: "Scale"
- bottom: "Convolution3_3_1"
- top: "Convolution3_3_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU3_3_1"
- type: "ReLU"
- bottom: "Convolution3_3_1"
- top: "Convolution3_3_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution3_3_2"
- type: "Convolution"
- bottom: "Convolution3_3_1"
- top: "Convolution3_3_2"
- convolution_param {
- num_output: 256
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- }
- }
- layer {
- name: "BatchNorm3_3_2"
- type: "BatchNorm"
- bottom: "Convolution3_3_2"
- top: "Convolution3_3_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_3_2"
- type: "Scale"
- bottom: "Convolution3_3_2"
- top: "Convolution3_3_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU3_3_2"
- type: "ReLU"
- bottom: "Convolution3_3_2"
- top: "Convolution3_3_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution3_3_3"
- type: "Convolution"
- bottom: "Convolution3_3_2"
- top: "Convolution3_3_3"
- convolution_param {
- num_output: 512
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm3_3_3"
- type: "BatchNorm"
- bottom: "Convolution3_3_3"
- top: "Convolution3_3_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_3_3"
- type: "Scale"
- bottom: "Convolution3_3_3"
- top: "Convolution3_3_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum3_3"
- type: "Eltwise"
- bottom: "Sum3_2"
- bottom: "Convolution3_3_3"
- top: "Sum3_3"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU3_3"
- type: "ReLU"
- bottom: "Sum3_3"
- top: "Sum3_3"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution3_4_1"
- type: "Convolution"
- bottom: "Sum3_3"
- top: "Convolution3_4_1"
- convolution_param {
- num_output: 256
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm3_4_1"
- type: "BatchNorm"
- bottom: "Convolution3_4_1"
- top: "Convolution3_4_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_4_1"
- type: "Scale"
- bottom: "Convolution3_4_1"
- top: "Convolution3_4_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU3_4_1"
- type: "ReLU"
- bottom: "Convolution3_4_1"
- top: "Convolution3_4_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution3_4_2"
- type: "Convolution"
- bottom: "Convolution3_4_1"
- top: "Convolution3_4_2"
- convolution_param {
- num_output: 256
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- }
- }
- layer {
- name: "BatchNorm3_4_2"
- type: "BatchNorm"
- bottom: "Convolution3_4_2"
- top: "Convolution3_4_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_4_2"
- type: "Scale"
- bottom: "Convolution3_4_2"
- top: "Convolution3_4_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU3_4_2"
- type: "ReLU"
- bottom: "Convolution3_4_2"
- top: "Convolution3_4_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution3_4_3"
- type: "Convolution"
- bottom: "Convolution3_4_2"
- top: "Convolution3_4_3"
- convolution_param {
- num_output: 512
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm3_4_3"
- type: "BatchNorm"
- bottom: "Convolution3_4_3"
- top: "Convolution3_4_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale3_4_3"
- type: "Scale"
- bottom: "Convolution3_4_3"
- top: "Convolution3_4_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum3_4"
- type: "Eltwise"
- bottom: "Sum3_3"
- bottom: "Convolution3_4_3"
- top: "Sum3_4"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU3_4"
- type: "ReLU"
- bottom: "Sum3_4"
- top: "Sum3_4"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_1_1"
- type: "Convolution"
- bottom: "Sum3_4"
- top: "Convolution4_1_1"
- convolution_param {
- num_output: 512
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm4_1_1"
- type: "BatchNorm"
- bottom: "Convolution4_1_1"
- top: "Convolution4_1_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale4_1_1"
- type: "Scale"
- bottom: "Convolution4_1_1"
- top: "Convolution4_1_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_1_1"
- type: "ReLU"
- bottom: "Convolution4_1_1"
- top: "Convolution4_1_1"
- }
- layer {
- name: "Convolution4_1_2"
- type: "Convolution"
- bottom: "Convolution4_1_1"
- top: "Convolution4_1_2"
- convolution_param {
- num_output: 512
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 2
- }
- }
- layer {
- name: "BatchNorm4_1_2"
- type: "BatchNorm"
- bottom: "Convolution4_1_2"
- top: "Convolution4_1_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale4_1_2"
- type: "Scale"
- bottom: "Convolution4_1_2"
- top: "Convolution4_1_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_1_2"
- type: "ReLU"
- bottom: "Convolution4_1_2"
- top: "Convolution4_1_2"
- }
- layer {
- name: "Convolution4_1_3"
- type: "Convolution"
- bottom: "Convolution4_1_2"
- top: "Convolution4_1_3"
- convolution_param {
- num_output: 1024
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm4_1_3"
- type: "BatchNorm"
- bottom: "Convolution4_1_3"
- top: "Convolution4_1_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale4_1_3"
- type: "Scale"
- bottom: "Convolution4_1_3"
- top: "Convolution4_1_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Convolution4_1"
- type: "Convolution"
- bottom: "Sum3_4"
- top: "Convolution4_1"
- convolution_param {
- num_output: 1024
- bias_term: false
- kernel_size: 1
- stride: 2
- }
- }
- layer {
- name: "BatchNorm4_1"
- type: "BatchNorm"
- bottom: "Convolution4_1"
- top: "Convolution4_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale4_1"
- type: "Scale"
- bottom: "Convolution4_1"
- top: "Convolution4_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum4_1"
- type: "Eltwise"
- bottom: "Convolution4_1"
- bottom: "Convolution4_1_3"
- top: "Sum4_1"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU4_1"
- type: "ReLU"
- bottom: "Sum4_1"
- top: "Sum4_1"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_2_1"
- type: "Convolution"
- bottom: "Sum4_1"
- top: "Convolution4_2_1"
- convolution_param {
- num_output: 512
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm4_2_1"
- type: "BatchNorm"
- bottom: "Convolution4_2_1"
- top: "Convolution4_2_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale4_2_1"
- type: "Scale"
- bottom: "Convolution4_2_1"
- top: "Convolution4_2_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_2_1"
- type: "ReLU"
- bottom: "Convolution4_2_1"
- top: "Convolution4_2_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_2_2"
- type: "Convolution"
- bottom: "Convolution4_2_1"
- top: "Convolution4_2_2"
- convolution_param {
- num_output: 512
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- }
- }
- layer {
- name: "BatchNorm4_2_2"
- type: "BatchNorm"
- bottom: "Convolution4_2_2"
- top: "Convolution4_2_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale4_2_2"
- type: "Scale"
- bottom: "Convolution4_2_2"
- top: "Convolution4_2_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_2_2"
- type: "ReLU"
- bottom: "Convolution4_2_2"
- top: "Convolution4_2_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_2_3"
- type: "Convolution"
- bottom: "Convolution4_2_2"
- top: "Convolution4_2_3"
- convolution_param {
- num_output: 1024
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm4_2_3"
- type: "BatchNorm"
- bottom: "Convolution4_2_3"
- top: "Convolution4_2_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale4_2_3"
- type: "Scale"
- bottom: "Convolution4_2_3"
- top: "Convolution4_2_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum4_2"
- type: "Eltwise"
- bottom: "Sum4_1"
- bottom: "Convolution4_2_3"
- top: "Sum4_2"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU4_2"
- type: "ReLU"
- bottom: "Sum4_2"
- top: "Sum4_2"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_3_1"
- type: "Convolution"
- bottom: "Sum4_2"
- top: "Convolution4_3_1"
- convolution_param {
- num_output: 512
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm4_3_1"
- type: "BatchNorm"
- bottom: "Convolution4_3_1"
- top: "Convolution4_3_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale4_3_1"
- type: "Scale"
- bottom: "Convolution4_3_1"
- top: "Convolution4_3_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_3_1"
- type: "ReLU"
- bottom: "Convolution4_3_1"
- top: "Convolution4_3_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_3_2"
- type: "Convolution"
- bottom: "Convolution4_3_1"
- top: "Convolution4_3_2"
- convolution_param {
- num_output: 512
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- }
- }
- layer {
- name: "BatchNorm4_3_2"
- type: "BatchNorm"
- bottom: "Convolution4_3_2"
- top: "Convolution4_3_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale4_3_2"
- type: "Scale"
- bottom: "Convolution4_3_2"
- top: "Convolution4_3_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_3_2"
- type: "ReLU"
- bottom: "Convolution4_3_2"
- top: "Convolution4_3_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_3_3"
- type: "Convolution"
- bottom: "Convolution4_3_2"
- top: "Convolution4_3_3"
- convolution_param {
- num_output: 1024
- bias_term: false
- kernel_size: 1
- }
- }
- layer {
- name: "BatchNorm4_3_3"
- type: "BatchNorm"
- bottom: "Convolution4_3_3"
- top: "Convolution4_3_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- batch_norm_param {
- use_global_stats: false
- }
- }
- layer {
- name: "Scale4_3_3"
- type: "Scale"
- bottom: "Convolution4_3_3"
- top: "Convolution4_3_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum4_3"
- type: "Eltwise"
- bottom: "Sum4_2"
- bottom: "Convolution4_3_3"
- top: "Sum4_3"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU4_3"
- type: "ReLU"
- bottom: "Sum4_3"
- top: "Sum4_3"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- # Bottleneck 4_4, identity shortcut: 1x1/512 -> 3x3/512 -> 1x1/1024 + Sum4_3.
- # MSRA weight fillers added (Caffe's default filler is constant 0, which makes
- # from-scratch training impossible; harmless when weights are restored from a
- # snapshot). The explicit "use_global_stats: false" was removed from BatchNorm
- # so Caffe auto-selects: minibatch statistics in TRAIN, global stats in TEST.
- # BN blobs (running mean/variance/correction) are layer-managed, hence the
- # three lr_mult: 0 / decay_mult: 0 param entries.
- layer {
- name: "Convolution4_4_1"
- type: "Convolution"
- bottom: "Sum4_3"
- top: "Convolution4_4_1"
- convolution_param {
- num_output: 512
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm4_4_1"
- type: "BatchNorm"
- bottom: "Convolution4_4_1"
- top: "Convolution4_4_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale4_4_1"
- type: "Scale"
- bottom: "Convolution4_4_1"
- top: "Convolution4_4_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_4_1"
- type: "ReLU"
- bottom: "Convolution4_4_1"
- top: "Convolution4_4_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_4_2"
- type: "Convolution"
- bottom: "Convolution4_4_1"
- top: "Convolution4_4_2"
- convolution_param {
- num_output: 512
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm4_4_2"
- type: "BatchNorm"
- bottom: "Convolution4_4_2"
- top: "Convolution4_4_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale4_4_2"
- type: "Scale"
- bottom: "Convolution4_4_2"
- top: "Convolution4_4_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_4_2"
- type: "ReLU"
- bottom: "Convolution4_4_2"
- top: "Convolution4_4_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_4_3"
- type: "Convolution"
- bottom: "Convolution4_4_2"
- top: "Convolution4_4_3"
- convolution_param {
- num_output: 1024
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm4_4_3"
- type: "BatchNorm"
- bottom: "Convolution4_4_3"
- top: "Convolution4_4_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale4_4_3"
- type: "Scale"
- bottom: "Convolution4_4_3"
- top: "Convolution4_4_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum4_4"
- type: "Eltwise"
- bottom: "Sum4_3"
- bottom: "Convolution4_4_3"
- top: "Sum4_4"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU4_4"
- type: "ReLU"
- bottom: "Sum4_4"
- top: "Sum4_4"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- # Bottleneck 4_5, identity shortcut: 1x1/512 -> 3x3/512 -> 1x1/1024 + Sum4_4.
- # MSRA weight fillers added (Caffe's default filler is constant 0, which makes
- # from-scratch training impossible; harmless when weights are restored from a
- # snapshot). The explicit "use_global_stats: false" was removed from BatchNorm
- # so Caffe auto-selects: minibatch statistics in TRAIN, global stats in TEST.
- # BN blobs (running mean/variance/correction) are layer-managed, hence the
- # three lr_mult: 0 / decay_mult: 0 param entries.
- layer {
- name: "Convolution4_5_1"
- type: "Convolution"
- bottom: "Sum4_4"
- top: "Convolution4_5_1"
- convolution_param {
- num_output: 512
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm4_5_1"
- type: "BatchNorm"
- bottom: "Convolution4_5_1"
- top: "Convolution4_5_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale4_5_1"
- type: "Scale"
- bottom: "Convolution4_5_1"
- top: "Convolution4_5_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_5_1"
- type: "ReLU"
- bottom: "Convolution4_5_1"
- top: "Convolution4_5_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_5_2"
- type: "Convolution"
- bottom: "Convolution4_5_1"
- top: "Convolution4_5_2"
- convolution_param {
- num_output: 512
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm4_5_2"
- type: "BatchNorm"
- bottom: "Convolution4_5_2"
- top: "Convolution4_5_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale4_5_2"
- type: "Scale"
- bottom: "Convolution4_5_2"
- top: "Convolution4_5_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_5_2"
- type: "ReLU"
- bottom: "Convolution4_5_2"
- top: "Convolution4_5_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_5_3"
- type: "Convolution"
- bottom: "Convolution4_5_2"
- top: "Convolution4_5_3"
- convolution_param {
- num_output: 1024
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm4_5_3"
- type: "BatchNorm"
- bottom: "Convolution4_5_3"
- top: "Convolution4_5_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale4_5_3"
- type: "Scale"
- bottom: "Convolution4_5_3"
- top: "Convolution4_5_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum4_5"
- type: "Eltwise"
- bottom: "Sum4_4"
- bottom: "Convolution4_5_3"
- top: "Sum4_5"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU4_5"
- type: "ReLU"
- bottom: "Sum4_5"
- top: "Sum4_5"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- # Bottleneck 4_6, identity shortcut: 1x1/512 -> 3x3/512 -> 1x1/1024 + Sum4_5.
- # MSRA weight fillers added (Caffe's default filler is constant 0, which makes
- # from-scratch training impossible; harmless when weights are restored from a
- # snapshot). The explicit "use_global_stats: false" was removed from BatchNorm
- # so Caffe auto-selects: minibatch statistics in TRAIN, global stats in TEST.
- # BN blobs (running mean/variance/correction) are layer-managed, hence the
- # three lr_mult: 0 / decay_mult: 0 param entries.
- layer {
- name: "Convolution4_6_1"
- type: "Convolution"
- bottom: "Sum4_5"
- top: "Convolution4_6_1"
- convolution_param {
- num_output: 512
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm4_6_1"
- type: "BatchNorm"
- bottom: "Convolution4_6_1"
- top: "Convolution4_6_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale4_6_1"
- type: "Scale"
- bottom: "Convolution4_6_1"
- top: "Convolution4_6_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_6_1"
- type: "ReLU"
- bottom: "Convolution4_6_1"
- top: "Convolution4_6_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_6_2"
- type: "Convolution"
- bottom: "Convolution4_6_1"
- top: "Convolution4_6_2"
- convolution_param {
- num_output: 512
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm4_6_2"
- type: "BatchNorm"
- bottom: "Convolution4_6_2"
- top: "Convolution4_6_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale4_6_2"
- type: "Scale"
- bottom: "Convolution4_6_2"
- top: "Convolution4_6_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU4_6_2"
- type: "ReLU"
- bottom: "Convolution4_6_2"
- top: "Convolution4_6_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution4_6_3"
- type: "Convolution"
- bottom: "Convolution4_6_2"
- top: "Convolution4_6_3"
- convolution_param {
- num_output: 1024
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm4_6_3"
- type: "BatchNorm"
- bottom: "Convolution4_6_3"
- top: "Convolution4_6_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale4_6_3"
- type: "Scale"
- bottom: "Convolution4_6_3"
- top: "Convolution4_6_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum4_6"
- type: "Eltwise"
- bottom: "Sum4_5"
- bottom: "Convolution4_6_3"
- top: "Sum4_6"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU4_6"
- type: "ReLU"
- bottom: "Sum4_6"
- top: "Sum4_6"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- # Bottleneck 5_1, DOWNSAMPLING unit with projection shortcut:
- # main path 1x1/1024 -> 3x3/1024 stride 2 -> 1x1/2048; shortcut is a
- # 1x1/2048 stride-2 convolution on Sum4_6 so both Eltwise inputs match in
- # channels and spatial size.
- # MSRA weight fillers added (Caffe's default filler is constant 0, which makes
- # from-scratch training impossible; harmless when weights are restored from a
- # snapshot). The explicit "use_global_stats: false" was removed from BatchNorm
- # so Caffe auto-selects: minibatch statistics in TRAIN, global stats in TEST.
- # BN blobs (running mean/variance/correction) are layer-managed, hence the
- # three lr_mult: 0 / decay_mult: 0 param entries.
- layer {
- name: "Convolution5_1_1"
- type: "Convolution"
- bottom: "Sum4_6"
- top: "Convolution5_1_1"
- convolution_param {
- num_output: 1024
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm5_1_1"
- type: "BatchNorm"
- bottom: "Convolution5_1_1"
- top: "Convolution5_1_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale5_1_1"
- type: "Scale"
- bottom: "Convolution5_1_1"
- top: "Convolution5_1_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU5_1_1"
- type: "ReLU"
- bottom: "Convolution5_1_1"
- top: "Convolution5_1_1"
- }
- # 3x3 stage carries the stride-2 downsampling for this unit.
- layer {
- name: "Convolution5_1_2"
- type: "Convolution"
- bottom: "Convolution5_1_1"
- top: "Convolution5_1_2"
- convolution_param {
- num_output: 1024
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 2
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm5_1_2"
- type: "BatchNorm"
- bottom: "Convolution5_1_2"
- top: "Convolution5_1_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale5_1_2"
- type: "Scale"
- bottom: "Convolution5_1_2"
- top: "Convolution5_1_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU5_1_2"
- type: "ReLU"
- bottom: "Convolution5_1_2"
- top: "Convolution5_1_2"
- }
- layer {
- name: "Convolution5_1_3"
- type: "Convolution"
- bottom: "Convolution5_1_2"
- top: "Convolution5_1_3"
- convolution_param {
- num_output: 2048
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm5_1_3"
- type: "BatchNorm"
- bottom: "Convolution5_1_3"
- top: "Convolution5_1_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale5_1_3"
- type: "Scale"
- bottom: "Convolution5_1_3"
- top: "Convolution5_1_3"
- scale_param {
- bias_term: true
- }
- }
- # Projection shortcut (stride 2, 2048 channels) from the unit input.
- layer {
- name: "Convolution5_1"
- type: "Convolution"
- bottom: "Sum4_6"
- top: "Convolution5_1"
- convolution_param {
- num_output: 2048
- bias_term: false
- kernel_size: 1
- stride: 2
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm5_1"
- type: "BatchNorm"
- bottom: "Convolution5_1"
- top: "Convolution5_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale5_1"
- type: "Scale"
- bottom: "Convolution5_1"
- top: "Convolution5_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum5_1"
- type: "Eltwise"
- bottom: "Convolution5_1"
- bottom: "Convolution5_1_3"
- top: "Sum5_1"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU5_1"
- type: "ReLU"
- bottom: "Sum5_1"
- top: "Sum5_1"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- # Bottleneck 5_2, identity shortcut: 1x1/1024 -> 3x3/1024 -> 1x1/2048 + Sum5_1.
- # MSRA weight fillers added (Caffe's default filler is constant 0, which makes
- # from-scratch training impossible; harmless when weights are restored from a
- # snapshot). The explicit "use_global_stats: false" was removed from BatchNorm
- # so Caffe auto-selects: minibatch statistics in TRAIN, global stats in TEST.
- # BN blobs (running mean/variance/correction) are layer-managed, hence the
- # three lr_mult: 0 / decay_mult: 0 param entries.
- layer {
- name: "Convolution5_2_1"
- type: "Convolution"
- bottom: "Sum5_1"
- top: "Convolution5_2_1"
- convolution_param {
- num_output: 1024
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm5_2_1"
- type: "BatchNorm"
- bottom: "Convolution5_2_1"
- top: "Convolution5_2_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale5_2_1"
- type: "Scale"
- bottom: "Convolution5_2_1"
- top: "Convolution5_2_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU5_2_1"
- type: "ReLU"
- bottom: "Convolution5_2_1"
- top: "Convolution5_2_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution5_2_2"
- type: "Convolution"
- bottom: "Convolution5_2_1"
- top: "Convolution5_2_2"
- convolution_param {
- num_output: 1024
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm5_2_2"
- type: "BatchNorm"
- bottom: "Convolution5_2_2"
- top: "Convolution5_2_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale5_2_2"
- type: "Scale"
- bottom: "Convolution5_2_2"
- top: "Convolution5_2_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU5_2_2"
- type: "ReLU"
- bottom: "Convolution5_2_2"
- top: "Convolution5_2_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution5_2_3"
- type: "Convolution"
- bottom: "Convolution5_2_2"
- top: "Convolution5_2_3"
- convolution_param {
- num_output: 2048
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm5_2_3"
- type: "BatchNorm"
- bottom: "Convolution5_2_3"
- top: "Convolution5_2_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale5_2_3"
- type: "Scale"
- bottom: "Convolution5_2_3"
- top: "Convolution5_2_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum5_2"
- type: "Eltwise"
- bottom: "Sum5_1"
- bottom: "Convolution5_2_3"
- top: "Sum5_2"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU5_2"
- type: "ReLU"
- bottom: "Sum5_2"
- top: "Sum5_2"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- # Bottleneck 5_3, identity shortcut: 1x1/1024 -> 3x3/1024 -> 1x1/2048 + Sum5_2.
- # MSRA weight fillers added (Caffe's default filler is constant 0, which makes
- # from-scratch training impossible; harmless when weights are restored from a
- # snapshot). The explicit "use_global_stats: false" was removed from BatchNorm
- # so Caffe auto-selects: minibatch statistics in TRAIN, global stats in TEST.
- # BN blobs (running mean/variance/correction) are layer-managed, hence the
- # three lr_mult: 0 / decay_mult: 0 param entries.
- layer {
- name: "Convolution5_3_1"
- type: "Convolution"
- bottom: "Sum5_2"
- top: "Convolution5_3_1"
- convolution_param {
- num_output: 1024
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm5_3_1"
- type: "BatchNorm"
- bottom: "Convolution5_3_1"
- top: "Convolution5_3_1"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale5_3_1"
- type: "Scale"
- bottom: "Convolution5_3_1"
- top: "Convolution5_3_1"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU5_3_1"
- type: "ReLU"
- bottom: "Convolution5_3_1"
- top: "Convolution5_3_1"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution5_3_2"
- type: "Convolution"
- bottom: "Convolution5_3_1"
- top: "Convolution5_3_2"
- convolution_param {
- num_output: 1024
- bias_term: false
- pad: 1
- kernel_size: 3
- stride: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm5_3_2"
- type: "BatchNorm"
- bottom: "Convolution5_3_2"
- top: "Convolution5_3_2"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale5_3_2"
- type: "Scale"
- bottom: "Convolution5_3_2"
- top: "Convolution5_3_2"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "ReLU5_3_2"
- type: "ReLU"
- bottom: "Convolution5_3_2"
- top: "Convolution5_3_2"
- }
- #------------------------------------------------------------------------------
- layer {
- name: "Convolution5_3_3"
- type: "Convolution"
- bottom: "Convolution5_3_2"
- top: "Convolution5_3_3"
- convolution_param {
- num_output: 2048
- bias_term: false
- kernel_size: 1
- weight_filler {
- type: "msra"
- }
- }
- }
- layer {
- name: "BatchNorm5_3_3"
- type: "BatchNorm"
- bottom: "Convolution5_3_3"
- top: "Convolution5_3_3"
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- param {
- lr_mult: 0
- decay_mult: 0
- }
- }
- layer {
- name: "Scale5_3_3"
- type: "Scale"
- bottom: "Convolution5_3_3"
- top: "Convolution5_3_3"
- scale_param {
- bias_term: true
- }
- }
- layer {
- name: "Sum5_3"
- type: "Eltwise"
- bottom: "Sum5_2"
- bottom: "Convolution5_3_3"
- top: "Sum5_3"
- eltwise_param {
- operation: SUM
- }
- }
- layer {
- name: "ReLU5_3"
- type: "ReLU"
- bottom: "Sum5_3"
- top: "Sum5_3"
- }
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- #------------------------------------------------------------------------------
- # Global average pooling over the final residual feature map (Sum5_3).
- # The original hard-coded kernel_size: 7 assumes a 7x7 map (224-crop input);
- # the data layers here crop to 151, which yields a smaller map at this depth
- # (approximately 5x5 — TODO confirm against the exact stride arithmetic), so a
- # fixed 7x7 kernel does not fit. global_pooling sizes the window to the input
- # automatically and is correct for any crop size.
- layer {
- name: "Pooling2"
- type: "Pooling"
- bottom: "Sum5_3"
- top: "Pooling2"
- pooling_param {
- pool: AVE
- global_pooling: true
- }
- }
- # 1000-way classifier head (ImageNet classes) on the pooled features.
- # Fillers added: Caffe's default filler is constant 0, which would start all
- # weights at zero; harmless when weights are restored from a snapshot.
- layer {
- name: "InnerProduct1"
- type: "InnerProduct"
- bottom: "Pooling2"
- top: "InnerProduct1"
- inner_product_param {
- num_output: 1000
- weight_filler {
- type: "xavier"
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- }
- }
- # Softmax cross-entropy loss between the 1000-way logits and the integer
- # labels from the data layers (Data2). No phase include, so it runs in both
- # TRAIN and TEST.
- layer {
- name: "SoftmaxWithLoss1"
- type: "SoftmaxWithLoss"
- bottom: "InnerProduct1"
- bottom: "Data2"
- top: "SoftmaxWithLoss1"
- }
- # Top-1 classification accuracy, computed only during the TEST phase.
- layer {
- name: "Accuracy1"
- type: "Accuracy"
- bottom: "InnerProduct1"
- bottom: "Data2"
- top: "Accuracy1"
- include {
- phase: TEST
- }
- }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement