# WSDDN (Weakly Supervised Deep Detection Network) — Caffe training prototxt.
# Recovered from a Pastebin dump (untitled, Mar 21st 2017); the Pastebin page
# chrome has been replaced by this comment header so the file parses as
# protobuf text format.
name: "WSDDN"
# Python input layer feeding one training batch per iteration.
layer {
  name: 'data'
  type: 'Python'
  # Tops produced by RoIWeakDataLayer (shapes not visible in this file —
  # confirm against roi_data_layer/layer.py):
  #   data        — input image blob
  #   rois        — candidate region proposals consumed by roi_pool5
  #   binarylabel — image-level labels (the weak supervision signal)
  #   boxscores   — per-proposal scores emitted by the data layer
  top: 'data'
  top: 'rois'
  top: 'binarylabel'
  top: 'boxscores'
  python_param {
    module: 'roi_data_layer.layer'
    layer: 'RoIWeakDataLayer'
    # 21 = 20 object classes + background (matches fc8c/fc8d num_output)
    param_str: "'num_classes': 21"
  }
}
  15. ##################################
  16. layer {
  17.   name: "conv1"
  18.   type: "Convolution"
  19.   bottom: "data"
  20.   top: "conv1"
  21.   # learning rate and decay multipliers for the filters
  22.   param { lr_mult: 0 decay_mult: 0 }
  23.   # learning rate and decay multipliers for the biases
  24.   param { lr_mult: 0 decay_mult: 0 }
  25.   convolution_param {
  26.     num_output: 96
  27.     kernel_size: 7
  28.     stride: 2
  29.   }
  30. }
  31. layer {
  32.   name: "relu1"
  33.   type: "ReLU"
  34.   bottom: "conv1"
  35.   top: "conv1"
  36. }
  37. layer {
  38.   name: "norm1"
  39.   type: "LRN"
  40.   bottom: "conv1"
  41.   top: "norm1"
  42.   lrn_param {
  43.     # number of channels to sum over??
  44.     local_size: 5
  45.     # scaling parameter
  46.     alpha: 0.0005
  47.     beta: 0.75
  48.     k: 2
  49.   }
  50. }
  51. layer {
  52.   name: "pool1"
  53.   type: "Pooling"
  54.   bottom: "norm1"
  55.   top: "pool1"
  56.   pooling_param {
  57.     pool: MAX
  58.     kernel_size: 3
  59.     stride: 2
  60.   }
  61. }
  62. layer {
  63.   name: "conv2"
  64.   type: "Convolution"
  65.   bottom: "pool1"
  66.   top: "conv2"
  67.   param {
  68.     lr_mult: 1
  69.   }
  70.   param {
  71.     lr_mult: 2
  72.   }
  73.   convolution_param {
  74.     num_output: 256
  75.     pad: 1
  76.     kernel_size: 5
  77.     stride: 2
  78.   }
  79. }
  80. layer {
  81.   name: "relu2"
  82.   type: "ReLU"
  83.   bottom: "conv2"
  84.   top: "conv2"
  85. }
  86. layer {
  87.   name: "norm2"
  88.   type: "LRN"
  89.   bottom: "conv2"
  90.   top: "norm2"
  91.   lrn_param {
  92.     local_size: 5
  93.     alpha: 0.0005
  94.     beta: 0.75
  95.     k: 2
  96.   }
  97. }
  98. layer {
  99.   name: "pool2"
  100.   type: "Pooling"
  101.   bottom: "norm2"
  102.   top: "pool2"
  103.   pooling_param {
  104.     pool: MAX
  105.     kernel_size: 3
  106.     stride: 2
  107.   }
  108. }
  109. layer {
  110.   name: "conv3"
  111.   type: "Convolution"
  112.   bottom: "pool2"
  113.   top: "conv3"
  114.   param {
  115.     lr_mult: 1
  116.   }
  117.   param {
  118.     lr_mult: 2
  119.   }
  120.   convolution_param {
  121.     num_output: 512
  122.     pad: 1
  123.     kernel_size: 3
  124.   }
  125. }
  126. layer {
  127.   name: "relu3"
  128.   type: "ReLU"
  129.   bottom: "conv3"
  130.   top: "conv3"
  131. }
  132. layer {
  133.   name: "conv4"
  134.   type: "Convolution"
  135.   bottom: "conv3"
  136.   top: "conv4"
  137.   param {
  138.     lr_mult: 1
  139.   }
  140.   param {
  141.     lr_mult: 2
  142.   }
  143.   convolution_param {
  144.     num_output: 512
  145.     pad: 1
  146.     kernel_size: 3
  147.   }
  148. }
  149. layer {
  150.   name: "relu4"
  151.   type: "ReLU"
  152.   bottom: "conv4"
  153.   top: "conv4"
  154. }
  155. layer {
  156.   name: "conv5"
  157.   type: "Convolution"
  158.   bottom: "conv4"
  159.   top: "conv5"
  160.   param {
  161.     lr_mult: 1
  162.   }
  163.   param {
  164.     lr_mult: 2
  165.   }
  166.   convolution_param {
  167.     num_output: 512
  168.     pad: 1
  169.     kernel_size: 3
  170.   }
  171. }
  172. layer {
  173.   name: "relu5"
  174. type: "ReLU"
  175.   bottom: "conv5"
  176.   top: "conv5"
  177. }
  178. layer {
  179.   name: "roi_pool5"
  180.   type: "ROIPooling"
  181.   bottom: "conv5"
  182.   bottom: "rois"
  183.   top: "pool5"
  184.   roi_pooling_param {
  185.     pooled_w: 6
  186.     pooled_h: 6
  187.     spatial_scale: 0.0625 # 1/16
  188.   }
  189. }
  190. layer {
  191.   name: "box_sc"
  192.   type: "Scale"
  193.   bottom: "conv5"
  194.   bottom: "rois"
  195.   top: "pool5"
  196.   roi_pooling_param {
  197.     pooled_w: 6
  198.     pooled_h: 6
  199.     spatial_scale: 0.0625 # 1/16
  200.   }
  201. }
  202. layer {
  203.   name: "fc6"
  204.   type: "InnerProduct"
  205.   bottom: "pool5"
  206.   top: "fc6"
  207.   param {
  208.     lr_mult: 1
  209.   }
  210.   param {
  211.     lr_mult: 2
  212.   }
  213.   inner_product_param {
  214.     num_output: 4096
  215.   }
  216. }
  217. layer {
  218.   name: "relu6"
  219.   type: "ReLU"
  220.   bottom: "fc6"
  221.   top: "fc6"
  222. }
  223. layer {
  224.   name: "drop6"
  225.   type: "Dropout"
  226.   bottom: "fc6"
  227.   top: "fc6"
  228.   dropout_param {
  229.     dropout_ratio: 0.5
  230. }
  231. layer {
  232.   name: "fc7"
  233.   type: "InnerProduct"
  234.   bottom: "fc6"
  235.   top: "fc7"
  236.   param {
  237.     lr_mult: 1
  238.   }
  239.   param {
  240.     lr_mult: 2
  241.   }
  242.   inner_product_param {
  243.     num_output: 1024
  244.   }
  245. }
  246. layer {
  247.   name: "relu7"
  248.   type: "ReLU"
  249.   bottom: "fc7"
  250.   top: "fc7"
  251. }
  252. layer {
  253.   name: "drop7"
  254.   type: "Dropout"
  255.   bottom: "fc7"
  256.   top: "fc7"
  257.   dropout_param {
  258.     dropout_ratio: 0.5
  259.   }
  260. }
  261. layer {
  262.   name: "fc8c"
  263.   type: "InnerProduct"
  264.   bottom: "fc7"
  265.   top: "fc8c"
  266.   param {
  267.     lr_mult: 1
  268.   }
  269.   param {
  270.     lr_mult: 2
  271.   }
  272.   inner_product_param {
  273.     num_output: 21
  274.   }
  275. }
  276. layer {
  277.   name: "fc8d"
  278.   type: "InnerProduct"
  279.   bottom: "fc7"
  280.   top: "fc8d"
  281.   param {
  282.     lr_mult: 1
  283.   }
  284.   param {
  285.     lr_mult: 2
  286.   }
  287.   inner_product_param {
  288.     num_output: 21
  289.   }
  290. }
layer {
  # Classification-stream softmax over fc8c.
  # NOTE(review): axis: 2 requires the bottom blob to have at least 3 axes
  # (e.g. 1 x R x C). A plain InnerProduct top is 2-D (R x C), so this axis
  # only works if the Caffe fork in use shapes fc8c differently — confirm
  # against the targeted fork before changing anything.
  name: "classification_prob"
  type: "Softmax"
  bottom: "fc8c"
  top: "classification_prob"
  softmax_param {
    axis: 2
  }
}
layer {
  # Detection-stream softmax over fc8d.
  # NOTE(review): WSDDN's detection stream normalizes across REGIONS, not
  # classes. For a 2-D (R x C) fc8d output, axis: 1 normalizes over classes —
  # the same as the classification stream — which looks suspicious. Verify
  # the intended axis against the actual fc8d blob shape; also note the
  # inconsistency with classification_prob's axis: 2 above.
  name: "detection_prob"
  type: "Softmax"
  bottom: "fc8d"
  top: "detection_prob"
  softmax_param {
    axis: 1
  }
}
  309. ##################################
  310. layer {
  311.   name: "eltwise-prod"
  312.   type: "Eltwise"
  313.   bottom: "classification_prob"
  314.   bottom: "detection_prob"
  315.   top: "cls_prob"
  316.   eltwise_param { operation: PROD }
  317. }
layer {
  # Python layer reshaping the fused scores for the Reduction below.
  # NOTE(review): implemented by 'SecretAssignmentLayer' in
  # roi_data_layer/layer.py — its output shape is not visible in this file.
  # The WeakPred layer below sums over axis 2, so presumably the regions end
  # up on axis 2 (e.g. 1 x C x R); confirm in the Python source.
  name: 'reshape'
  type: 'Python'
  bottom: 'cls_prob'
  top: 'cls_prob_reshaped'
  python_param {
    module: 'roi_data_layer.layer'
    layer: 'SecretAssignmentLayer'
  }
}
layer {
  # Image-level prediction: sum the per-region fused scores over axis 2
  # (assumed to be the region axis after the reshape layer above — TODO
  # confirm), yielding one aggregate score per class for the
  # weakly-supervised image-level loss.
  name: "WeakPred"
  type: "Reduction"
  bottom: "cls_prob_reshaped"
  top: "weakpred"
  reduction_param {
    operation: SUM
    axis: 2
  }
}
  338.  
# weakpred has dimensions 1 x Nclasses = 1 x 21 (20 object classes plus
# background, matching num_output: 21 in fc8c/fc8d).
########## Loss section ##########
# (Pastebin page footer removed — end of visible chunk; the loss layers
# announced above are not present in this dump.)