  1. name: "VGG_ILSVRC_16_layers"
  2. input:"data"
  3. input_dim: 1
  4. input_dim: 3
  5. input_dim: 224
  6. input_dim: 224
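# Note: this appears to be a VGG-16 deploy definition in which most of the
# original 3x3 convolutions are replaced by factorized triplets: a vertical
# 3x1 conv (*_V), a horizontal 1x3 conv (*_H), and a 1x1 pointwise conv (*_P),
# a structure consistent with a low-rank (separable) decomposition of the
# original filters. Where a convolution_param lists two pad / kernel_size
# values, Caffe applies them per spatial axis (height, width), so
# "kernel_size: 3  kernel_size: 1" denotes a 3x1 kernel.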
layer {
  name: "conv1_1"
  type: "Convolution"
  bottom: "data"
  top: "conv1_1"
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu1_1"
  type: "ReLU"
  bottom: "conv1_1"
  top: "conv1_1"
}
layer {
  name: "conv1_2_V"
  type: "Convolution"
  bottom: "conv1_1"
  top: "conv1_2_V"
  convolution_param {
    num_output: 22
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv1_2_H"
  type: "Convolution"
  bottom: "conv1_2_V"
  top: "conv1_2_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 22
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv1_2_P"
  type: "Convolution"
  bottom: "conv1_2_H"
  top: "conv1_2_P"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 1.0
    decay_mult: 2.0
  }
  convolution_param {
    num_output: 59
    pad: 0
    kernel_size: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "relu1_2"
  type: "ReLU"
  bottom: "conv1_2_P"
  top: "conv1_2_P"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1_2_P"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv2_1_V"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2_1_V"
  convolution_param {
    num_output: 37
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv2_1_H"
  type: "Convolution"
  bottom: "conv2_1_V"
  top: "conv2_1_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 37
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv2_1_P"
  type: "Convolution"
  bottom: "conv2_1_H"
  top: "conv2_1_P"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 1.0
    decay_mult: 2.0
  }
  convolution_param {
    num_output: 118
    pad: 0
    kernel_size: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "relu2_1"
  type: "ReLU"
  bottom: "conv2_1_P"
  top: "conv2_1_P"
}
layer {
  name: "conv2_2_V"
  type: "Convolution"
  bottom: "conv2_1_P"
  top: "conv2_2_V"
  convolution_param {
    num_output: 47
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv2_2_H"
  type: "Convolution"
  bottom: "conv2_2_V"
  top: "conv2_2_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 47
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv2_2_P"
  type: "Convolution"
  bottom: "conv2_2_H"
  top: "conv2_2_P"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 1.0
    decay_mult: 2.0
  }
  convolution_param {
    num_output: 119
    pad: 0
    kernel_size: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "relu2_2"
  type: "ReLU"
  bottom: "conv2_2_P"
  top: "conv2_2_P"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2_2_P"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv3_1_V"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3_1_V"
  convolution_param {
    num_output: 83
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv3_1_H"
  type: "Convolution"
  bottom: "conv3_1_V"
  top: "conv3_1_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 83
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv3_1_P"
  type: "Convolution"
  bottom: "conv3_1_H"
  top: "conv3_1_P"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 1.0
    decay_mult: 2.0
  }
  convolution_param {
    num_output: 226
    pad: 0
    kernel_size: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "relu3_1"
  type: "ReLU"
  bottom: "conv3_1_P"
  top: "conv3_1_P"
}
layer {
  name: "conv3_2_V"
  type: "Convolution"
  bottom: "conv3_1_P"
  top: "conv3_2_V"
  convolution_param {
    num_output: 89
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv3_2_H"
  type: "Convolution"
  bottom: "conv3_2_V"
  top: "conv3_2_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 89
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv3_2_P"
  type: "Convolution"
  bottom: "conv3_2_H"
  top: "conv3_2_P"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 1.0
    decay_mult: 2.0
  }
  convolution_param {
    num_output: 243
    pad: 0
    kernel_size: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "relu3_2"
  type: "ReLU"
  bottom: "conv3_2_P"
  top: "conv3_2_P"
}
layer {
  name: "conv3_3_V"
  type: "Convolution"
  bottom: "conv3_2_P"
  top: "conv3_3_V"
  convolution_param {
    num_output: 106
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv3_3_H"
  type: "Convolution"
  bottom: "conv3_3_V"
  top: "conv3_3_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 106
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv3_3_P"
  type: "Convolution"
  bottom: "conv3_3_H"
  top: "conv3_3_P"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 1.0
    decay_mult: 2.0
  }
  convolution_param {
    num_output: 256
    pad: 0
    kernel_size: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "relu3_3"
  type: "ReLU"
  bottom: "conv3_3_P"
  top: "conv3_3_P"
}
layer {
  name: "pool3"
  type: "Pooling"
  bottom: "conv3_3_P"
  top: "pool3"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv4_1_V"
  type: "Convolution"
  bottom: "pool3"
  top: "conv4_1_V"
  convolution_param {
    num_output: 175
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv4_1_H"
  type: "Convolution"
  bottom: "conv4_1_V"
  top: "conv4_1_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 175
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv4_1_P"
  type: "Convolution"
  bottom: "conv4_1_H"
  top: "conv4_1_P"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 1.0
    decay_mult: 2.0
  }
  convolution_param {
    num_output: 482
    pad: 0
    kernel_size: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "relu4_1"
  type: "ReLU"
  bottom: "conv4_1_P"
  top: "conv4_1_P"
}
layer {
  name: "conv4_2_V"
  type: "Convolution"
  bottom: "conv4_1_P"
  top: "conv4_2_V"
  convolution_param {
    num_output: 192
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv4_2_H"
  type: "Convolution"
  bottom: "conv4_2_V"
  top: "conv4_2_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 192
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv4_2_P"
  type: "Convolution"
  bottom: "conv4_2_H"
  top: "conv4_2_P"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 1.0
    decay_mult: 2.0
  }
  convolution_param {
    num_output: 457
    pad: 0
    kernel_size: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "relu4_2"
  type: "ReLU"
  bottom: "conv4_2_P"
  top: "conv4_2_P"
}
layer {
  name: "conv4_3_V"
  type: "Convolution"
  bottom: "conv4_2_P"
  top: "conv4_3_V"
  convolution_param {
    num_output: 227
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv4_3_H"
  type: "Convolution"
  bottom: "conv4_3_V"
  top: "conv4_3_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 227
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv4_3_P"
  type: "Convolution"
  bottom: "conv4_3_H"
  top: "conv4_3_P"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  param {
    lr_mult: 1.0
    decay_mult: 2.0
  }
  convolution_param {
    num_output: 512
    pad: 0
    kernel_size: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
layer {
  name: "relu4_3"
  type: "ReLU"
  bottom: "conv4_3_P"
  top: "conv4_3_P"
}
layer {
  name: "pool4"
  type: "Pooling"
  bottom: "conv4_3_P"
  top: "pool4"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
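# Note: the conv5 stage below is factorized into vertical and horizontal
# convolutions only; each *_H layer outputs the full 512 channels, so no
# separate 1x1 *_P projection layer appears in this stage.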
layer {
  name: "conv5_1_V"
  type: "Convolution"
  bottom: "pool4"
  top: "conv5_1_V"
  convolution_param {
    num_output: 398
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv5_1_H"
  type: "Convolution"
  bottom: "conv5_1_V"
  top: "conv5_1_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "relu5_1"
  type: "ReLU"
  bottom: "conv5_1_H"
  top: "conv5_1_H"
}
layer {
  name: "conv5_2_V"
  type: "Convolution"
  bottom: "conv5_1_H"
  top: "conv5_2_V"
  convolution_param {
    num_output: 390
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv5_2_H"
  type: "Convolution"
  bottom: "conv5_2_V"
  top: "conv5_2_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "relu5_2"
  type: "ReLU"
  bottom: "conv5_2_H"
  top: "conv5_2_H"
}
layer {
  name: "conv5_3_V"
  type: "Convolution"
  bottom: "conv5_2_H"
  top: "conv5_3_V"
  convolution_param {
    num_output: 379
    pad: 1
    pad: 0
    kernel_size: 3
    kernel_size: 1
    stride: 1
  }
}
layer {
  name: "conv5_3_H"
  type: "Convolution"
  bottom: "conv5_3_V"
  top: "conv5_3_H"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: true
    pad: 0
    pad: 1
    kernel_size: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "relu5_3"
  type: "ReLU"
  bottom: "conv5_3_H"
  top: "conv5_3_H"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5_3_H"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
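# The fully connected classifier below (fc6/fc7 with 4096 outputs, fc8 with
# 1000 outputs, dropout, and a final softmax) matches the standard VGG-16
# head and does not appear to be factorized.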
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  inner_product_param {
    num_output: 4096
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  inner_product_param {
    num_output: 4096
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc8"
  type: "InnerProduct"
  bottom: "fc7"
  top: "fc8"
  inner_product_param {
    num_output: 1000
  }
}

layer {
  name: "loss"
  type: "Softmax"
  bottom: "fc8"
  top: "loss"
}
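
A minimal pycaffe sketch for loading and running this definition, assuming Caffe with its Python bindings is installed and that the prototxt above is saved separately as vgg16_lowrank_deploy.prototxt with matching weights in vgg16_lowrank.caffemodel (both filenames are placeholders, not part of the original paste):

import numpy as np
import caffe

caffe.set_mode_cpu()

# Placeholder paths; substitute the actual prototxt/caffemodel files.
net = caffe.Net('vgg16_lowrank_deploy.prototxt',
                'vgg16_lowrank.caffemodel',
                caffe.TEST)

# The input blob is 1 x 3 x 224 x 224, as declared by the input_dim fields.
image = np.random.rand(1, 3, 224, 224).astype(np.float32)
net.blobs['data'].data[...] = image

out = net.forward()
print(out['loss'].shape)  # softmax output over the 1000 classes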