Author: Not specified Language: text
Description: Not specified Timestamp: 2018-01-16 22:55:36 +0000
View raw paste Reply
###################### DeepLab ####################
  2. layer {
  3.   name: "conv1_1"
  4.   type: "Convolution"
  5.   bottom: "data"
  6.   top: "conv1_1"
  7.   param {
  8.     lr_mult: 1
  9.     decay_mult: 1
  10.   }
  11.   param {
  12.     lr_mult: 2
  13.     decay_mult: 0
  14.   }
  15.   convolution_param {
  16.     num_output: 64
  17.     pad: 1
  18.     kernel_size: 3
  19.   }
  20. }
  21. layer {
  22.   name: "relu1_1"
  23.   type: "ReLU"
  24.   bottom: "conv1_1"
  25.   top: "conv1_1"
  26. }
  27. layer {
  28.   name: "conv1_2"
  29.   type: "Convolution"
  30.   bottom: "conv1_1"
  31.   top: "conv1_2"
  32.   param {
  33.     lr_mult: 1
  34.     decay_mult: 1
  35.   }
  36.   param {
  37.     lr_mult: 2
  38.     decay_mult: 0
  39.   }
  40.   convolution_param {
  41.     num_output: 64
  42.     pad: 1
  43.     kernel_size: 3
  44.   }
  45. }
  46. layer {
  47.   name: "relu1_2"
  48.   type: "ReLU"
  49.   bottom: "conv1_2"
  50.   top: "conv1_2"
  51. }
  52. layer {
  53.   name: "pool1"
  54.   type: "Pooling"
  55.   bottom: "conv1_2"
  56.   top: "pool1"
  57.   pooling_param {
  58.     pool: MAX
  59.     kernel_size: 3
  60.     stride: 2
  61.     pad: 1
  62.   }
  63. }
  64. layer {
  65.   name: "conv2_1"
  66.   type: "Convolution"
  67.   bottom: "pool1"
  68.   top: "conv2_1"
  69.   param {
  70.     lr_mult: 1
  71.     decay_mult: 1
  72.   }
  73.   param {
  74.     lr_mult: 2
  75.     decay_mult: 0
  76.   }
  77.   convolution_param {
  78.     num_output: 128
  79.     pad: 1
  80.     kernel_size: 3
  81.   }
  82. }
  83. layer {
  84.   name: "relu2_1"
  85.   type: "ReLU"
  86.   bottom: "conv2_1"
  87.   top: "conv2_1"
  88. }
  89. layer {
  90.   name: "conv2_2"
  91.   type: "Convolution"
  92.   bottom: "conv2_1"
  93.   top: "conv2_2"
  94.   param {
  95.     lr_mult: 1
  96.     decay_mult: 1
  97.   }
  98.   param {
  99.     lr_mult: 2
  100.     decay_mult: 0
  101.   }
  102.   convolution_param {
  103.     num_output: 128
  104.     pad: 1
  105.     kernel_size: 3
  106.   }
  107. }
  108. layer {
  109.   name: "relu2_2"
  110.   type: "ReLU"
  111.   bottom: "conv2_2"
  112.   top: "conv2_2"
  113. }
  114. layer {
  115.   name: "pool2"
  116.   type: "Pooling"
  117.   bottom: "conv2_2"
  118.   top: "pool2"
  119.   pooling_param {
  120.     pool: MAX
  121.     kernel_size: 3
  122.     stride: 2
  123.     pad: 1
  124.   }
  125. }
  126. layer {
  127.   name: "conv3_1"
  128.   type: "Convolution"
  129.   bottom: "pool2"
  130.   top: "conv3_1"
  131.   param {
  132.     lr_mult: 1
  133.     decay_mult: 1
  134.   }
  135.   param {
  136.     lr_mult: 2
  137.     decay_mult: 0
  138.   }
  139.   convolution_param {
  140.     num_output: 256
  141.     pad: 1
  142.     kernel_size: 3
  143.   }
  144. }
  145. layer {
  146.   name: "relu3_1"
  147.   type: "ReLU"
  148.   bottom: "conv3_1"
  149.   top: "conv3_1"
  150. }
  151. layer {
  152.   name: "conv3_2"
  153.   type: "Convolution"
  154.   bottom: "conv3_1"
  155.   top: "conv3_2"
  156.   param {
  157.     lr_mult: 1
  158.     decay_mult: 1
  159.   }
  160.   param {
  161.     lr_mult: 2
  162.     decay_mult: 0
  163.   }
  164.   convolution_param {
  165.     num_output: 256
  166.     pad: 1
  167.     kernel_size: 3
  168.   }
  169. }
  170. layer {
  171.   name: "relu3_2"
  172.   type: "ReLU"
  173.   bottom: "conv3_2"
  174.   top: "conv3_2"
  175. }
  176. layer {
  177.   name: "conv3_3"
  178.   type: "Convolution"
  179.   bottom: "conv3_2"
  180.   top: "conv3_3"
  181.   param {
  182.     lr_mult: 1
  183.     decay_mult: 1
  184.   }
  185.   param {
  186.     lr_mult: 2
  187.     decay_mult: 0
  188.   }
  189.   convolution_param {
  190.     num_output: 256
  191.     pad: 1
  192.     kernel_size: 3
  193.   }
  194. }
  195. layer {
  196.   name: "relu3_3"
  197.   type: "ReLU"
  198.   bottom: "conv3_3"
  199.   top: "conv3_3"
  200. }
  201. layer {
  202.   name: "pool3"
  203.   type: "Pooling"
  204.   bottom: "conv3_3"
  205.   top: "pool3"
  206.   pooling_param {
  207.     pool: MAX
  208.     kernel_size: 3
  209.     stride: 2
  210.     pad: 1
  211.   }
  212. }
  213. layer {
  214.   name: "conv4_1"
  215.   type: "Convolution"
  216.   bottom: "pool3"
  217.   top: "conv4_1"
  218.   param {
  219.     lr_mult: 1
  220.     decay_mult: 1
  221.   }
  222.   param {
  223.     lr_mult: 2
  224.     decay_mult: 0
  225.   }
  226.   convolution_param {
  227.     num_output: 512
  228.     pad: 1
  229.     kernel_size: 3
  230.   }
  231. }
  232. layer {
  233.   name: "relu4_1"
  234.   type: "ReLU"
  235.   bottom: "conv4_1"
  236.   top: "conv4_1"
  237. }
  238. layer {
  239.   name: "conv4_2"
  240.   type: "Convolution"
  241.   bottom: "conv4_1"
  242.   top: "conv4_2"
  243.   param {
  244.     lr_mult: 1
  245.     decay_mult: 1
  246.   }
  247.   param {
  248.     lr_mult: 2
  249.     decay_mult: 0
  250.   }
  251.   convolution_param {
  252.     num_output: 512
  253.     pad: 1
  254.     kernel_size: 3
  255.   }
  256. }
  257. layer {
  258.   name: "relu4_2"
  259.   type: "ReLU"
  260.   bottom: "conv4_2"
  261.   top: "conv4_2"
  262. }
  263. layer {
  264.   name: "conv4_3"
  265.   type: "Convolution"
  266.   bottom: "conv4_2"
  267.   top: "conv4_3"
  268.   param {
  269.     lr_mult: 1
  270.     decay_mult: 1
  271.   }
  272.   param {
  273.     lr_mult: 2
  274.     decay_mult: 0
  275.   }
  276.   convolution_param {
  277.     num_output: 512
  278.     pad: 1
  279.     kernel_size: 3
  280.   }
  281. }
  282. layer {
  283.   name: "relu4_3"
  284.   type: "ReLU"
  285.   bottom: "conv4_3"
  286.   top: "conv4_3"
  287. }
  288. layer {
  289.   bottom: "conv4_3"
  290.   top: "pool4"
  291.   name: "pool4"
  292.   type: "Pooling"
  293.   pooling_param {
  294.     pool: MAX
  295.     kernel_size: 3
  296.     pad: 1
  297.     stride: 1
  298.   }
  299. }
  300. layer {
  301.   name: "conv5_1"
  302.   type: "Convolution"
  303.   bottom: "pool4"
  304.   top: "conv5_1"
  305.   param {
  306.     lr_mult: 1
  307.     decay_mult: 1
  308.   }
  309.   param {
  310.     lr_mult: 2
  311.     decay_mult: 0
  312.   }
  313.   convolution_param {
  314.     num_output: 512
  315.     pad: 2
  316.     kernel_size: 3
  317.     dilation: 2
  318.   }
  319. }
  320. layer {
  321.   name: "relu5_1"
  322.   type: "ReLU"
  323.   bottom: "conv5_1"
  324.   top: "conv5_1"
  325. }
  326. layer {
  327.   name: "conv5_2"
  328.   type: "Convolution"
  329.   bottom: "conv5_1"
  330.   top: "conv5_2"
  331.   param {
  332.     lr_mult: 1
  333.     decay_mult: 1
  334.   }
  335.   param {
  336.     lr_mult: 2
  337.     decay_mult: 0
  338.   }
  339.   convolution_param {
  340.     num_output: 512
  341.     pad: 2
  342.     kernel_size: 3
  343.     dilation: 2
  344.   }
  345. }
  346. layer {
  347.   name: "relu5_2"
  348.   type: "ReLU"
  349.   bottom: "conv5_2"
  350.   top: "conv5_2"
  351. }
  352. layer {
  353.   name: "conv5_3"
  354.   type: "Convolution"
  355.   bottom: "conv5_2"
  356.   top: "conv5_3"
  357.   param {
  358.     lr_mult: 1
  359.     decay_mult: 1
  360.   }
  361.   param {
  362.     lr_mult: 2
  363.     decay_mult: 0
  364.   }
  365.   convolution_param {
  366.     num_output: 512
  367.     pad: 2
  368.     kernel_size: 3

This paste is large and only partially shown.
View full paste

View raw paste Reply