name: "DarkNet"
input: "data"
input_shape {
  dim: 1
  dim: 3
  dim: 224
  dim: 224
}
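# Deploy-time definition of the DarkNet classifier: a single 3-channel
# 224x224 input, 19 convolutional layers (each followed by ReLU), five
# 2x2/stride-2 max-pooling stages, global average pooling and a 1000-way softmax.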
#####################################################################
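# Stage 1: conv1 (3x3, 32) + pool1 and conv2 (3x3, 64) + pool2
# reduce the input from 224x224 to 56x56.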
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv1/relu"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv2/relu"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
#####################################################################
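# Stage 2 (56x56): conv3 (3x3, 128), conv4 (1x1, 64) bottleneck, conv5 (3x3, 128).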
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv3/relu"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 64
    pad: 0
    kernel_size: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv4/relu"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv4"
  top: "conv5"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv5/relu"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
}
#####################################################################
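# pool3: 56x56 -> 28x28.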
layer {
  name: "pool3"
  type: "Pooling"
  bottom: "conv5"
  top: "pool3"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
#####################################################################
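# Stage 3 (28x28): conv6 (3x3, 256), conv7 (1x1, 128) bottleneck, conv8 (3x3, 256).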
layer {
  name: "conv6"
  type: "Convolution"
  bottom: "pool3"
  top: "conv6"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv6/relu"
  type: "ReLU"
  bottom: "conv6"
  top: "conv6"
}
layer {
  name: "conv7"
  type: "Convolution"
  bottom: "conv6"
  top: "conv7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 128
    pad: 0
    kernel_size: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv7/relu"
  type: "ReLU"
  bottom: "conv7"
  top: "conv7"
}
layer {
  name: "conv8"
  type: "Convolution"
  bottom: "conv7"
  top: "conv8"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv8/relu"
  type: "ReLU"
  bottom: "conv8"
  top: "conv8"
}
#####################################################################
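# pool4: 28x28 -> 14x14.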
layer {
  name: "pool4"
  type: "Pooling"
  bottom: "conv8"
  top: "pool4"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
#####################################################################
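# Stage 4 (14x14): conv9/conv11/conv13 (3x3, 512) interleaved with
# conv10/conv12 (1x1, 256) bottlenecks.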
layer {
  name: "conv9"
  type: "Convolution"
  bottom: "pool4"
  top: "conv9"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv9/relu"
  type: "ReLU"
  bottom: "conv9"
  top: "conv9"
}
layer {
  name: "conv10"
  type: "Convolution"
  bottom: "conv9"
  top: "conv10"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 0
    kernel_size: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv10/relu"
  type: "ReLU"
  bottom: "conv10"
  top: "conv10"
}
layer {
  name: "conv11"
  type: "Convolution"
  bottom: "conv10"
  top: "conv11"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv11/relu"
  type: "ReLU"
  bottom: "conv11"
  top: "conv11"
}
layer {
  name: "conv12"
  type: "Convolution"
  bottom: "conv11"
  top: "conv12"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 0
    kernel_size: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv12/relu"
  type: "ReLU"
  bottom: "conv12"
  top: "conv12"
}
layer {
  name: "conv13"
  type: "Convolution"
  bottom: "conv12"
  top: "conv13"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv13/relu"
  type: "ReLU"
  bottom: "conv13"
  top: "conv13"
}
#####################################################################
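# pool5: 14x14 -> 7x7.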
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv13"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
#####################################################################
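# Stage 5 (7x7): conv14/conv16/conv18 (3x3, 1024) interleaved with
# conv15/conv17 (1x1, 512) bottlenecks.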
layer {
  name: "conv14"
  type: "Convolution"
  bottom: "pool5"
  top: "conv14"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 1024
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv14/relu"
  type: "ReLU"
  bottom: "conv14"
  top: "conv14"
}
layer {
  name: "conv15"
  type: "Convolution"
  bottom: "conv14"
  top: "conv15"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 0
    kernel_size: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv15/relu"
  type: "ReLU"
  bottom: "conv15"
  top: "conv15"
}
layer {
  name: "conv16"
  type: "Convolution"
  bottom: "conv15"
  top: "conv16"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 1024
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv16/relu"
  type: "ReLU"
  bottom: "conv16"
  top: "conv16"
}
layer {
  name: "conv17"
  type: "Convolution"
  bottom: "conv16"
  top: "conv17"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 512
    pad: 0
    kernel_size: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv17/relu"
  type: "ReLU"
  bottom: "conv17"
  top: "conv17"
}
layer {
  name: "conv18"
  type: "Convolution"
  bottom: "conv17"
  top: "conv18"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 1024
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv18/relu"
  type: "ReLU"
  bottom: "conv18"
  top: "conv18"
}
#####################################################################
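# Classifier head: conv19 (1x1, 1000) maps features to class scores,
# global average pooling collapses 7x7 to 1x1, and softmax yields the
# 1000-way class probabilities.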
layer {
  name: "conv19"
  type: "Convolution"
  bottom: "conv18"
  top: "conv19"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 1000
    pad: 0
    kernel_size: 1
    weight_filler {
      type: "msra"
      variance_norm: AVERAGE
    }
    bias_filler {
      type: "constant"
      value: 0.2
    }
  }
}
layer {
  name: "conv19/relu"
  type: "ReLU"
  bottom: "conv19"
  top: "conv19"
}
layer {
  name: "pool6"
  type: "Pooling"
  bottom: "conv19"
  top: "pool6"
  pooling_param {
    pool: AVE
    global_pooling: true
  }
}
layer {
  name: "softmax"
  type: "Softmax"
  bottom: "pool6"
  top: "softmax"
}
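For reference, below is a minimal pycaffe sketch of how this deploy definition could be loaded and run. The file names darknet19_deploy.prototxt and darknet19.caffemodel are placeholders (only the prototxt is provided here; trained weights must come from elsewhere), and the random input stands in for a properly preprocessed image.

import numpy as np
import caffe

# Placeholder paths: save the definition above as the prototxt and supply
# trained weights separately; neither file name comes from this gist.
MODEL_DEF = 'darknet19_deploy.prototxt'
MODEL_WEIGHTS = 'darknet19.caffemodel'

caffe.set_mode_cpu()
net = caffe.Net(MODEL_DEF, MODEL_WEIGHTS, caffe.TEST)

# The deploy input is a single 3x224x224 blob named "data".
image = np.random.rand(3, 224, 224).astype(np.float32)  # stand-in for a preprocessed image
net.blobs['data'].data[0, ...] = image

out = net.forward()
probs = out['softmax'][0].squeeze()  # 1000-way class probabilities from the "softmax" top
print('top-1 class index:', probs.argmax())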