-rw-r--r--  models/bvlc_reference_caffenet/train_val.prototxt            |  16 +
-rw-r--r--  models/bvlc_reference_caffenet/train_val_mean_value.prototxt | 348 -
2 files changed, 16 insertions(+), 348 deletions(-)
diff --git a/models/bvlc_reference_caffenet/train_val.prototxt b/models/bvlc_reference_caffenet/train_val.prototxt
index 073d8aef..00fcc080 100644
--- a/models/bvlc_reference_caffenet/train_val.prototxt
+++ b/models/bvlc_reference_caffenet/train_val.prototxt
@@ -14,6 +14,14 @@ layers {
     mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
     mirror: true
   }
+# mean pixel / channel-wise mean instead of mean image
+# transform_param {
+#   crop_size: 227
+#   mean_value: 104
+#   mean_value: 117
+#   mean_value: 123
+#   mirror: true
+# }
   include: { phase: TRAIN }
 }
 layers {
@@ -31,6 +39,14 @@ layers {
     mean_file: "data/ilsvrc12/imagenet_mean.binaryproto"
     mirror: false
   }
+# mean pixel / channel-wise mean instead of mean image
+# transform_param {
+#   crop_size: 227
+#   mean_value: 104
+#   mean_value: 117
+#   mean_value: 123
+#   mirror: true
+# }
   include: { phase: TEST }
 }
 layers {
diff --git a/models/bvlc_reference_caffenet/train_val_mean_value.prototxt b/models/bvlc_reference_caffenet/train_val_mean_value.prototxt
deleted file mode 100644
index c2fbb711..00000000
--- a/models/bvlc_reference_caffenet/train_val_mean_value.prototxt
+++ /dev/null
@@ -1,348 +0,0 @@
-name: "CaffeNet"
-layers {
-  name: "data"
-  type: DATA
-  top: "data"
-  top: "label"
-  data_param {
-    source: "examples/imagenet/ilsvrc12_train_leveldb"
-    batch_size: 256
-  }
-  transform_param {
-    crop_size: 227
-    mean_value: 104
-    mean_value: 117
-    mean_value: 123
-    mirror: true
-  }
-  include: { phase: TRAIN }
-}
-layers {
-  name: "data"
-  type: DATA
-  top: "data"
-  top: "label"
-  data_param {
-    source: "examples/imagenet/ilsvrc12_val_leveldb"
-    batch_size: 50
-  }
-  transform_param {
-    crop_size: 227
-    mean_value: 104
-    mean_value: 117
-    mean_value: 123
-    mirror: false
-  }
-  include: { phase: TEST }
-}
-layers {
-  name: "conv1"
-  type: CONVOLUTION
-  bottom: "data"
-  top: "conv1"
-  blobs_lr: 1
-  blobs_lr: 2
-  weight_decay: 1
-  weight_decay: 0
-  convolution_param {
-    num_output: 96
-    kernel_size: 11
-    stride: 4
-    weight_filler {
-      type: "gaussian"
-      std: 0.01
-    }
-    bias_filler {
-      type: "constant"
-      value: 0
-    }
-  }
-}
-layers {
-  name: "relu1"
-  type: RELU
-  bottom: "conv1"
-  top: "conv1"
-}
-layers {
-  name: "pool1"
-  type: POOLING
-  bottom: "conv1"
-  top: "pool1"
-  pooling_param {
-    pool: MAX
-    kernel_size: 3
-    stride: 2
-  }
-}
-layers {
-  name: "norm1"
-  type: LRN
-  bottom: "pool1"
-  top: "norm1"
-  lrn_param {
-    local_size: 5
-    alpha: 0.0001
-    beta: 0.75
-  }
-}
-layers {
-  name: "conv2"
-  type: CONVOLUTION
-  bottom: "norm1"
-  top: "conv2"
-  blobs_lr: 1
-  blobs_lr: 2
-  weight_decay: 1
-  weight_decay: 0
-  convolution_param {
-    num_output: 256
-    pad: 2
-    kernel_size: 5
-    group: 2
-    weight_filler {
-      type: "gaussian"
-      std: 0.01
-    }
-    bias_filler {
-      type: "constant"
-      value: 1
-    }
-  }
-}
-layers {
-  name: "relu2"
-  type: RELU
-  bottom: "conv2"
-  top: "conv2"
-}
-layers {
-  name: "pool2"
-  type: POOLING
-  bottom: "conv2"
-  top: "pool2"
-  pooling_param {
-    pool: MAX
-    kernel_size: 3
-    stride: 2
-  }
-}
-layers {
-  name: "norm2"
-  type: LRN
-  bottom: "pool2"
-  top: "norm2"
-  lrn_param {
-    local_size: 5
-    alpha: 0.0001
-    beta: 0.75
-  }
-}
-layers {
-  name: "conv3"
-  type: CONVOLUTION
-  bottom: "norm2"
-  top: "conv3"
-  blobs_lr: 1
-  blobs_lr: 2
-  weight_decay: 1
-  weight_decay: 0
-  convolution_param {
-    num_output: 384
-    pad: 1
-    kernel_size: 3
-    weight_filler {
-      type: "gaussian"
-      std: 0.01
-    }
-    bias_filler {
-      type: "constant"
-      value: 0
-    }
-  }
-}
-layers {
-  name: "relu3"
-  type: RELU
-  bottom: "conv3"
-  top: "conv3"
-}
-layers {
-  name: "conv4"
-  type: CONVOLUTION
-  bottom: "conv3"
-  top: "conv4"
-  blobs_lr: 1
-  blobs_lr: 2
-  weight_decay: 1
-  weight_decay: 0
-  convolution_param {
-    num_output: 384
-    pad: 1
-    kernel_size: 3
-    group: 2
-    weight_filler {
-      type: "gaussian"
-      std: 0.01
-    }
-    bias_filler {
-      type: "constant"
-      value: 1
-    }
-  }
-}
-layers {
-  name: "relu4"
-  type: RELU
-  bottom: "conv4"
-  top: "conv4"
-}
-layers {
-  name: "conv5"
-  type: CONVOLUTION
-  bottom: "conv4"
-  top: "conv5"
-  blobs_lr: 1
-  blobs_lr: 2
-  weight_decay: 1
-  weight_decay: 0
-  convolution_param {
-    num_output: 256
-    pad: 1
-    kernel_size: 3
-    group: 2
-    weight_filler {
-      type: "gaussian"
-      std: 0.01
-    }
-    bias_filler {
-      type: "constant"
-      value: 1
-    }
-  }
-}
-layers {
-  name: "relu5"
-  type: RELU
-  bottom: "conv5"
-  top: "conv5"
-}
-layers {
-  name: "pool5"
-  type: POOLING
-  bottom: "conv5"
-  top: "pool5"
-  pooling_param {
-    pool: MAX
-    kernel_size: 3
-    stride: 2
-  }
-}
-layers {
-  name: "fc6"
-  type: INNER_PRODUCT
-  bottom: "pool5"
-  top: "fc6"
-  blobs_lr: 1
-  blobs_lr: 2
-  weight_decay: 1
-  weight_decay: 0
-  inner_product_param {
-    num_output: 4096
-    weight_filler {
-      type: "gaussian"
-      std: 0.005
-    }
-    bias_filler {
-      type: "constant"
-      value: 1
-    }
-  }
-}
-layers {
-  name: "relu6"
-  type: RELU
-  bottom: "fc6"
-  top: "fc6"
-}
-layers {
-  name: "drop6"
-  type: DROPOUT
-  bottom: "fc6"
-  top: "fc6"
-  dropout_param {
-    dropout_ratio: 0.5
-  }
-}
-layers {
-  name: "fc7"
-  type: INNER_PRODUCT
-  bottom: "fc6"
-  top: "fc7"
-  blobs_lr: 1
-  blobs_lr: 2
-  weight_decay: 1
-  weight_decay: 0
-  inner_product_param {
-    num_output: 4096
-    weight_filler {
-      type: "gaussian"
-      std: 0.005
-    }
-    bias_filler {
-      type: "constant"
-      value: 1
-    }
-  }
-}
-layers {
-  name: "relu7"
-  type: RELU
-  bottom: "fc7"
-  top: "fc7"
-}
-layers {
-  name: "drop7"
-  type: DROPOUT
-  bottom: "fc7"
-  top: "fc7"
-  dropout_param {
-    dropout_ratio: 0.5
-  }
-}
-layers {
-  name: "fc8"
-  type: INNER_PRODUCT
-  bottom: "fc7"
-  top: "fc8"
-  blobs_lr: 1
-  blobs_lr: 2
-  weight_decay: 1
-  weight_decay: 0
-  inner_product_param {
-    num_output: 1000
-    weight_filler {
-      type: "gaussian"
-      std: 0.01
-    }
-    bias_filler {
-      type: "constant"
-      value: 0
-    }
-  }
-}
-layers {
-  name: "accuracy"
-  type: ACCURACY
-  bottom: "fc8"
-  bottom: "label"
-  top: "accuracy"
-  include: { phase: TEST }
-}
-layers {
-  name: "loss"
-  type: SOFTMAX_LOSS
-  bottom: "fc8"
-  bottom: "label"
-  top: "loss"
-}
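
Note on the change: the separate train_val_mean_value.prototxt is dropped, and train_val.prototxt instead carries a commented-out transform_param showing the alternative: subtract one mean per BGR channel (104, 117, 123) rather than the per-pixel mean image from imagenet_mean.binaryproto. The following is a minimal NumPy sketch of what the two preprocessing options amount to; the helper names and the random crop are illustrative and not part of the diff or of Caffe's API.

    # Sketch only: per-pixel mean image vs. channel-wise mean values.
    import numpy as np

    def subtract_mean_image(crop, mean_image):
        # mean_file variant: needs the full mean blob, cropped to the
        # same 227x227 window as the input (C x H x W, BGR order).
        return crop - mean_image

    def subtract_mean_value(crop, mean_bgr=(104, 117, 123)):
        # mean_value variant: one scalar per BGR channel, no mean file.
        return crop - np.array(mean_bgr, dtype=crop.dtype).reshape(3, 1, 1)

    # Fake 3 x 227 x 227 crop in Caffe's C x H x W, BGR layout.
    crop = np.random.randint(0, 256, size=(3, 227, 227)).astype(np.float32)
    out = subtract_mean_value(crop)

The channel-wise form is a coarser estimate of the data mean, but it removes the dependency on shipping or regenerating imagenet_mean.binaryproto, which is presumably why a single train_val.prototxt with a commented-out alternative replaces the second file.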