summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--  src/caffe/layer_factory.cpp             2
-rw-r--r--  src/caffe/layers/dummy_data_layer.cpp   100
-rw-r--r--  src/caffe/proto/caffe.proto             26
3 files changed, 124 insertions, 4 deletions
diff --git a/src/caffe/layer_factory.cpp b/src/caffe/layer_factory.cpp
index 58c20b1f..d6e506df 100644
--- a/src/caffe/layer_factory.cpp
+++ b/src/caffe/layer_factory.cpp
@@ -36,6 +36,8 @@ Layer<Dtype>* GetLayer(const LayerParameter& param) {
return new DataLayer<Dtype>(param);
case LayerParameter_LayerType_DROPOUT:
return new DropoutLayer<Dtype>(param);
+ case LayerParameter_LayerType_DUMMY_DATA:
+ return new DummyDataLayer<Dtype>(param);
case LayerParameter_LayerType_EUCLIDEAN_LOSS:
return new EuclideanLossLayer<Dtype>(param);
case LayerParameter_LayerType_ELTWISE:
diff --git a/src/caffe/layers/dummy_data_layer.cpp b/src/caffe/layers/dummy_data_layer.cpp
new file mode 100644
index 00000000..38568580
--- /dev/null
+++ b/src/caffe/layers/dummy_data_layer.cpp
@@ -0,0 +1,100 @@
+// Copyright 2014 BVLC and contributors.
+
+#include <vector>
+
+#include "caffe/filler.hpp"
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void DummyDataLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top) {
+ const int num_top = top->size();
+ const DummyDataParameter& param = this->layer_param_.dummy_data_param();
+ const int num_data_filler = param.data_filler_size();
+ CHECK(num_data_filler == 0 || num_data_filler == 1 ||
+ num_data_filler == num_top)
+ << "Number of data fillers must be 0, 1 or equal to the number of tops: "
+ << num_top << "; you specified " << num_data_filler << " data fillers.";
+ CHECK(param.num_size() == 1 || param.num_size() == num_top)
+ << "Must specify either a single (1) 'num' or one for each top blob ("
+ << num_top << "); you specified " << param.num_size() << ".";
+ CHECK(param.channels_size() == 1 || param.channels_size() == num_top)
+ << "Must specify either a single (1) 'channels' or one for each top blob ("
+ << num_top << "); you specified " << param.channels_size() << ".";
+ CHECK(param.height_size() == 1 || param.height_size() == num_top)
+ << "Must specify either a single (1) 'height' or one for each top blob ("
+ << num_top << "); you specified " << param.height_size() << ".";
+ CHECK(param.width_size() == 1 || param.width_size() == num_top)
+ << "Must specify either a single (1) 'width' or one for each top blob ("
+ << num_top << "); you specified " << param.width_size() << ".";
+ // refill_[i] tells Forward i whether or not to actually refill top Blob i.
+ // If refill_[i] is false, Forward does nothing for Blob i. We use this to
+ // avoid wastefully refilling "constant" Blobs in every forward pass.
+ // We first fill refill_ in with the INVERSE of its final values.
+ // The first time we run Forward from the SetUp method, we'll fill only the
+ // Blobs for which refill_ is normally false. These Blobs will never be
+ // filled again.
+ refill_.clear();
+ fillers_.clear();
+ if (num_data_filler <= 1) {
+ FillerParameter filler_param;
+ if (num_data_filler == 0) {
+ filler_param.set_type("constant");
+ filler_param.set_value(0);
+ } else {
+ filler_param.CopyFrom(param.data_filler(0));
+ }
+ // Refill on each iteration iff not using a constant filler,
+ // but use the inverse of this rule for the first run.
+ refill_.resize(1);
+ refill_[0] = (strcmp(filler_param.type().c_str(), "constant") == 0);
+ fillers_.resize(1);
+ fillers_[0].reset(GetFiller<Dtype>(filler_param));
+ } else {
+ refill_.resize(num_top);
+ fillers_.resize(num_top);
+ for (int i = 0; i < num_top; ++i) {
+ fillers_[i].reset(GetFiller<Dtype>(param.data_filler(i)));
+ // Refill on each iteration iff not using a constant filler,
+ // but use the inverse of this rule for the first run.
+ refill_[i] =
+ (strcmp(param.data_filler(i).type().c_str(), "constant") == 0);
+ }
+ }
+ for (int i = 0; i < num_top; ++i) {
+ const int num = (param.num_size() == 1) ? param.num(0) : param.num(i);
+ const int channels =
+ (param.channels_size() == 1) ? param.channels(0) : param.channels(i);
+ const int height =
+ (param.height_size() == 1) ? param.height(0) : param.height(i);
+ const int width =
+ (param.width_size() == 1) ? param.width(0) : param.width(i);
+ (*top)[i]->Reshape(num, channels, height, width);
+ }
+ // Run Forward once, with refill_ inverted, to fill the constant Blobs.
+ Forward(bottom, top);
+ // Invert the inverted refill_ values to refill the desired (non-constant)
+ // Blobs in every usual forward pass.
+ for (int i = 0; i < refill_.size(); ++i) {
+ refill_[i] = !refill_[i];
+ }
+}
+
+template <typename Dtype>
+Dtype DummyDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top) {
+ for (int i = 0; i < top->size(); ++i) {
+ const int filler_id = (fillers_.size() > 1) ? i : 0;
+ if (refill_[filler_id]) {
+ fillers_[filler_id]->Fill((*top)[i]);
+ }
+ }
+ return Dtype(0.);
+}
+
+INSTANTIATE_CLASS(DummyDataLayer);
+
+} // namespace caffe
diff --git a/src/caffe/proto/caffe.proto b/src/caffe/proto/caffe.proto
index e540a95a..60c7daa9 100644
--- a/src/caffe/proto/caffe.proto
+++ b/src/caffe/proto/caffe.proto
@@ -115,7 +115,7 @@ message SolverState {
// NOTE
// Update the next available ID when you add a new LayerParameter field.
//
-// LayerParameter next available ID: 25 (last added: eltwise_param)
+// LayerParameter next available ID: 27 (last added: dummy_data_param)
message LayerParameter {
repeated string bottom = 2; // the name of the bottom blobs
repeated string top = 3; // the name of the top blobs
@@ -127,7 +127,7 @@ message LayerParameter {
// line above the enum. Update the next available ID when you add a new
// LayerType.
//
- // LayerType next available ID: 32 (last added: THRESHOLD)
+ // LayerType next available ID: 33 (last added: DUMMY_DATA)
enum LayerType {
// "NONE" layer type is 0th enum element so that we don't cause confusion
// by defaulting to an existent LayerType (instead, should usually error if
@@ -140,6 +140,7 @@ message LayerParameter {
CONVOLUTION = 4;
DATA = 5;
DROPOUT = 6;
+ DUMMY_DATA = 32;
EUCLIDEAN_LOSS = 7;
ELTWISE = 25;
FLATTEN = 8;
@@ -175,13 +176,12 @@ message LayerParameter {
// The weight decay that is multiplied on the global weight decay.
repeated float weight_decay = 8;
- // Parameters for particular layer types.
- // Parameters next available ID: 26 (last added: ThresholdParameter)
optional ArgMaxParameter argmax_param = 23;
optional ConcatParameter concat_param = 9;
optional ConvolutionParameter convolution_param = 10;
optional DataParameter data_param = 11;
optional DropoutParameter dropout_param = 12;
+ optional DummyDataParameter dummy_data_param = 26;
optional EltwiseParameter eltwise_param = 24;
optional HDF5DataParameter hdf5_data_param = 13;
optional HDF5OutputParameter hdf5_output_param = 14;
@@ -254,6 +254,24 @@ message DropoutParameter {
optional float dropout_ratio = 1 [default = 0.5]; // dropout ratio
}
+// Message that stores parameters used by DummyDataLayer.
+// DummyDataLayer fills any number of arbitrarily shaped blobs with random
+// (or constant) data generated by "Fillers" (see "message FillerParameter").
+message DummyDataParameter {
+ // This layer produces N >= 1 top blobs. DummyDataParameter must specify 1 or N
+ // num, N channels, N height, and N width fields, and must specify 0, 1 or N
+ // data_fillers.
+ //
+ // If 0 data_fillers are specified, ConstantFiller with a value of 0 is used.
+ // If 1 data_filler is specified, it is applied to all top blobs. If N are
+ // specified, the ith is applied to the ith top blob.
+ repeated FillerParameter data_filler = 1;
+ repeated uint32 num = 2;
+ repeated uint32 channels = 3;
+ repeated uint32 height = 4;
+ repeated uint32 width = 5;
+}
+
// Message that stores parameters used by EltwiseLayer
message EltwiseParameter {
enum EltwiseOp {