author     Jeff Donahue <jeff.donahue@gmail.com>    2015-01-07 20:41:09 -0800
committer  Jeff Donahue <jeff.donahue@gmail.com>    2015-02-05 14:49:21 -0800
commit     3b13846486875be2911716bcd1947dca71a97817
tree       082b89cb75f1369635ca1365e41182a7f8730b40
parent     648aed72acf1c506009ddb33d8cace40b75e176e
Layer type is a string

Replace the LayerParameter.LayerType enum with a free-form string field.
Each layer now reports its type via type(), which returns the C++ class
name minus the "Layer" suffix (e.g. "SoftmaxWithLoss"); the layer factory
registry is keyed by that string, and an unknown type fails with a message
listing all registered types. Prototxt definitions accordingly write
type: 'Softmax' instead of type: SOFTMAX.
 include/caffe/common_layers.hpp                       |  40
 include/caffe/data_layers.hpp                         |  28
 include/caffe/layer.hpp                               |  27
 include/caffe/layer_factory.hpp                       |  52
 include/caffe/loss_layers.hpp                         |  34
 include/caffe/neuron_layers.hpp                       |  39
 include/caffe/util/upgrade_proto.hpp                  |   2
 include/caffe/vision_layers.hpp                       |  22
 src/caffe/layer_factory.cpp                           |  12
 src/caffe/layers/absval_layer.cpp                     |   5
 src/caffe/layers/accuracy_layer.cpp                   |   3
 src/caffe/layers/argmax_layer.cpp                     |   2
 src/caffe/layers/bnll_layer.cpp                       |   3
 src/caffe/layers/concat_layer.cpp                     |   3
 src/caffe/layers/contrastive_loss_layer.cpp           |   3
 src/caffe/layers/conv_layer.cpp                       |   1
 src/caffe/layers/data_layer.cpp                       |   2
 src/caffe/layers/deconv_layer.cpp                     |   3
 src/caffe/layers/dropout_layer.cpp                    |   3
 src/caffe/layers/dummy_data_layer.cpp                 |   3
 src/caffe/layers/eltwise_layer.cpp                    |   3
 src/caffe/layers/euclidean_loss_layer.cpp             |   3
 src/caffe/layers/exp_layer.cpp                        |   3
 src/caffe/layers/flatten_layer.cpp                    |   3
 src/caffe/layers/hdf5_data_layer.cpp                  |   3
 src/caffe/layers/hdf5_output_layer.cpp                |   3
 src/caffe/layers/hinge_loss_layer.cpp                 |   5
 src/caffe/layers/im2col_layer.cpp                     |   3
 src/caffe/layers/image_data_layer.cpp                 |   3
 src/caffe/layers/infogain_loss_layer.cpp              |   6
 src/caffe/layers/inner_product_layer.cpp              |   3
 src/caffe/layers/lrn_layer.cpp                        |   3
 src/caffe/layers/memory_data_layer.cpp                |   3
 src/caffe/layers/multinomial_logistic_loss_layer.cpp  |   5
 src/caffe/layers/mvn_layer.cpp                        |   3
 src/caffe/layers/pooling_layer.cpp                    |   1
 src/caffe/layers/power_layer.cpp                      |   3
 src/caffe/layers/relu_layer.cpp                       |   1
 src/caffe/layers/sigmoid_cross_entropy_loss_layer.cpp |   5
 src/caffe/layers/sigmoid_cross_entropy_loss_layer.cu  |   2
 src/caffe/layers/silence_layer.cpp                    |   3
 src/caffe/layers/slice_layer.cpp                      |   3
 src/caffe/layers/softmax_layer.cpp                    |   1
 src/caffe/layers/softmax_loss_layer.cpp               |   6
 src/caffe/layers/split_layer.cpp                      |   5
 src/caffe/layers/threshold_layer.cpp                  |   3
 src/caffe/layers/window_data_layer.cpp                |   3
 src/caffe/proto/caffe.proto                           |  54
 src/caffe/test/test_gradient_based_solver.cpp         |   6
 src/caffe/test/test_net.cpp                           | 258
 src/caffe/test/test_protobuf.cpp                      |   2
 src/caffe/test/test_solver.cpp                        |   8
 src/caffe/test/test_split_layer.cpp                   | 194
 src/caffe/test/test_upgrade_proto.cpp                 | 106
 src/caffe/util/insert_splits.cpp                      |   2
 src/caffe/util/upgrade_proto.cpp                      |  74
 56 files changed, 487 insertions(+), 592 deletions(-)
diff --git a/include/caffe/common_layers.hpp b/include/caffe/common_layers.hpp
index 9718b825..3a5ccd21 100644
--- a/include/caffe/common_layers.hpp
+++ b/include/caffe/common_layers.hpp
@@ -43,9 +43,7 @@ class ArgMaxLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_ARGMAX;
- }
+ virtual inline const char* type() const { return "ArgMax"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
@@ -86,9 +84,7 @@ class ConcatLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_CONCAT;
- }
+ virtual inline const char* type() const { return "Concat"; }
virtual inline int MinBottomBlobs() const { return 2; }
virtual inline int ExactNumTopBlobs() const { return 1; }
@@ -166,9 +162,7 @@ class EltwiseLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_ELTWISE;
- }
+ virtual inline const char* type() const { return "Eltwise"; }
virtual inline int MinBottomBlobs() const { return 2; }
virtual inline int ExactNumTopBlobs() const { return 1; }
@@ -207,9 +201,7 @@ class FlattenLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_FLATTEN;
- }
+ virtual inline const char* type() const { return "Flatten"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
@@ -260,9 +252,7 @@ class InnerProductLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_INNER_PRODUCT;
- }
+ virtual inline const char* type() const { return "InnerProduct"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
@@ -296,9 +286,7 @@ class MVNLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_MVN;
- }
+ virtual inline const char* type() const { return "MVN"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
@@ -330,9 +318,7 @@ class SilenceLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SILENCE;
- }
+ virtual inline const char* type() const { return "Silence"; }
virtual inline int MinBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 0; }
@@ -362,9 +348,7 @@ class SoftmaxLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SOFTMAX;
- }
+ virtual inline const char* type() const { return "Softmax"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
@@ -426,9 +410,7 @@ class SplitLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SPLIT;
- }
+ virtual inline const char* type() const { return "Split"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int MinTopBlobs() const { return 1; }
@@ -461,9 +443,7 @@ class SliceLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SLICE;
- }
+ virtual inline const char* type() const { return "Slice"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int MinTopBlobs() const { return 2; }
diff --git a/include/caffe/data_layers.hpp b/include/caffe/data_layers.hpp
index e9c83856..e55a0bbb 100644
--- a/include/caffe/data_layers.hpp
+++ b/include/caffe/data_layers.hpp
@@ -90,9 +90,7 @@ class DataLayer : public BasePrefetchingDataLayer<Dtype> {
virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_DATA;
- }
+ virtual inline const char* type() const { return "Data"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int MinTopBlobs() const { return 1; }
virtual inline int MaxTopBlobs() const { return 2; }
@@ -120,9 +118,7 @@ class DummyDataLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_DUMMY_DATA;
- }
+ virtual inline const char* type() const { return "DummyData"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int MinTopBlobs() const { return 1; }
@@ -155,9 +151,7 @@ class HDF5DataLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_HDF5_DATA;
- }
+ virtual inline const char* type() const { return "HDF5Data"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int MinTopBlobs() const { return 1; }
@@ -195,9 +189,7 @@ class HDF5OutputLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_HDF5_OUTPUT;
- }
+ virtual inline const char* type() const { return "HDF5Output"; }
// TODO: no limit on the number of blobs
virtual inline int ExactNumBottomBlobs() const { return 2; }
virtual inline int ExactNumTopBlobs() const { return 0; }
@@ -235,9 +227,7 @@ class ImageDataLayer : public BasePrefetchingDataLayer<Dtype> {
virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_IMAGE_DATA;
- }
+ virtual inline const char* type() const { return "ImageData"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int ExactNumTopBlobs() const { return 2; }
@@ -263,9 +253,7 @@ class MemoryDataLayer : public BaseDataLayer<Dtype> {
virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_MEMORY_DATA;
- }
+ virtual inline const char* type() const { return "MemoryData"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int ExactNumTopBlobs() const { return 2; }
@@ -309,9 +297,7 @@ class WindowDataLayer : public BasePrefetchingDataLayer<Dtype> {
virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_WINDOW_DATA;
- }
+ virtual inline const char* type() const { return "WindowData"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int ExactNumTopBlobs() const { return 2; }
diff --git a/include/caffe/layer.hpp b/include/caffe/layer.hpp
index 8a8330bc..c6461c19 100644
--- a/include/caffe/layer.hpp
+++ b/include/caffe/layer.hpp
@@ -179,18 +179,9 @@ class Layer {
}
/**
- * @brief Returns the layer type as an enum value.
+ * @brief Returns the layer type.
*/
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_NONE;
- }
-
- /**
- * @brief Returns the layer type name.
- */
- virtual inline const string& type_name() const {
- return LayerParameter_LayerType_Name(type());
- }
+ virtual inline const char* type() const { return ""; }
/**
* @brief Returns the exact number of bottom blobs required by the layer,
@@ -347,37 +338,37 @@ class Layer {
const vector<Blob<Dtype>*>& top) {
if (ExactNumBottomBlobs() >= 0) {
CHECK_EQ(ExactNumBottomBlobs(), bottom.size())
- << type_name() << " Layer takes " << ExactNumBottomBlobs()
+ << type() << " Layer takes " << ExactNumBottomBlobs()
<< " bottom blob(s) as input.";
}
if (MinBottomBlobs() >= 0) {
CHECK_LE(MinBottomBlobs(), bottom.size())
- << type_name() << " Layer takes at least " << MinBottomBlobs()
+ << type() << " Layer takes at least " << MinBottomBlobs()
<< " bottom blob(s) as input.";
}
if (MaxBottomBlobs() >= 0) {
CHECK_GE(MaxBottomBlobs(), bottom.size())
- << type_name() << " Layer takes at most " << MaxBottomBlobs()
+ << type() << " Layer takes at most " << MaxBottomBlobs()
<< " bottom blob(s) as input.";
}
if (ExactNumTopBlobs() >= 0) {
CHECK_EQ(ExactNumTopBlobs(), top.size())
- << type_name() << " Layer produces " << ExactNumTopBlobs()
+ << type() << " Layer produces " << ExactNumTopBlobs()
<< " top blob(s) as output.";
}
if (MinTopBlobs() >= 0) {
CHECK_LE(MinTopBlobs(), top.size())
- << type_name() << " Layer produces at least " << MinTopBlobs()
+ << type() << " Layer produces at least " << MinTopBlobs()
<< " top blob(s) as output.";
}
if (MaxTopBlobs() >= 0) {
CHECK_GE(MaxTopBlobs(), top.size())
- << type_name() << " Layer produces at most " << MaxTopBlobs()
+ << type() << " Layer produces at most " << MaxTopBlobs()
<< " top blob(s) as output.";
}
if (EqualNumBottomTopBlobs()) {
CHECK_EQ(bottom.size(), top.size())
- << type_name() << " Layer produces one top blob as output for each "
+ << type() << " Layer produces one top blob as output for each "
<< "bottom blob input.";
}
}
diff --git a/include/caffe/layer_factory.hpp b/include/caffe/layer_factory.hpp
index c1fd6aa0..ede5d1fa 100644
--- a/include/caffe/layer_factory.hpp
+++ b/include/caffe/layer_factory.hpp
@@ -12,17 +12,13 @@
* // your implementations
* };
*
- * and its type is defined in the protobuffer as
- *
- * enum LayerType {
- * // other definitions
- * AWESOME = 46,
- * }
+ * and its type is its C++ class name, but without the "Layer" at the end
+ * ("MyAwesomeLayer" -> "MyAwesome").
*
* If the layer is going to be created simply by its constructor, in your c++
* file, add the following line:
*
- * REGISTER_LAYER_CLASS(AWESOME, MyAwesomeLayer);
+ * REGISTER_LAYER_CLASS(MyAwesome);
*
* Or, if the layer is going to be created by another creator function, in the
* format of:
@@ -35,7 +31,7 @@
* (for example, when your layer has multiple backends, see GetConvolutionLayer
* for a use case), then you can register the creator function instead, like
*
- * REGISTER_LAYER_CREATOR(AWESOME, GetMyAwesomeLayer)
+ * REGISTER_LAYER_CREATOR(MyAwesome, GetMyAwesomeLayer)
*
* Note that each layer type should only be registered once.
*/
@@ -44,6 +40,7 @@
#define CAFFE_LAYER_FACTORY_H_
#include <map>
+#include <string>
#include "caffe/common.hpp"
#include "caffe/proto/caffe.pb.h"
@@ -57,7 +54,7 @@ template <typename Dtype>
class LayerRegistry {
public:
typedef Layer<Dtype>* (*Creator)(const LayerParameter&);
- typedef std::map<LayerParameter_LayerType, Creator> CreatorRegistry;
+ typedef std::map<string, Creator> CreatorRegistry;
static CreatorRegistry& Registry() {
static CreatorRegistry* g_registry_ = new CreatorRegistry();
@@ -65,8 +62,7 @@ class LayerRegistry {
}
// Adds a creator.
- static void AddCreator(const LayerParameter_LayerType& type,
- Creator creator) {
+ static void AddCreator(const string& type, Creator creator) {
CreatorRegistry& registry = Registry();
CHECK_EQ(registry.count(type), 0)
<< "Layer type " << type << " already registered.";
@@ -76,9 +72,10 @@ class LayerRegistry {
// Get a layer using a LayerParameter.
static Layer<Dtype>* CreateLayer(const LayerParameter& param) {
LOG(INFO) << "Creating layer " << param.name();
- const LayerParameter_LayerType& type = param.type();
+ const string& type = param.type();
CreatorRegistry& registry = Registry();
- CHECK_EQ(registry.count(type), 1);
+ CHECK_EQ(registry.count(type), 1) << "Unknown layer type: " << type
+ << " (known types: " << LayerTypeList() << ")";
return registry[type](param);
}
@@ -86,13 +83,26 @@ class LayerRegistry {
// Layer registry should never be instantiated - everything is done with its
// static variables.
LayerRegistry() {}
+
+ static string LayerTypeList() {
+ CreatorRegistry& registry = Registry();
+ string layer_types;
+ for (typename CreatorRegistry::iterator iter = registry.begin();
+ iter != registry.end(); ++iter) {
+ if (iter != registry.begin()) {
+ layer_types += ", ";
+ }
+ layer_types += iter->first;
+ }
+ return layer_types;
+ }
};
template <typename Dtype>
class LayerRegisterer {
public:
- LayerRegisterer(const LayerParameter_LayerType& type,
+ LayerRegisterer(const string& type,
Layer<Dtype>* (*creator)(const LayerParameter&)) {
// LOG(INFO) << "Registering layer type: " << type;
LayerRegistry<Dtype>::AddCreator(type, creator);
@@ -101,17 +111,15 @@ class LayerRegisterer {
#define REGISTER_LAYER_CREATOR(type, creator) \
- static LayerRegisterer<float> g_creator_f_##type( \
- LayerParameter_LayerType_##type, creator<float>); \
- static LayerRegisterer<double> g_creator_d_##type( \
- LayerParameter_LayerType_##type, creator<double>)
+ static LayerRegisterer<float> g_creator_f_##type(#type, creator<float>); \
+ static LayerRegisterer<double> g_creator_d_##type(#type, creator<double>) \
-#define REGISTER_LAYER_CLASS(type, clsname) \
+#define REGISTER_LAYER_CLASS(type) \
template <typename Dtype> \
- Layer<Dtype>* Creator_##clsname(const LayerParameter& param) { \
- return new clsname<Dtype>(param); \
+ Layer<Dtype>* Creator_##type##Layer(const LayerParameter& param) { \
+ return new type##Layer<Dtype>(param); \
} \
- REGISTER_LAYER_CREATOR(type, Creator_##clsname)
+ REGISTER_LAYER_CREATOR(type, Creator_##type##Layer)
} // namespace caffe
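
To make the registration convention from the header comment above concrete, here is a minimal sketch of a layer registered by its constructor under the new string-keyed registry. MyAwesomeLayer and its identity Forward/Backward bodies are hypothetical, not part of this commit:

    #include "caffe/layer.hpp"
    #include "caffe/layer_factory.hpp"
    #include "caffe/util/math_functions.hpp"

    namespace caffe {

    template <typename Dtype>
    class MyAwesomeLayer : public Layer<Dtype> {
     public:
      explicit MyAwesomeLayer(const LayerParameter& param)
          : Layer<Dtype>(param) {}
      // The type string is the class name minus the "Layer" suffix.
      virtual inline const char* type() const { return "MyAwesome"; }
      virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
          const vector<Blob<Dtype>*>& top) {
        top[0]->ReshapeLike(*bottom[0]);
      }

     protected:
      // Identity pass-through, just to make the sketch complete.
      virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
          const vector<Blob<Dtype>*>& top) {
        caffe_copy(bottom[0]->count(), bottom[0]->cpu_data(),
                   top[0]->mutable_cpu_data());
      }
      virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
          const vector<bool>& propagate_down,
          const vector<Blob<Dtype>*>& bottom) {
        if (propagate_down[0]) {
          caffe_copy(top[0]->count(), top[0]->cpu_diff(),
                     bottom[0]->mutable_cpu_diff());
        }
      }
    };

    INSTANTIATE_CLASS(MyAwesomeLayer);
    // Stringizes its argument: registers float/double creators under the
    // key "MyAwesome".
    REGISTER_LAYER_CLASS(MyAwesome);

    }  // namespace caffe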
diff --git a/include/caffe/loss_layers.hpp b/include/caffe/loss_layers.hpp
index 600a13b8..f14b9716 100644
--- a/include/caffe/loss_layers.hpp
+++ b/include/caffe/loss_layers.hpp
@@ -37,10 +37,7 @@ class AccuracyLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_ACCURACY;
- }
-
+ virtual inline const char* type() const { return "Accuracy"; }
virtual inline int ExactNumBottomBlobs() const { return 2; }
virtual inline int ExactNumTopBlobs() const { return 1; }
@@ -154,9 +151,7 @@ class ContrastiveLossLayer : public LossLayer<Dtype> {
const vector<Blob<Dtype>*>& top);
virtual inline int ExactNumBottomBlobs() const { return 3; }
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_CONTRASTIVE_LOSS;
- }
+ virtual inline const char* type() const { return "ContrastiveLoss"; }
/**
* Unlike most loss layers, in the ContrastiveLossLayer we can backpropagate
* to the first two inputs.
@@ -242,10 +237,7 @@ class EuclideanLossLayer : public LossLayer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_EUCLIDEAN_LOSS;
- }
-
+ virtual inline const char* type() const { return "EuclideanLoss"; }
/**
* Unlike most loss layers, in the EuclideanLossLayer we can backpropagate
* to both inputs -- override to return true and always allow force_backward.
@@ -351,9 +343,7 @@ class HingeLossLayer : public LossLayer<Dtype> {
explicit HingeLossLayer(const LayerParameter& param)
: LossLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_HINGE_LOSS;
- }
+ virtual inline const char* type() const { return "HingeLoss"; }
protected:
/// @copydoc HingeLossLayer
@@ -440,9 +430,7 @@ class InfogainLossLayer : public LossLayer<Dtype> {
virtual inline int MinBottomBlobs() const { return 2; }
virtual inline int MaxBottomBlobs() const { return 3; }
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_INFOGAIN_LOSS;
- }
+ virtual inline const char* type() const { return "InfogainLoss"; }
protected:
/// @copydoc InfogainLossLayer
@@ -524,9 +512,7 @@ class MultinomialLogisticLossLayer : public LossLayer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS;
- }
+ virtual inline const char* type() const { return "MultinomialLogisticLoss"; }
protected:
/// @copydoc MultinomialLogisticLossLayer
@@ -606,9 +592,7 @@ class SigmoidCrossEntropyLossLayer : public LossLayer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SIGMOID_CROSS_ENTROPY_LOSS;
- }
+ virtual inline const char* type() const { return "SigmoidCrossEntropyLoss"; }
protected:
/// @copydoc SigmoidCrossEntropyLossLayer
@@ -711,9 +695,7 @@ class SoftmaxWithLossLayer : public LossLayer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SOFTMAX_LOSS;
- }
+ virtual inline const char* type() const { return "SoftmaxWithLoss"; }
virtual inline int ExactNumBottomBlobs() const { return -1; }
virtual inline int MinBottomBlobs() const { return 2; }
virtual inline int MaxBottomBlobs() const { return 3; }
diff --git a/include/caffe/neuron_layers.hpp b/include/caffe/neuron_layers.hpp
index 5daeeefe..b65d9843 100644
--- a/include/caffe/neuron_layers.hpp
+++ b/include/caffe/neuron_layers.hpp
@@ -29,9 +29,6 @@ class NeuronLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_NONE;
- }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
};
@@ -54,9 +51,7 @@ class AbsValLayer : public NeuronLayer<Dtype> {
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_ABSVAL;
- }
+ virtual inline const char* type() const { return "AbsVal"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
@@ -113,9 +108,7 @@ class BNLLLayer : public NeuronLayer<Dtype> {
explicit BNLLLayer(const LayerParameter& param)
: NeuronLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_BNLL;
- }
+ virtual inline const char* type() const { return "BNLL"; }
protected:
/// @copydoc BNLLLayer
@@ -173,9 +166,7 @@ class DropoutLayer : public NeuronLayer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_DROPOUT;
- }
+ virtual inline const char* type() const { return "Dropout"; }
protected:
/**
@@ -233,9 +224,7 @@ class ExpLayer : public NeuronLayer<Dtype> {
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_EXP;
- }
+ virtual inline const char* type() const { return "Exp"; }
protected:
/**
@@ -298,9 +287,7 @@ class PowerLayer : public NeuronLayer<Dtype> {
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_POWER;
- }
+ virtual inline const char* type() const { return "Power"; }
protected:
/**
@@ -369,9 +356,7 @@ class ReLULayer : public NeuronLayer<Dtype> {
explicit ReLULayer(const LayerParameter& param)
: NeuronLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_RELU;
- }
+ virtual inline const char* type() const { return "ReLU"; }
protected:
/**
@@ -465,9 +450,7 @@ class SigmoidLayer : public NeuronLayer<Dtype> {
explicit SigmoidLayer(const LayerParameter& param)
: NeuronLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SIGMOID;
- }
+ virtual inline const char* type() const { return "Sigmoid"; }
protected:
/**
@@ -549,9 +532,7 @@ class TanHLayer : public NeuronLayer<Dtype> {
explicit TanHLayer(const LayerParameter& param)
: NeuronLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_TANH;
- }
+ virtual inline const char* type() const { return "TanH"; }
protected:
/**
@@ -639,9 +620,7 @@ class ThresholdLayer : public NeuronLayer<Dtype> {
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_THRESHOLD;
- }
+ virtual inline const char* type() const { return "Threshold"; }
protected:
/**
diff --git a/include/caffe/util/upgrade_proto.hpp b/include/caffe/util/upgrade_proto.hpp
index 45483685..0627afe6 100644
--- a/include/caffe/util/upgrade_proto.hpp
+++ b/include/caffe/util/upgrade_proto.hpp
@@ -27,7 +27,7 @@ void UpgradeV0PaddingLayers(const NetParameter& param,
bool UpgradeLayerParameter(const LayerParameter& v0_layer_connection,
LayerParameter* layer_param);
-LayerParameter_LayerType UpgradeV0LayerType(const string& type);
+const char* UpgradeV0LayerType(const string& type);
// Return true iff any layer contains deprecated data transformation parameters.
bool NetNeedsDataUpgrade(const NetParameter& net_param);
diff --git a/include/caffe/vision_layers.hpp b/include/caffe/vision_layers.hpp
index fc178e4c..38e369c4 100644
--- a/include/caffe/vision_layers.hpp
+++ b/include/caffe/vision_layers.hpp
@@ -160,9 +160,8 @@ class ConvolutionLayer : public BaseConvolutionLayer<Dtype> {
*/
explicit ConvolutionLayer(const LayerParameter& param)
: BaseConvolutionLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_CONVOLUTION;
- }
+
+ virtual inline const char* type() const { return "Convolution"; }
protected:
virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
@@ -196,9 +195,8 @@ class DeconvolutionLayer : public BaseConvolutionLayer<Dtype> {
public:
explicit DeconvolutionLayer(const LayerParameter& param)
: BaseConvolutionLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_DECONVOLUTION;
- }
+
+ virtual inline const char* type() const { return "Deconvolution"; }
protected:
virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
@@ -272,9 +270,7 @@ class Im2colLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_IM2COL;
- }
+ virtual inline const char* type() const { return "Im2col"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
@@ -314,9 +310,7 @@ class LRNLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_LRN;
- }
+ virtual inline const char* type() const { return "LRN"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
@@ -392,9 +386,7 @@ class PoolingLayer : public Layer<Dtype> {
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_POOLING;
- }
+ virtual inline const char* type() const { return "Pooling"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int MinTopBlobs() const { return 1; }
// MAX POOL layers can output an extra top blob for the mask;
diff --git a/src/caffe/layer_factory.cpp b/src/caffe/layer_factory.cpp
index 80694834..c3fd1f30 100644
--- a/src/caffe/layer_factory.cpp
+++ b/src/caffe/layer_factory.cpp
@@ -29,7 +29,7 @@ Layer<Dtype>* GetConvolutionLayer(
}
}
-REGISTER_LAYER_CREATOR(CONVOLUTION, GetConvolutionLayer);
+REGISTER_LAYER_CREATOR(Convolution, GetConvolutionLayer);
// Get pooling layer according to engine.
template <typename Dtype>
@@ -59,7 +59,7 @@ Layer<Dtype>* GetPoolingLayer(const LayerParameter& param) {
}
}
-REGISTER_LAYER_CREATOR(POOLING, GetPoolingLayer);
+REGISTER_LAYER_CREATOR(Pooling, GetPoolingLayer);
// Get relu layer according to engine.
template <typename Dtype>
@@ -82,7 +82,7 @@ Layer<Dtype>* GetReLULayer(const LayerParameter& param) {
}
}
-REGISTER_LAYER_CREATOR(RELU, GetReLULayer);
+REGISTER_LAYER_CREATOR(ReLU, GetReLULayer);
// Get sigmoid layer according to engine.
template <typename Dtype>
@@ -105,7 +105,7 @@ Layer<Dtype>* GetSigmoidLayer(const LayerParameter& param) {
}
}
-REGISTER_LAYER_CREATOR(SIGMOID, GetSigmoidLayer);
+REGISTER_LAYER_CREATOR(Sigmoid, GetSigmoidLayer);
// Get softmax layer according to engine.
template <typename Dtype>
@@ -128,7 +128,7 @@ Layer<Dtype>* GetSoftmaxLayer(const LayerParameter& param) {
}
}
-REGISTER_LAYER_CREATOR(SOFTMAX, GetSoftmaxLayer);
+REGISTER_LAYER_CREATOR(Softmax, GetSoftmaxLayer);
// Get tanh layer according to engine.
template <typename Dtype>
@@ -151,7 +151,7 @@ Layer<Dtype>* GetTanHLayer(const LayerParameter& param) {
}
}
-REGISTER_LAYER_CREATOR(TANH, GetTanHLayer);
+REGISTER_LAYER_CREATOR(TanH, GetTanHLayer);
// Layers that use their constructor as their default creator should be
// registered in their corresponding cpp files. Do not register them here.
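
For layers with multiple backends, the creator-function form is used instead, as with GetConvolutionLayer above. A minimal sketch, reusing the hypothetical MyAwesome example from layer_factory.hpp's header comment; this form is the alternative to REGISTER_LAYER_CLASS, and a type should be registered through exactly one of the two:

    template <typename Dtype>
    Layer<Dtype>* GetMyAwesomeLayer(const LayerParameter& param) {
      // A real creator would inspect param (e.g. an engine field) and pick a
      // backend; this sketch always returns the single implementation.
      return new MyAwesomeLayer<Dtype>(param);
    }

    // Registers the creator under the string key "MyAwesome".
    REGISTER_LAYER_CREATOR(MyAwesome, GetMyAwesomeLayer);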
diff --git a/src/caffe/layers/absval_layer.cpp b/src/caffe/layers/absval_layer.cpp
index 0d054ee5..5ce28c9e 100644
--- a/src/caffe/layers/absval_layer.cpp
+++ b/src/caffe/layers/absval_layer.cpp
@@ -10,7 +10,7 @@ template <typename Dtype>
void AbsValLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
NeuronLayer<Dtype>::LayerSetUp(bottom, top);
- CHECK_NE(top[0], bottom[0]) << this->type_name() << " Layer does not "
+ CHECK_NE(top[0], bottom[0]) << this->type() << " Layer does not "
"allow in-place computation.";
}
@@ -40,5 +40,6 @@ STUB_GPU(AbsValLayer);
#endif
INSTANTIATE_CLASS(AbsValLayer);
-REGISTER_LAYER_CLASS(ABSVAL, AbsValLayer);
+REGISTER_LAYER_CLASS(AbsVal);
+
} // namespace caffe
diff --git a/src/caffe/layers/accuracy_layer.cpp b/src/caffe/layers/accuracy_layer.cpp
index 800b848f..3e8df34c 100644
--- a/src/caffe/layers/accuracy_layer.cpp
+++ b/src/caffe/layers/accuracy_layer.cpp
@@ -64,5 +64,6 @@ void AccuracyLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
}
INSTANTIATE_CLASS(AccuracyLayer);
-REGISTER_LAYER_CLASS(ACCURACY, AccuracyLayer);
+REGISTER_LAYER_CLASS(Accuracy);
+
} // namespace caffe
diff --git a/src/caffe/layers/argmax_layer.cpp b/src/caffe/layers/argmax_layer.cpp
index 15e199eb..c4040cdc 100644
--- a/src/caffe/layers/argmax_layer.cpp
+++ b/src/caffe/layers/argmax_layer.cpp
@@ -58,6 +58,6 @@ void ArgMaxLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
}
INSTANTIATE_CLASS(ArgMaxLayer);
-REGISTER_LAYER_CLASS(ARGMAX, ArgMaxLayer);
+REGISTER_LAYER_CLASS(ArgMax);
} // namespace caffe
diff --git a/src/caffe/layers/bnll_layer.cpp b/src/caffe/layers/bnll_layer.cpp
index cb3583ae..9ba0ea9a 100644
--- a/src/caffe/layers/bnll_layer.cpp
+++ b/src/caffe/layers/bnll_layer.cpp
@@ -43,5 +43,6 @@ STUB_GPU(BNLLLayer);
#endif
INSTANTIATE_CLASS(BNLLLayer);
-REGISTER_LAYER_CLASS(BNLL, BNLLLayer);
+REGISTER_LAYER_CLASS(BNLL);
+
} // namespace caffe
diff --git a/src/caffe/layers/concat_layer.cpp b/src/caffe/layers/concat_layer.cpp
index 42082195..fc88433c 100644
--- a/src/caffe/layers/concat_layer.cpp
+++ b/src/caffe/layers/concat_layer.cpp
@@ -105,5 +105,6 @@ STUB_GPU(ConcatLayer);
#endif
INSTANTIATE_CLASS(ConcatLayer);
-REGISTER_LAYER_CLASS(CONCAT, ConcatLayer);
+REGISTER_LAYER_CLASS(Concat);
+
} // namespace caffe
diff --git a/src/caffe/layers/contrastive_loss_layer.cpp b/src/caffe/layers/contrastive_loss_layer.cpp
index 0d0b443b..0692c11c 100644
--- a/src/caffe/layers/contrastive_loss_layer.cpp
+++ b/src/caffe/layers/contrastive_loss_layer.cpp
@@ -97,5 +97,6 @@ STUB_GPU(ContrastiveLossLayer);
#endif
INSTANTIATE_CLASS(ContrastiveLossLayer);
-REGISTER_LAYER_CLASS(CONTRASTIVE_LOSS, ContrastiveLossLayer);
+REGISTER_LAYER_CLASS(ContrastiveLoss);
+
} // namespace caffe
diff --git a/src/caffe/layers/conv_layer.cpp b/src/caffe/layers/conv_layer.cpp
index 9fd2fc6a..c0c9f6f3 100644
--- a/src/caffe/layers/conv_layer.cpp
+++ b/src/caffe/layers/conv_layer.cpp
@@ -79,4 +79,5 @@ STUB_GPU(ConvolutionLayer);
#endif
INSTANTIATE_CLASS(ConvolutionLayer);
+
} // namespace caffe
diff --git a/src/caffe/layers/data_layer.cpp b/src/caffe/layers/data_layer.cpp
index 96964566..227db201 100644
--- a/src/caffe/layers/data_layer.cpp
+++ b/src/caffe/layers/data_layer.cpp
@@ -129,6 +129,6 @@ void DataLayer<Dtype>::InternalThreadEntry() {
}
INSTANTIATE_CLASS(DataLayer);
-REGISTER_LAYER_CLASS(DATA, DataLayer);
+REGISTER_LAYER_CLASS(Data);
} // namespace caffe
diff --git a/src/caffe/layers/deconv_layer.cpp b/src/caffe/layers/deconv_layer.cpp
index 59114f01..e6d65ab5 100644
--- a/src/caffe/layers/deconv_layer.cpp
+++ b/src/caffe/layers/deconv_layer.cpp
@@ -81,5 +81,6 @@ STUB_GPU(DeconvolutionLayer);
#endif
INSTANTIATE_CLASS(DeconvolutionLayer);
-REGISTER_LAYER_CLASS(DECONVOLUTION, DeconvolutionLayer);
+REGISTER_LAYER_CLASS(Deconvolution);
+
} // namespace caffe
diff --git a/src/caffe/layers/dropout_layer.cpp b/src/caffe/layers/dropout_layer.cpp
index 8c8936a7..5f81cc1c 100644
--- a/src/caffe/layers/dropout_layer.cpp
+++ b/src/caffe/layers/dropout_layer.cpp
@@ -73,5 +73,6 @@ STUB_GPU(DropoutLayer);
#endif
INSTANTIATE_CLASS(DropoutLayer);
-REGISTER_LAYER_CLASS(DROPOUT, DropoutLayer);
+REGISTER_LAYER_CLASS(Dropout);
+
} // namespace caffe
diff --git a/src/caffe/layers/dummy_data_layer.cpp b/src/caffe/layers/dummy_data_layer.cpp
index 15cf5a58..d254eb1f 100644
--- a/src/caffe/layers/dummy_data_layer.cpp
+++ b/src/caffe/layers/dummy_data_layer.cpp
@@ -93,5 +93,6 @@ void DummyDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
}
INSTANTIATE_CLASS(DummyDataLayer);
-REGISTER_LAYER_CLASS(DUMMY_DATA, DummyDataLayer);
+REGISTER_LAYER_CLASS(DummyData);
+
} // namespace caffe
diff --git a/src/caffe/layers/eltwise_layer.cpp b/src/caffe/layers/eltwise_layer.cpp
index 0c239f4a..bbc34449 100644
--- a/src/caffe/layers/eltwise_layer.cpp
+++ b/src/caffe/layers/eltwise_layer.cpp
@@ -163,5 +163,6 @@ STUB_GPU(EltwiseLayer);
#endif
INSTANTIATE_CLASS(EltwiseLayer);
-REGISTER_LAYER_CLASS(ELTWISE, EltwiseLayer);
+REGISTER_LAYER_CLASS(Eltwise);
+
} // namespace caffe
diff --git a/src/caffe/layers/euclidean_loss_layer.cpp b/src/caffe/layers/euclidean_loss_layer.cpp
index d965027f..b539d348 100644
--- a/src/caffe/layers/euclidean_loss_layer.cpp
+++ b/src/caffe/layers/euclidean_loss_layer.cpp
@@ -54,5 +54,6 @@ STUB_GPU(EuclideanLossLayer);
#endif
INSTANTIATE_CLASS(EuclideanLossLayer);
-REGISTER_LAYER_CLASS(EUCLIDEAN_LOSS, EuclideanLossLayer);
+REGISTER_LAYER_CLASS(EuclideanLoss);
+
} // namespace caffe
diff --git a/src/caffe/layers/exp_layer.cpp b/src/caffe/layers/exp_layer.cpp
index 92cb5deb..c7e7c60c 100644
--- a/src/caffe/layers/exp_layer.cpp
+++ b/src/caffe/layers/exp_layer.cpp
@@ -64,5 +64,6 @@ STUB_GPU(ExpLayer);
#endif
INSTANTIATE_CLASS(ExpLayer);
-REGISTER_LAYER_CLASS(EXP, ExpLayer);
+REGISTER_LAYER_CLASS(Exp);
+
} // namespace caffe
diff --git a/src/caffe/layers/flatten_layer.cpp b/src/caffe/layers/flatten_layer.cpp
index ec43caba..eb7b42bc 100644
--- a/src/caffe/layers/flatten_layer.cpp
+++ b/src/caffe/layers/flatten_layer.cpp
@@ -34,5 +34,6 @@ STUB_GPU(FlattenLayer);
#endif
INSTANTIATE_CLASS(FlattenLayer);
-REGISTER_LAYER_CLASS(FLATTEN, FlattenLayer);
+REGISTER_LAYER_CLASS(Flatten);
+
} // namespace caffe
diff --git a/src/caffe/layers/hdf5_data_layer.cpp b/src/caffe/layers/hdf5_data_layer.cpp
index 706c4a18..77555211 100644
--- a/src/caffe/layers/hdf5_data_layer.cpp
+++ b/src/caffe/layers/hdf5_data_layer.cpp
@@ -121,5 +121,6 @@ STUB_GPU_FORWARD(HDF5DataLayer, Forward);
#endif
INSTANTIATE_CLASS(HDF5DataLayer);
-REGISTER_LAYER_CLASS(HDF5_DATA, HDF5DataLayer);
+REGISTER_LAYER_CLASS(HDF5Data);
+
} // namespace caffe
diff --git a/src/caffe/layers/hdf5_output_layer.cpp b/src/caffe/layers/hdf5_output_layer.cpp
index 4a72a18a..d2fdeffc 100644
--- a/src/caffe/layers/hdf5_output_layer.cpp
+++ b/src/caffe/layers/hdf5_output_layer.cpp
@@ -70,5 +70,6 @@ STUB_GPU(HDF5OutputLayer);
#endif
INSTANTIATE_CLASS(HDF5OutputLayer);
-REGISTER_LAYER_CLASS(HDF5_OUTPUT, HDF5OutputLayer);
+REGISTER_LAYER_CLASS(HDF5Output);
+
} // namespace caffe
diff --git a/src/caffe/layers/hinge_loss_layer.cpp b/src/caffe/layers/hinge_loss_layer.cpp
index 4dfafcc8..a2fb2a18 100644
--- a/src/caffe/layers/hinge_loss_layer.cpp
+++ b/src/caffe/layers/hinge_loss_layer.cpp
@@ -47,7 +47,7 @@ template <typename Dtype>
void HingeLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down[0]) {
@@ -77,5 +77,6 @@ void HingeLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(HingeLossLayer);
-REGISTER_LAYER_CLASS(HINGE_LOSS, HingeLossLayer);
+REGISTER_LAYER_CLASS(HingeLoss);
+
} // namespace caffe
diff --git a/src/caffe/layers/im2col_layer.cpp b/src/caffe/layers/im2col_layer.cpp
index 2c4bb902..11222611 100644
--- a/src/caffe/layers/im2col_layer.cpp
+++ b/src/caffe/layers/im2col_layer.cpp
@@ -88,5 +88,6 @@ STUB_GPU(Im2colLayer);
#endif
INSTANTIATE_CLASS(Im2colLayer);
-REGISTER_LAYER_CLASS(IM2COL, Im2colLayer);
+REGISTER_LAYER_CLASS(Im2col);
+
} // namespace caffe
diff --git a/src/caffe/layers/image_data_layer.cpp b/src/caffe/layers/image_data_layer.cpp
index ef6a342c..b96be6ad 100644
--- a/src/caffe/layers/image_data_layer.cpp
+++ b/src/caffe/layers/image_data_layer.cpp
@@ -149,5 +149,6 @@ void ImageDataLayer<Dtype>::InternalThreadEntry() {
}
INSTANTIATE_CLASS(ImageDataLayer);
-REGISTER_LAYER_CLASS(IMAGE_DATA, ImageDataLayer);
+REGISTER_LAYER_CLASS(ImageData);
+
} // namespace caffe
diff --git a/src/caffe/layers/infogain_loss_layer.cpp b/src/caffe/layers/infogain_loss_layer.cpp
index 8910431d..a1e0b40d 100644
--- a/src/caffe/layers/infogain_loss_layer.cpp
+++ b/src/caffe/layers/infogain_loss_layer.cpp
@@ -75,11 +75,11 @@ void InfogainLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down,
const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down.size() > 2 && propagate_down[2]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to infogain inputs.";
}
if (propagate_down[0]) {
@@ -106,5 +106,5 @@ void InfogainLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(InfogainLossLayer);
-REGISTER_LAYER_CLASS(INFOGAIN_LOSS, InfogainLossLayer);
+REGISTER_LAYER_CLASS(InfogainLoss);
} // namespace caffe
diff --git a/src/caffe/layers/inner_product_layer.cpp b/src/caffe/layers/inner_product_layer.cpp
index ecb849ba..b1ec6cb2 100644
--- a/src/caffe/layers/inner_product_layer.cpp
+++ b/src/caffe/layers/inner_product_layer.cpp
@@ -104,5 +104,6 @@ STUB_GPU(InnerProductLayer);
#endif
INSTANTIATE_CLASS(InnerProductLayer);
-REGISTER_LAYER_CLASS(INNER_PRODUCT, InnerProductLayer);
+REGISTER_LAYER_CLASS(InnerProduct);
+
} // namespace caffe
diff --git a/src/caffe/layers/lrn_layer.cpp b/src/caffe/layers/lrn_layer.cpp
index a878cf84..5e3e7c42 100644
--- a/src/caffe/layers/lrn_layer.cpp
+++ b/src/caffe/layers/lrn_layer.cpp
@@ -252,5 +252,6 @@ STUB_GPU_BACKWARD(LRNLayer, CrossChannelBackward);
#endif
INSTANTIATE_CLASS(LRNLayer);
-REGISTER_LAYER_CLASS(LRN, LRNLayer);
+REGISTER_LAYER_CLASS(LRN);
+
} // namespace caffe
diff --git a/src/caffe/layers/memory_data_layer.cpp b/src/caffe/layers/memory_data_layer.cpp
index 613ca2d4..3272b66a 100644
--- a/src/caffe/layers/memory_data_layer.cpp
+++ b/src/caffe/layers/memory_data_layer.cpp
@@ -72,5 +72,6 @@ void MemoryDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
}
INSTANTIATE_CLASS(MemoryDataLayer);
-REGISTER_LAYER_CLASS(MEMORY_DATA, MemoryDataLayer);
+REGISTER_LAYER_CLASS(MemoryData);
+
} // namespace caffe
diff --git a/src/caffe/layers/multinomial_logistic_loss_layer.cpp b/src/caffe/layers/multinomial_logistic_loss_layer.cpp
index 78a1f60f..4267a594 100644
--- a/src/caffe/layers/multinomial_logistic_loss_layer.cpp
+++ b/src/caffe/layers/multinomial_logistic_loss_layer.cpp
@@ -41,7 +41,7 @@ void MultinomialLogisticLossLayer<Dtype>::Backward_cpu(
const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down[0]) {
@@ -62,5 +62,6 @@ void MultinomialLogisticLossLayer<Dtype>::Backward_cpu(
}
INSTANTIATE_CLASS(MultinomialLogisticLossLayer);
-REGISTER_LAYER_CLASS(MULTINOMIAL_LOGISTIC_LOSS, MultinomialLogisticLossLayer);
+REGISTER_LAYER_CLASS(MultinomialLogisticLoss);
+
} // namespace caffe
diff --git a/src/caffe/layers/mvn_layer.cpp b/src/caffe/layers/mvn_layer.cpp
index 104ad95c..b74d7b4f 100644
--- a/src/caffe/layers/mvn_layer.cpp
+++ b/src/caffe/layers/mvn_layer.cpp
@@ -159,5 +159,6 @@ STUB_GPU(MVNLayer);
#endif
INSTANTIATE_CLASS(MVNLayer);
-REGISTER_LAYER_CLASS(MVN, MVNLayer);
+REGISTER_LAYER_CLASS(MVN);
+
} // namespace caffe
diff --git a/src/caffe/layers/pooling_layer.cpp b/src/caffe/layers/pooling_layer.cpp
index 2bfbb01f..6f4c69c8 100644
--- a/src/caffe/layers/pooling_layer.cpp
+++ b/src/caffe/layers/pooling_layer.cpp
@@ -314,5 +314,4 @@ STUB_GPU(PoolingLayer);
INSTANTIATE_CLASS(PoolingLayer);
-
} // namespace caffe
diff --git a/src/caffe/layers/power_layer.cpp b/src/caffe/layers/power_layer.cpp
index 69bd120e..4fe34c49 100644
--- a/src/caffe/layers/power_layer.cpp
+++ b/src/caffe/layers/power_layer.cpp
@@ -99,5 +99,6 @@ STUB_GPU(PowerLayer);
#endif
INSTANTIATE_CLASS(PowerLayer);
-REGISTER_LAYER_CLASS(POWER, PowerLayer);
+REGISTER_LAYER_CLASS(Power);
+
} // namespace caffe
diff --git a/src/caffe/layers/relu_layer.cpp b/src/caffe/layers/relu_layer.cpp
index 7d5e6034..cc00319a 100644
--- a/src/caffe/layers/relu_layer.cpp
+++ b/src/caffe/layers/relu_layer.cpp
@@ -43,5 +43,4 @@ STUB_GPU(ReLULayer);
INSTANTIATE_CLASS(ReLULayer);
-
} // namespace caffe
diff --git a/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cpp b/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cpp
index d1e327a5..077d9499 100644
--- a/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cpp
+++ b/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cpp
@@ -53,7 +53,7 @@ void SigmoidCrossEntropyLossLayer<Dtype>::Backward_cpu(
const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down[0]) {
@@ -75,5 +75,6 @@ STUB_GPU(SigmoidCrossEntropyLossLayer);
#endif
INSTANTIATE_CLASS(SigmoidCrossEntropyLossLayer);
-REGISTER_LAYER_CLASS(SIGMOID_CROSS_ENTROPY_LOSS, SigmoidCrossEntropyLossLayer);
+REGISTER_LAYER_CLASS(SigmoidCrossEntropyLoss);
+
} // namespace caffe
diff --git a/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cu b/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cu
index d9db4af6..08f7f492 100644
--- a/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cu
+++ b/src/caffe/layers/sigmoid_cross_entropy_loss_layer.cu
@@ -33,7 +33,7 @@ void SigmoidCrossEntropyLossLayer<Dtype>::Backward_gpu(
const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down[0]) {
diff --git a/src/caffe/layers/silence_layer.cpp b/src/caffe/layers/silence_layer.cpp
index 9bd20574..4abf9eff 100644
--- a/src/caffe/layers/silence_layer.cpp
+++ b/src/caffe/layers/silence_layer.cpp
@@ -22,5 +22,6 @@ STUB_GPU(SilenceLayer);
#endif
INSTANTIATE_CLASS(SilenceLayer);
-REGISTER_LAYER_CLASS(SILENCE, SilenceLayer);
+REGISTER_LAYER_CLASS(Silence);
+
} // namespace caffe
diff --git a/src/caffe/layers/slice_layer.cpp b/src/caffe/layers/slice_layer.cpp
index 60a5ecfa..46c3acd6 100644
--- a/src/caffe/layers/slice_layer.cpp
+++ b/src/caffe/layers/slice_layer.cpp
@@ -137,5 +137,6 @@ STUB_GPU(SliceLayer);
#endif
INSTANTIATE_CLASS(SliceLayer);
-REGISTER_LAYER_CLASS(SLICE, SliceLayer);
+REGISTER_LAYER_CLASS(Slice);
+
} // namespace caffe
diff --git a/src/caffe/layers/softmax_layer.cpp b/src/caffe/layers/softmax_layer.cpp
index c7b09fff..25142fde 100644
--- a/src/caffe/layers/softmax_layer.cpp
+++ b/src/caffe/layers/softmax_layer.cpp
@@ -92,4 +92,5 @@ STUB_GPU(SoftmaxLayer);
#endif
INSTANTIATE_CLASS(SoftmaxLayer);
+
} // namespace caffe
diff --git a/src/caffe/layers/softmax_loss_layer.cpp b/src/caffe/layers/softmax_loss_layer.cpp
index 14bd3837..bf20b605 100644
--- a/src/caffe/layers/softmax_loss_layer.cpp
+++ b/src/caffe/layers/softmax_loss_layer.cpp
@@ -14,7 +14,7 @@ void SoftmaxWithLossLayer<Dtype>::LayerSetUp(
const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
LossLayer<Dtype>::LayerSetUp(bottom, top);
LayerParameter softmax_param(this->layer_param_);
- softmax_param.set_type(LayerParameter_LayerType_SOFTMAX);
+ softmax_param.set_type("Softmax");
softmax_layer_.reset(LayerRegistry<Dtype>::CreateLayer(softmax_param));
softmax_bottom_vec_.clear();
softmax_bottom_vec_.push_back(bottom[0]);
@@ -80,7 +80,7 @@ template <typename Dtype>
void SoftmaxWithLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down[0]) {
@@ -116,6 +116,6 @@ void SoftmaxWithLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(SoftmaxWithLossLayer);
-REGISTER_LAYER_CLASS(SOFTMAX_LOSS, SoftmaxWithLossLayer);
+REGISTER_LAYER_CLASS(SoftmaxWithLoss);
} // namespace caffe
diff --git a/src/caffe/layers/split_layer.cpp b/src/caffe/layers/split_layer.cpp
index 51ac61f4..d6929b99 100644
--- a/src/caffe/layers/split_layer.cpp
+++ b/src/caffe/layers/split_layer.cpp
@@ -16,7 +16,7 @@ void SplitLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
// the backward pass. (Technically, it should be possible to share the diff
// blob of the first split output with the input, but this seems to cause
// some strange effects in practice...)
- CHECK_NE(top[i], bottom[0]) << this->type_name() << " Layer does not "
+ CHECK_NE(top[i], bottom[0]) << this->type() << " Layer does not "
"allow in-place computation.";
top[i]->Reshape(bottom[0]->num(), bottom[0]->channels(),
bottom[0]->height(), bottom[0]->width());
@@ -56,5 +56,6 @@ STUB_GPU(SplitLayer);
#endif
INSTANTIATE_CLASS(SplitLayer);
-REGISTER_LAYER_CLASS(SPLIT, SplitLayer);
+REGISTER_LAYER_CLASS(Split);
+
} // namespace caffe
diff --git a/src/caffe/layers/threshold_layer.cpp b/src/caffe/layers/threshold_layer.cpp
index 9e68c32d..2365e7b9 100644
--- a/src/caffe/layers/threshold_layer.cpp
+++ b/src/caffe/layers/threshold_layer.cpp
@@ -29,5 +29,6 @@ STUB_GPU_FORWARD(ThresholdLayer, Forward);
#endif
INSTANTIATE_CLASS(ThresholdLayer);
-REGISTER_LAYER_CLASS(THRESHOLD, ThresholdLayer);
+REGISTER_LAYER_CLASS(Threshold);
+
} // namespace caffe
diff --git a/src/caffe/layers/window_data_layer.cpp b/src/caffe/layers/window_data_layer.cpp
index 83372e07..23ec83d1 100644
--- a/src/caffe/layers/window_data_layer.cpp
+++ b/src/caffe/layers/window_data_layer.cpp
@@ -463,5 +463,6 @@ void WindowDataLayer<Dtype>::InternalThreadEntry() {
}
INSTANTIATE_CLASS(WindowDataLayer);
-REGISTER_LAYER_CLASS(WINDOW_DATA, WindowDataLayer);
+REGISTER_LAYER_CLASS(WindowData);
+
} // namespace caffe
diff --git a/src/caffe/proto/caffe.proto b/src/caffe/proto/caffe.proto
index dd2c1471..1fea3445 100644
--- a/src/caffe/proto/caffe.proto
+++ b/src/caffe/proto/caffe.proto
@@ -225,59 +225,7 @@ message LayerParameter {
repeated NetStateRule include = 32;
repeated NetStateRule exclude = 33;
- // NOTE
- // Add new LayerTypes to the enum below in lexicographical order (other than
- // starting with NONE), starting with the next available ID in the comment
- // line above the enum. Update the next available ID when you add a new
- // LayerType.
- //
- // LayerType next available ID: 40 (last added: DECONVOLUTION)
- enum LayerType {
- // "NONE" layer type is 0th enum element so that we don't cause confusion
- // by defaulting to an existent LayerType (instead, should usually error if
- // the type is unspecified).
- NONE = 0;
- ABSVAL = 35;
- ACCURACY = 1;
- ARGMAX = 30;
- BNLL = 2;
- CONCAT = 3;
- CONTRASTIVE_LOSS = 37;
- CONVOLUTION = 4;
- DATA = 5;
- DECONVOLUTION = 39;
- DROPOUT = 6;
- DUMMY_DATA = 32;
- EUCLIDEAN_LOSS = 7;
- ELTWISE = 25;
- EXP = 38;
- FLATTEN = 8;
- HDF5_DATA = 9;
- HDF5_OUTPUT = 10;
- HINGE_LOSS = 28;
- IM2COL = 11;
- IMAGE_DATA = 12;
- INFOGAIN_LOSS = 13;
- INNER_PRODUCT = 14;
- LRN = 15;
- MEMORY_DATA = 29;
- MULTINOMIAL_LOGISTIC_LOSS = 16;
- MVN = 34;
- POOLING = 17;
- POWER = 26;
- RELU = 18;
- SIGMOID = 19;
- SIGMOID_CROSS_ENTROPY_LOSS = 27;
- SILENCE = 36;
- SOFTMAX = 20;
- SOFTMAX_LOSS = 21;
- SPLIT = 22;
- SLICE = 33;
- TANH = 23;
- WINDOW_DATA = 24;
- THRESHOLD = 31;
- }
- optional LayerType type = 5; // the layer type from the enum above
+ optional string type = 5; // the layer type
// The blobs containing the numeric parameters of the layer
repeated BlobProto blobs = 6;
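
With the field now a plain string, prototxt layer definitions change from type: SOFTMAX to type: 'Softmax', as the test updates below show throughout. A minimal sketch of programmatic construction under the new scheme, using only the registry API from layer_factory.hpp; the MakeSoftmax wrapper is hypothetical:

    #include "caffe/layer.hpp"
    #include "caffe/layer_factory.hpp"
    #include "caffe/proto/caffe.pb.h"

    // Hypothetical usage sketch, not part of this commit.
    caffe::Layer<float>* MakeSoftmax() {
      caffe::LayerParameter param;
      param.set_name("prob");
      // Was: param.set_type(LayerParameter_LayerType_SOFTMAX).
      param.set_type("Softmax");
      // An unregistered string now fails CreateLayer's CHECK, which also
      // prints the known types via LayerTypeList().
      return caffe::LayerRegistry<float>::CreateLayer(param);
    }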
diff --git a/src/caffe/test/test_gradient_based_solver.cpp b/src/caffe/test/test_gradient_based_solver.cpp
index 65de52aa..1d8192ae 100644
--- a/src/caffe/test/test_gradient_based_solver.cpp
+++ b/src/caffe/test/test_gradient_based_solver.cpp
@@ -66,7 +66,7 @@ class GradientBasedSolverTest : public MultiDeviceTest<TypeParam> {
" name: 'TestNetwork' "
" layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: " << num_ << " "
" channels: " << channels_ << " "
@@ -85,7 +85,7 @@ class GradientBasedSolverTest : public MultiDeviceTest<TypeParam> {
" } "
" layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1 "
" weight_filler { "
@@ -102,7 +102,7 @@ class GradientBasedSolverTest : public MultiDeviceTest<TypeParam> {
" } "
" layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod' "
" bottom: 'targets' "
" } "
diff --git a/src/caffe/test/test_net.cpp b/src/caffe/test/test_net.cpp
index 319958fe..a4e14f91 100644
--- a/src/caffe/test/test_net.cpp
+++ b/src/caffe/test/test_net.cpp
@@ -61,7 +61,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"name: 'TinyTestNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
@@ -85,7 +85,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1000 "
" weight_filler { "
@@ -106,7 +106,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerproduct' "
" bottom: 'label' "
" top: 'top_loss' "
@@ -115,7 +115,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
proto +=
"layers: { "
" name: 'loss' "
- " type: ACCURACY "
+ " type: 'Accuracy' "
" bottom: 'innerproduct' "
" bottom: 'label' "
" top: 'accuracy' "
@@ -132,7 +132,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"name: 'TinyTestEuclidLossNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
@@ -152,7 +152,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1 "
" weight_filler { "
@@ -173,7 +173,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerproduct' "
" bottom: 'label' "
"} ";
@@ -192,7 +192,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"name: 'TrickyTestNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
@@ -212,7 +212,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1000 "
" weight_filler { "
@@ -233,7 +233,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1 "
" weight_filler { "
@@ -254,7 +254,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS " +
+ " type: 'SoftmaxWithLoss' " +
loss_weight_stream.str() +
" bottom: 'transformed_data' "
" bottom: 'transformed_label' "
@@ -262,8 +262,8 @@ class NetTest : public MultiDeviceTest<TypeParam> {
InitNetFromProtoString(proto);
}
- // loss_weight is the loss weight for the EUCLIDEAN_LOSS layer output.
- // midnet_loss_weight is the loss weight for the first INNER_PRODUCT layer
+ // loss_weight is the loss weight for the 'EuclideanLoss' layer output.
+ // midnet_loss_weight is the loss weight for the first 'InnerProduct' layer
// output. Should both default to 0.0 if unspecified (i.e., if NULL is
// passed to this function).
virtual void InitUnsharedWeightsNet(const Dtype* loss_weight = NULL,
@@ -279,7 +279,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
proto <<
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
@@ -294,7 +294,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: " << bias_term <<
@@ -322,7 +322,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: " << bias_term <<
@@ -346,7 +346,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS ";
+ " type: 'EuclideanLoss' ";
if (loss_weight) {
proto << " loss_weight: " << *loss_weight << " ";
}
@@ -362,7 +362,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"name: 'SharedWeightsNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
@@ -377,7 +377,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
@@ -392,7 +392,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
@@ -407,7 +407,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerproduct1' "
" bottom: 'innerproduct2' "
"} ";
@@ -419,7 +419,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"name: 'DiffDataUnsharedWeightsNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 10 "
" channels: 10 "
@@ -439,7 +439,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
@@ -454,7 +454,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
@@ -469,7 +469,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'data2' "
" bottom: 'innerproduct2' "
"} ";
@@ -481,7 +481,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"name: 'DiffDataSharedWeightsNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 10 "
" channels: 10 "
@@ -501,7 +501,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
@@ -516,7 +516,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
@@ -531,7 +531,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'data2' "
" bottom: 'innerproduct2' "
"} ";
@@ -548,7 +548,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"input_dim: 100 "
"layers: { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" bottom: 'data' "
" top: 'conv1' "
" convolution_param { "
@@ -567,13 +567,13 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'relu1' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv1' "
" top: 'conv1' "
"} "
"layers: { "
" name: 'pool1' "
- " type: POOLING "
+ " type: 'Pooling' "
" bottom: 'conv1' "
" top: 'pool1' "
" pooling_param { "
@@ -584,7 +584,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'norm1' "
- " type: LRN "
+ " type: 'LRN' "
" bottom: 'pool1' "
" top: 'norm1' "
" lrn_param { "
@@ -593,7 +593,7 @@ class NetTest : public MultiDeviceTest<TypeParam> {
"} "
"layers: { "
" name: 'softmax' "
- " type: SOFTMAX "
+ " type: 'Softmax' "
" bottom: 'norm1' "
" top: 'softmax' "
"} ";
@@ -702,7 +702,7 @@ TYPED_TEST(NetTest, TestBottomNeedBackwardTricky) {
TYPED_TEST(NetTest, TestLossWeight) {
typedef typename TypeParam::Dtype Dtype;
// First, compute the loss and gradients with no loss_weight specified.
- // In this case, the loss weight for the EUCLIDEAN_LOSS layer should default
+ // In this case, the loss weight for the 'EuclideanLoss' layer should default
// to 1.
vector<Blob<Dtype>*> bottom;
Caffe::set_random_seed(this->seed_);
@@ -796,8 +796,8 @@ TYPED_TEST(NetTest, TestComboLossWeight) {
const bool kForceBackward = true;
const Dtype kErrorMargin = 1e-4;
- // Get the loss and gradients with EUCLIDEAN_LOSS weight 1,
- // INNER_PRODUCT weight 1.
+ // Get the loss and gradients with 'EuclideanLoss' weight 1,
+ // 'InnerProduct' weight 1.
loss_weight = 1;
midnet_loss_weight = 1;
Caffe::set_random_seed(this->seed_);
@@ -925,7 +925,7 @@ TYPED_TEST(NetTest, TestBackwardWithAccuracyLayer) {
this->InitTinyNet(kForceBackward, kAccuracyLayer);
EXPECT_TRUE(this->net_->has_blob("accuracy"));
vector<Blob<Dtype>*> bottom;
- // Test that we can do Backward even though we have an ACCURACY layer.
+ // Test that we can do Backward even though we have an 'Accuracy' layer.
this->net_->ForwardBackward(bottom);
}
@@ -1263,19 +1263,19 @@ TEST_F(FilterNetTest, TestNoFilter) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1287,7 +1287,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"name: 'LeNet' "
"layers { "
" name: 'mnist' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
" data_param { "
@@ -1301,7 +1301,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'mnist' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
" data_param { "
@@ -1315,7 +1315,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" bottom: 'data' "
" top: 'conv1' "
" blobs_lr: 1 "
@@ -1334,7 +1334,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'ip1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'conv1' "
" top: 'ip1' "
" blobs_lr: 1 "
@@ -1351,7 +1351,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'accuracy' "
- " type: ACCURACY "
+ " type: 'Accuracy' "
" bottom: 'ip1' "
" bottom: 'label' "
" top: 'accuracy' "
@@ -1359,7 +1359,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'ip2' "
" bottom: 'label' "
" top: 'loss' "
@@ -1370,7 +1370,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"name: 'LeNet' "
"layers { "
" name: 'mnist' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
" data_param { "
@@ -1384,7 +1384,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" bottom: 'data' "
" top: 'conv1' "
" blobs_lr: 1 "
@@ -1403,7 +1403,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'ip1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'conv1' "
" top: 'ip1' "
" blobs_lr: 1 "
@@ -1420,7 +1420,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'ip2' "
" bottom: 'label' "
" top: 'loss' "
@@ -1429,7 +1429,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"name: 'LeNet' "
"layers { "
" name: 'mnist' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
" data_param { "
@@ -1443,7 +1443,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" bottom: 'data' "
" top: 'conv1' "
" blobs_lr: 1 "
@@ -1462,7 +1462,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'ip1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'conv1' "
" top: 'ip1' "
" blobs_lr: 1 "
@@ -1479,7 +1479,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'accuracy' "
- " type: ACCURACY "
+ " type: 'Accuracy' "
" bottom: 'ip1' "
" bottom: 'label' "
" top: 'accuracy' "
@@ -1487,7 +1487,7 @@ TEST_F(FilterNetTest, TestFilterLeNetTrainTest) {
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'ip2' "
" bottom: 'label' "
" top: 'loss' "
@@ -1519,20 +1519,20 @@ TEST_F(FilterNetTest, TestFilterOutByStage) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
" include: { stage: 'mystage' } "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1540,13 +1540,13 @@ TEST_F(FilterNetTest, TestFilterOutByStage) {
"name: 'TestNetwork' "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1558,20 +1558,20 @@ TEST_F(FilterNetTest, TestFilterOutByStage2) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { stage: 'mystage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1579,13 +1579,13 @@ TEST_F(FilterNetTest, TestFilterOutByStage2) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1598,20 +1598,20 @@ TEST_F(FilterNetTest, TestFilterInByStage) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { stage: 'mystage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1623,20 +1623,20 @@ TEST_F(FilterNetTest, TestFilterInByStage2) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" exclude: { stage: 'mystage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1649,20 +1649,20 @@ TEST_F(FilterNetTest, TestFilterOutByMultipleStage) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { stage: 'mystage' stage: 'myotherstage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { stage: 'mystage' } "
@@ -1672,13 +1672,13 @@ TEST_F(FilterNetTest, TestFilterOutByMultipleStage) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { stage: 'mystage' } "
@@ -1692,13 +1692,13 @@ TEST_F(FilterNetTest, TestFilterInByMultipleStage) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { stage: 'myotherstage' } "
@@ -1706,7 +1706,7 @@ TEST_F(FilterNetTest, TestFilterInByMultipleStage) {
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { stage: 'mystage' } "
@@ -1720,20 +1720,20 @@ TEST_F(FilterNetTest, TestFilterInByMultipleStage2) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { stage: 'mystage' stage: 'myotherstage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { stage: 'mystage' } "
@@ -1747,20 +1747,20 @@ TEST_F(FilterNetTest, TestFilterInByNotStage) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { not_stage: 'myotherstage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { not_stage: 'myotherstage' } "
@@ -1774,20 +1774,20 @@ TEST_F(FilterNetTest, TestFilterOutByNotStage) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { not_stage: 'mystage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { not_stage: 'mystage' } "
@@ -1797,7 +1797,7 @@ TEST_F(FilterNetTest, TestFilterOutByNotStage) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} ";
@@ -1809,20 +1809,20 @@ TEST_F(FilterNetTest, TestFilterOutByMinLevel) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 3 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1830,13 +1830,13 @@ TEST_F(FilterNetTest, TestFilterOutByMinLevel) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1848,20 +1848,20 @@ TEST_F(FilterNetTest, TestFilterOutByMaxLevel) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { max_level: -3 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1869,13 +1869,13 @@ TEST_F(FilterNetTest, TestFilterOutByMaxLevel) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1887,20 +1887,20 @@ TEST_F(FilterNetTest, TestFilterInByMinLevel) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 0 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1913,20 +1913,20 @@ TEST_F(FilterNetTest, TestFilterInByMinLevel2) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 3 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1938,20 +1938,20 @@ TEST_F(FilterNetTest, TestFilterInByMaxLevel) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { max_level: 0 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1964,20 +1964,20 @@ TEST_F(FilterNetTest, TestFilterInByMaxLevel2) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { max_level: -3 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -1989,20 +1989,20 @@ TEST_F(FilterNetTest, TestFilterInOutByIncludeMultiRule) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 2 phase: TRAIN } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { min_level: 2 phase: TEST } "
@@ -2016,13 +2016,13 @@ TEST_F(FilterNetTest, TestFilterInOutByIncludeMultiRule) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 2 phase: TRAIN } "
@@ -2032,13 +2032,13 @@ TEST_F(FilterNetTest, TestFilterInOutByIncludeMultiRule) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { min_level: 2 phase: TEST } "
@@ -2052,13 +2052,13 @@ TEST_F(FilterNetTest, TestFilterInByIncludeMultiRule) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 2 phase: TRAIN } "
@@ -2066,7 +2066,7 @@ TEST_F(FilterNetTest, TestFilterInByIncludeMultiRule) {
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { min_level: 2 phase: TEST } "
@@ -2085,20 +2085,20 @@ TEST_F(FilterNetTest, TestFilterInOutByExcludeMultiRule) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" exclude: { min_level: 2 phase: TRAIN } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" exclude: { min_level: 2 phase: TEST } "
@@ -2112,13 +2112,13 @@ TEST_F(FilterNetTest, TestFilterInOutByExcludeMultiRule) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" exclude: { min_level: 2 phase: TEST } "
@@ -2128,13 +2128,13 @@ TEST_F(FilterNetTest, TestFilterInOutByExcludeMultiRule) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" exclude: { min_level: 2 phase: TRAIN } "
diff --git a/src/caffe/test/test_protobuf.cpp b/src/caffe/test/test_protobuf.cpp
index 0c502d6d..01de461a 100644
--- a/src/caffe/test/test_protobuf.cpp
+++ b/src/caffe/test/test_protobuf.cpp
@@ -16,7 +16,7 @@ class ProtoTest : public ::testing::Test {};
TEST_F(ProtoTest, TestSerialization) {
LayerParameter param;
param.set_name("test");
- param.set_type(LayerParameter_LayerType_NONE);
+ param.set_type("Test");
std::cout << "Printing in binary format." << std::endl;
std::cout << param.SerializeAsString() << std::endl;
std::cout << "Printing in text format." << std::endl;
diff --git a/src/caffe/test/test_solver.cpp b/src/caffe/test/test_solver.cpp
index a7dbf77f..d3f646c5 100644
--- a/src/caffe/test/test_solver.cpp
+++ b/src/caffe/test/test_solver.cpp
@@ -53,7 +53,7 @@ TYPED_TEST(SolverTest, TestInitTrainTestNets) {
" name: 'TestNetwork' "
" layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 3 "
@@ -69,7 +69,7 @@ TYPED_TEST(SolverTest, TestInitTrainTestNets) {
" } "
" layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" } "
@@ -78,7 +78,7 @@ TYPED_TEST(SolverTest, TestInitTrainTestNets) {
" } "
" layers: { "
" name: 'accuracy' "
- " type: ACCURACY "
+ " type: 'Accuracy' "
" bottom: 'innerprod' "
" bottom: 'label' "
" top: 'accuracy' "
@@ -86,7 +86,7 @@ TYPED_TEST(SolverTest, TestInitTrainTestNets) {
" } "
" layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { phase: TRAIN } "
diff --git a/src/caffe/test/test_split_layer.cpp b/src/caffe/test/test_split_layer.cpp
index 38e76219..584d0a4a 100644
--- a/src/caffe/test/test_split_layer.cpp
+++ b/src/caffe/test/test_split_layer.cpp
@@ -116,19 +116,19 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertion1) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -140,32 +140,32 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertion2) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'data_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_split_0' "
" top: 'data_split_1' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_split_0' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_split_1' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod2' "
"} ";
@@ -177,7 +177,7 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"name: 'CaffeNet' "
"layers { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" batch_size: 256 "
@@ -192,7 +192,7 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 96 "
" kernel_size: 11 "
@@ -215,13 +215,13 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'relu1' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv1' "
" top: 'conv1' "
"} "
"layers { "
" name: 'pool1' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: MAX "
" kernel_size: 3 "
@@ -232,7 +232,7 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'norm1' "
- " type: LRN "
+ " type: 'LRN' "
" lrn_param { "
" local_size: 5 "
" alpha: 0.0001 "
@@ -243,7 +243,7 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'conv2' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 256 "
" group: 2 "
@@ -267,13 +267,13 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'relu2' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv2' "
" top: 'conv2' "
"} "
"layers { "
" name: 'pool2' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: MAX "
" kernel_size: 3 "
@@ -284,7 +284,7 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'norm2' "
- " type: LRN "
+ " type: 'LRN' "
" lrn_param { "
" local_size: 5 "
" alpha: 0.0001 "
@@ -295,7 +295,7 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'conv3' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 384 "
" kernel_size: 3 "
@@ -318,13 +318,13 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'relu3' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv3' "
" top: 'conv3' "
"} "
"layers { "
" name: 'conv4' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 384 "
" group: 2 "
@@ -348,13 +348,13 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'relu4' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv4' "
" top: 'conv4' "
"} "
"layers { "
" name: 'conv5' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 256 "
" group: 2 "
@@ -378,13 +378,13 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'relu5' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv5' "
" top: 'conv5' "
"} "
"layers { "
" name: 'pool5' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" kernel_size: 3 "
" pool: MAX "
@@ -395,7 +395,7 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'fc6' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 4096 "
" weight_filler { "
@@ -416,13 +416,13 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'relu6' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'fc6' "
" top: 'fc6' "
"} "
"layers { "
" name: 'drop6' "
- " type: DROPOUT "
+ " type: 'Dropout' "
" dropout_param { "
" dropout_ratio: 0.5 "
" } "
@@ -431,7 +431,7 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'fc7' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 4096 "
" weight_filler { "
@@ -452,13 +452,13 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'relu7' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'fc7' "
" top: 'fc7' "
"} "
"layers { "
" name: 'drop7' "
- " type: DROPOUT "
+ " type: 'Dropout' "
" dropout_param { "
" dropout_ratio: 0.5 "
" } "
@@ -467,7 +467,7 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'fc8' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1000 "
" weight_filler { "
@@ -488,7 +488,7 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionImageNet) {
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
@@ -500,25 +500,25 @@ TEST_F(SplitLayerInsertionTest, TestNoInsertionWithInPlace) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'relu' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'innerprod' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
@@ -531,7 +531,7 @@ TEST_F(SplitLayerInsertionTest, TestLossInsertion) {
"force_backward: true "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
@@ -546,7 +546,7 @@ TEST_F(SplitLayerInsertionTest, TestLossInsertion) {
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
@@ -562,7 +562,7 @@ TEST_F(SplitLayerInsertionTest, TestLossInsertion) {
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
@@ -577,7 +577,7 @@ TEST_F(SplitLayerInsertionTest, TestLossInsertion) {
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerproduct1' "
" bottom: 'innerproduct2' "
"} ";
@@ -586,7 +586,7 @@ TEST_F(SplitLayerInsertionTest, TestLossInsertion) {
"force_backward: true "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
@@ -601,14 +601,14 @@ TEST_F(SplitLayerInsertionTest, TestLossInsertion) {
"} "
"layers: { "
" name: 'data_data_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_data_0_split_0' "
" top: 'data_data_0_split_1' "
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
@@ -623,7 +623,7 @@ TEST_F(SplitLayerInsertionTest, TestLossInsertion) {
"} "
"layers: { "
" name: 'innerproduct1_innerproduct1_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'innerproduct1' "
" top: 'innerproduct1_innerproduct1_0_split_0' "
" top: 'innerproduct1_innerproduct1_0_split_1' "
@@ -632,7 +632,7 @@ TEST_F(SplitLayerInsertionTest, TestLossInsertion) {
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
@@ -647,7 +647,7 @@ TEST_F(SplitLayerInsertionTest, TestLossInsertion) {
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerproduct1_innerproduct1_0_split_1' "
" bottom: 'innerproduct2' "
"} ";
@@ -659,37 +659,37 @@ TEST_F(SplitLayerInsertionTest, TestInsertion) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'innerprod3' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod3' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod2' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2' "
" bottom: 'innerprod3' "
"} ";
@@ -697,13 +697,13 @@ TEST_F(SplitLayerInsertionTest, TestInsertion) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'data_data_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_data_0_split_0' "
" top: 'data_data_0_split_1' "
@@ -711,38 +711,38 @@ TEST_F(SplitLayerInsertionTest, TestInsertion) {
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_0' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_1' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'innerprod2_innerprod2_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'innerprod2' "
" top: 'innerprod2_innerprod2_0_split_0' "
" top: 'innerprod2_innerprod2_0_split_1' "
"} "
"layers: { "
" name: 'innerprod3' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_2' "
" top: 'innerprod3' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod2_innerprod2_0_split_0' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2_innerprod2_0_split_1' "
" bottom: 'innerprod3' "
"} ";
@@ -754,43 +754,43 @@ TEST_F(SplitLayerInsertionTest, TestInsertionTwoTop) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'label' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'innerprod3' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod3' "
"} "
"layers: { "
" name: 'innerprod4' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'label' "
" top: 'innerprod4' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod3' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2' "
" bottom: 'innerprod4' "
"} ";
@@ -798,57 +798,57 @@ TEST_F(SplitLayerInsertionTest, TestInsertionTwoTop) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'data_data_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_data_0_split_0' "
" top: 'data_data_0_split_1' "
"} "
"layers: { "
" name: 'label_data_1_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'label' "
" top: 'label_data_1_split_0' "
" top: 'label_data_1_split_1' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_0' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'label_data_1_split_0' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'innerprod3' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_1' "
" top: 'innerprod3' "
"} "
"layers: { "
" name: 'innerprod4' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'label_data_1_split_1' "
" top: 'innerprod4' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod3' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2' "
" bottom: 'innerprod4' "
"} ";
@@ -865,19 +865,19 @@ TEST_F(SplitLayerInsertionTest, TestInputInsertion) {
"input_dim: 227 "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod2' "
"} ";
@@ -890,26 +890,26 @@ TEST_F(SplitLayerInsertionTest, TestInputInsertion) {
"input_dim: 227 "
"layers: { "
" name: 'data_input_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_input_0_split_0' "
" top: 'data_input_0_split_1' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_input_0_split_0' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_input_0_split_1' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod2' "
"} ";
@@ -921,37 +921,37 @@ TEST_F(SplitLayerInsertionTest, TestWithInPlace) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'relu1' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'innerprod1' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'innerprod1' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'label' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2' "
" bottom: 'data' "
"} ";
@@ -959,51 +959,51 @@ TEST_F(SplitLayerInsertionTest, TestWithInPlace) {
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'data_data_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_data_0_split_0' "
" top: 'data_data_0_split_1' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_0' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'relu1' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'innerprod1' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod1_relu1_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'innerprod1' "
" top: 'innerprod1_relu1_0_split_0' "
" top: 'innerprod1_relu1_0_split_1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'innerprod1_relu1_0_split_0' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1_relu1_0_split_1' "
" bottom: 'label' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2' "
" bottom: 'data_data_0_split_1' "
"} ";
diff --git a/src/caffe/test/test_upgrade_proto.cpp b/src/caffe/test/test_upgrade_proto.cpp
index 52e7f1f9..e520b85f 100644
--- a/src/caffe/test/test_upgrade_proto.cpp
+++ b/src/caffe/test/test_upgrade_proto.cpp
@@ -1184,7 +1184,7 @@ TEST_F(V0UpgradeTest, TestSimple) {
"name: 'CaffeNet' "
"layers { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" batch_size: 256 "
@@ -1199,7 +1199,7 @@ TEST_F(V0UpgradeTest, TestSimple) {
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 96 "
" kernel_size: 11 "
@@ -1223,7 +1223,7 @@ TEST_F(V0UpgradeTest, TestSimple) {
"} "
"layers { "
" name: 'fc8' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1000 "
" weight_filler { "
@@ -1244,7 +1244,7 @@ TEST_F(V0UpgradeTest, TestSimple) {
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
@@ -1520,7 +1520,7 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"input_dim: 32 "
"layers { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" batch_size: 256 "
@@ -1537,7 +1537,7 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'images' "
- " type: IMAGE_DATA "
+ " type: 'ImageData' "
" image_data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-images' "
" batch_size: 256 "
@@ -1557,7 +1557,7 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'window_data' "
- " type: WINDOW_DATA "
+ " type: 'WindowData' "
" window_data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" batch_size: 256 "
@@ -1577,7 +1577,7 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'hdf5data' "
- " type: HDF5_DATA "
+ " type: 'HDF5Data' "
" hdf5_data_param { "
" source: '/my/hdf5/data' "
" batch_size: 256 "
@@ -1586,7 +1586,7 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 96 "
" bias_term: false "
@@ -1611,7 +1611,7 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'pool1ave' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: AVE "
" kernel_size: 3 "
@@ -1622,7 +1622,7 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'pool1stoch' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: STOCHASTIC "
" kernel_size: 4 "
@@ -1633,7 +1633,7 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'concat' "
- " type: CONCAT "
+ " type: 'Concat' "
" concat_param { "
" concat_dim: 2 "
" } "
@@ -1643,7 +1643,7 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'norm1' "
- " type: LRN "
+ " type: 'LRN' "
" lrn_param { "
" local_size: 5 "
" alpha: 0.0001 "
@@ -1654,7 +1654,7 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'fc6' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 4096 "
" bias_term: false "
@@ -1676,13 +1676,13 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'relu6' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'fc6' "
" top: 'fc6' "
"} "
"layers { "
" name: 'drop6' "
- " type: DROPOUT "
+ " type: 'Dropout' "
" dropout_param { "
" dropout_ratio: 0.2 "
" } "
@@ -1691,7 +1691,7 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'loss' "
- " type: INFOGAIN_LOSS "
+ " type: 'InfogainLoss' "
" infogain_loss_param { "
" source: '/my/infogain/matrix' "
" } "
@@ -1700,54 +1700,54 @@ TEST_F(V0UpgradeTest, TestAllParams) {
"} "
"layers { "
" name: 'accuracy' "
- " type: ACCURACY "
+ " type: 'Accuracy' "
"} "
"layers { "
" name: 'bnll' "
- " type: BNLL "
+ " type: 'BNLL' "
"} "
"layers { "
" name: 'euclidean_loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
"} "
"layers { "
" name: 'flatten' "
- " type: FLATTEN "
+ " type: 'Flatten' "
"} "
"layers { "
" name: 'hdf5_output' "
- " type: HDF5_OUTPUT "
+ " type: 'HDF5Output' "
" hdf5_output_param { "
" file_name: '/my/hdf5/output/file' "
" } "
"} "
"layers { "
" name: 'im2col' "
- " type: IM2COL "
+ " type: 'Im2Col' "
"} "
"layers { "
" name: 'images' "
- " type: IMAGE_DATA "
+ " type: 'ImageData' "
"} "
"layers { "
" name: 'multinomial_logistic_loss' "
- " type: MULTINOMIAL_LOGISTIC_LOSS "
+ " type: 'MultinomialLogisticLoss' "
"} "
"layers { "
" name: 'sigmoid' "
- " type: SIGMOID "
+ " type: 'Sigmoid' "
"} "
"layers { "
" name: 'softmax' "
- " type: SOFTMAX "
+ " type: 'Softmax' "
"} "
"layers { "
" name: 'split' "
- " type: SPLIT "
+ " type: 'Split' "
"} "
"layers { "
" name: 'tanh' "
- " type: TANH "
+ " type: 'TanH' "
"} ";
this->RunV0UpgradeTest(input_proto, expected_output_proto);
}
@@ -2122,7 +2122,7 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"name: 'CaffeNet' "
"layers { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" batch_size: 256 "
@@ -2137,7 +2137,7 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 96 "
" kernel_size: 11 "
@@ -2160,13 +2160,13 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'relu1' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv1' "
" top: 'conv1' "
"} "
"layers { "
" name: 'pool1' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: MAX "
" kernel_size: 3 "
@@ -2177,7 +2177,7 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'norm1' "
- " type: LRN "
+ " type: 'LRN' "
" lrn_param { "
" local_size: 5 "
" alpha: 0.0001 "
@@ -2188,7 +2188,7 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'conv2' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 256 "
" group: 2 "
@@ -2212,13 +2212,13 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'relu2' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv2' "
" top: 'conv2' "
"} "
"layers { "
" name: 'pool2' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: MAX "
" kernel_size: 3 "
@@ -2229,7 +2229,7 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'norm2' "
- " type: LRN "
+ " type: 'LRN' "
" lrn_param { "
" local_size: 5 "
" alpha: 0.0001 "
@@ -2240,7 +2240,7 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'conv3' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 384 "
" kernel_size: 3 "
@@ -2263,13 +2263,13 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'relu3' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv3' "
" top: 'conv3' "
"} "
"layers { "
" name: 'conv4' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 384 "
" group: 2 "
@@ -2293,13 +2293,13 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'relu4' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv4' "
" top: 'conv4' "
"} "
"layers { "
" name: 'conv5' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 256 "
" group: 2 "
@@ -2323,13 +2323,13 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'relu5' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv5' "
" top: 'conv5' "
"} "
"layers { "
" name: 'pool5' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" kernel_size: 3 "
" pool: MAX "
@@ -2340,7 +2340,7 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'fc6' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 4096 "
" weight_filler { "
@@ -2361,13 +2361,13 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'relu6' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'fc6' "
" top: 'fc6' "
"} "
"layers { "
" name: 'drop6' "
- " type: DROPOUT "
+ " type: 'Dropout' "
" dropout_param { "
" dropout_ratio: 0.5 "
" } "
@@ -2376,7 +2376,7 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'fc7' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 4096 "
" weight_filler { "
@@ -2397,13 +2397,13 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'relu7' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'fc7' "
" top: 'fc7' "
"} "
"layers { "
" name: 'drop7' "
- " type: DROPOUT "
+ " type: 'Dropout' "
" dropout_param { "
" dropout_ratio: 0.5 "
" } "
@@ -2412,7 +2412,7 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'fc8' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1000 "
" weight_filler { "
@@ -2433,7 +2433,7 @@ TEST_F(V0UpgradeTest, TestImageNet) {
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
diff --git a/src/caffe/util/insert_splits.cpp b/src/caffe/util/insert_splits.cpp
index f20efdae..8a0ad53e 100644
--- a/src/caffe/util/insert_splits.cpp
+++ b/src/caffe/util/insert_splits.cpp
@@ -111,7 +111,7 @@ void ConfigureSplitLayer(const string& layer_name, const string& blob_name,
split_layer_param->Clear();
split_layer_param->add_bottom(blob_name);
split_layer_param->set_name(SplitLayerName(layer_name, blob_name, blob_idx));
- split_layer_param->set_type(LayerParameter_LayerType_SPLIT);
+ split_layer_param->set_type("Split");
for (int k = 0; k < split_count; ++k) {
split_layer_param->add_top(
SplitBlobName(layer_name, blob_name, blob_idx, k));
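
ConfigureSplitLayer leans on two naming helpers whose output the split tests assert verbatim. Their bodies are outside this hunk, so the sketch below reconstructs them from the expected names (blob 'data' produced by layer 'data' at top index 0 yields 'data_data_0_split', with tops '..._split_0', '..._split_1', and so on); treat these implementations as inferred rather than quoted:

#include <sstream>
#include <string>

// Inferred shape of the helpers called above: the split layer is named
// <blob>_<layer>_<top_idx>_split and its k-th top appends _<k>.
std::string SplitLayerName(const std::string& layer_name,
                           const std::string& blob_name, const int blob_idx) {
  std::ostringstream name;
  name << blob_name << "_" << layer_name << "_" << blob_idx << "_split";
  return name.str();
}

std::string SplitBlobName(const std::string& layer_name,
                          const std::string& blob_name, const int blob_idx,
                          const int split_idx) {
  std::ostringstream name;
  name << SplitLayerName(layer_name, blob_name, blob_idx) << "_" << split_idx;
  return name.str();
}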
diff --git a/src/caffe/util/upgrade_proto.cpp b/src/caffe/util/upgrade_proto.cpp
index cbd6003c..31e7d4db 100644
--- a/src/caffe/util/upgrade_proto.cpp
+++ b/src/caffe/util/upgrade_proto.cpp
@@ -459,78 +459,78 @@ bool UpgradeLayerParameter(const LayerParameter& v0_layer_connection,
return is_fully_compatible;
}
-LayerParameter_LayerType UpgradeV0LayerType(const string& type) {
+const char* UpgradeV0LayerType(const string& type) {
if (type == "accuracy") {
- return LayerParameter_LayerType_ACCURACY;
+ return "Accuracy";
} else if (type == "bnll") {
- return LayerParameter_LayerType_BNLL;
+ return "BNLL";
} else if (type == "concat") {
- return LayerParameter_LayerType_CONCAT;
+ return "Concat";
} else if (type == "conv") {
- return LayerParameter_LayerType_CONVOLUTION;
+ return "Convolution";
} else if (type == "data") {
- return LayerParameter_LayerType_DATA;
+ return "Data";
} else if (type == "dropout") {
- return LayerParameter_LayerType_DROPOUT;
+ return "Dropout";
} else if (type == "euclidean_loss") {
- return LayerParameter_LayerType_EUCLIDEAN_LOSS;
+ return "EuclideanLoss";
} else if (type == "flatten") {
- return LayerParameter_LayerType_FLATTEN;
+ return "Flatten";
} else if (type == "hdf5_data") {
- return LayerParameter_LayerType_HDF5_DATA;
+ return "HDF5Data";
} else if (type == "hdf5_output") {
- return LayerParameter_LayerType_HDF5_OUTPUT;
+ return "HDF5Output";
} else if (type == "im2col") {
- return LayerParameter_LayerType_IM2COL;
+ return "Im2Col";
} else if (type == "images") {
- return LayerParameter_LayerType_IMAGE_DATA;
+ return "ImageData";
} else if (type == "infogain_loss") {
- return LayerParameter_LayerType_INFOGAIN_LOSS;
+ return "InfogainLoss";
} else if (type == "innerproduct") {
- return LayerParameter_LayerType_INNER_PRODUCT;
+ return "InnerProduct";
} else if (type == "lrn") {
- return LayerParameter_LayerType_LRN;
+ return "LRN";
} else if (type == "multinomial_logistic_loss") {
- return LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS;
+ return "MultinomialLogisticLoss";
} else if (type == "pool") {
- return LayerParameter_LayerType_POOLING;
+ return "Pooling";
} else if (type == "relu") {
- return LayerParameter_LayerType_RELU;
+ return "ReLU";
} else if (type == "sigmoid") {
- return LayerParameter_LayerType_SIGMOID;
+ return "Sigmoid";
} else if (type == "softmax") {
- return LayerParameter_LayerType_SOFTMAX;
+ return "Softmax";
} else if (type == "softmax_loss") {
- return LayerParameter_LayerType_SOFTMAX_LOSS;
+ return "SoftmaxWithLoss";
} else if (type == "split") {
- return LayerParameter_LayerType_SPLIT;
+ return "Split";
} else if (type == "tanh") {
- return LayerParameter_LayerType_TANH;
+ return "TanH";
} else if (type == "window_data") {
- return LayerParameter_LayerType_WINDOW_DATA;
+ return "WindowData";
} else {
- LOG(FATAL) << "Unknown layer name: " << type;
- return LayerParameter_LayerType_NONE;
+ LOG(FATAL) << "Unknown layer type: " << type;
+ return "";
}
}
bool NetNeedsDataUpgrade(const NetParameter& net_param) {
for (int i = 0; i < net_param.layers_size(); ++i) {
- if (net_param.layers(i).type() == LayerParameter_LayerType_DATA) {
+ if (net_param.layers(i).type() == "Data") {
DataParameter layer_param = net_param.layers(i).data_param();
if (layer_param.has_scale()) { return true; }
if (layer_param.has_mean_file()) { return true; }
if (layer_param.has_crop_size()) { return true; }
if (layer_param.has_mirror()) { return true; }
}
- if (net_param.layers(i).type() == LayerParameter_LayerType_IMAGE_DATA) {
+ if (net_param.layers(i).type() == "ImageData") {
ImageDataParameter layer_param = net_param.layers(i).image_data_param();
if (layer_param.has_scale()) { return true; }
if (layer_param.has_mean_file()) { return true; }
if (layer_param.has_crop_size()) { return true; }
if (layer_param.has_mirror()) { return true; }
}
- if (net_param.layers(i).type() == LayerParameter_LayerType_WINDOW_DATA) {
+ if (net_param.layers(i).type() == "WindowData") {
WindowDataParameter layer_param = net_param.layers(i).window_data_param();
if (layer_param.has_scale()) { return true; }
if (layer_param.has_mean_file()) { return true; }
@@ -541,11 +541,11 @@ bool NetNeedsDataUpgrade(const NetParameter& net_param) {
return false;
}
-#define CONVERT_LAYER_TRANSFORM_PARAM(TYPE, Name, param_name) \
+#define CONVERT_LAYER_TRANSFORM_PARAM(TYPE_NAME, PARAM_NAME) \
do { \
- if (net_param->layers(i).type() == LayerParameter_LayerType_##TYPE) { \
- Name##Parameter* layer_param = \
- net_param->mutable_layers(i)->mutable_##param_name##_param(); \
+ if (net_param->layers(i).type() == #TYPE_NAME) { \
+ TYPE_NAME##Parameter* layer_param = \
+ net_param->mutable_layers(i)->mutable_##PARAM_NAME##_param(); \
TransformationParameter* transform_param = \
net_param->mutable_layers(i)->mutable_transform_param(); \
if (layer_param->has_scale()) { \
@@ -569,9 +569,9 @@ bool NetNeedsDataUpgrade(const NetParameter& net_param) {
void UpgradeNetDataTransformation(NetParameter* net_param) {
for (int i = 0; i < net_param->layers_size(); ++i) {
- CONVERT_LAYER_TRANSFORM_PARAM(DATA, Data, data);
- CONVERT_LAYER_TRANSFORM_PARAM(IMAGE_DATA, ImageData, image_data);
- CONVERT_LAYER_TRANSFORM_PARAM(WINDOW_DATA, WindowData, window_data);
+ CONVERT_LAYER_TRANSFORM_PARAM(Data, data);
+ CONVERT_LAYER_TRANSFORM_PARAM(ImageData, image_data);
+ CONVERT_LAYER_TRANSFORM_PARAM(WindowData, window_data);
}
}
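
The macro rewrite above works only because the quoted type and the C++ type prefix are now the same token: #TYPE_NAME stringizes to the layer's type string, while TYPE_NAME##Parameter and mutable_##PARAM_NAME##_param() paste together the generated protobuf names, making the old three-argument form redundant. A sketch of what CONVERT_LAYER_TRANSFORM_PARAM(Data, data) expands to, with the unchanged middle of the macro body elided:

do {
  if (net_param->layers(i).type() == "Data") {  // from #TYPE_NAME
    DataParameter* layer_param =                // from TYPE_NAME##Parameter
        net_param->mutable_layers(i)->mutable_data_param();
    TransformationParameter* transform_param =
        net_param->mutable_layers(i)->mutable_transform_param();
    // ... per-field moves of scale, mean_file, crop_size, and mirror,
    // exactly as in the macro body above ...
  }
} while (0);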