author    Yangqing Jia <jiayq84@gmail.com>  2013-10-24 20:44:09 -0700
committer Yangqing Jia <jiayq84@gmail.com>  2013-10-24 20:44:09 -0700
commit    ec647a17b3dec78cc6f2dc8f729d37b9b589a28e
tree      4072994bb7f44c9347279c8305e911c9c784d41b /src
parent    3a97ca6063e11ea6ec41355c5823888d4ad148be
parent    1c9febc73fe32740ca3fdddbcb10a6a0595aa541
Merge branch 'master' of github.com:Yangqing/caffeine
Diffstat (limited to 'src')
-rw-r--r--  src/caffe/common.cpp                      4
-rw-r--r--  src/caffe/layer_factory.cpp              62
-rw-r--r--  src/caffe/layers/conv_layer.cpp           2
-rw-r--r--  src/caffe/layers/data_layer.cpp           2
-rw-r--r--  src/caffe/layers/dropout_layer.cu         1
-rw-r--r--  src/caffe/layers/im2col_layer.cpp         1
-rw-r--r--  src/caffe/layers/inner_product_layer.cpp  1
-rw-r--r--  src/caffe/layers/loss_layer.cu            4
-rw-r--r--  src/caffe/layers/lrn_layer.cpp            1
-rw-r--r--  src/caffe/layers/padding_layer.cu         1
-rw-r--r--  src/caffe/layers/pooling_layer.cpp        1
-rw-r--r--  src/caffe/layers/relu_layer.cu            2
-rw-r--r--  src/caffe/layers/softmax_layer.cu         1
-rw-r--r--  src/caffe/layers/softmax_loss_layer.cu    1
-rw-r--r--  src/caffe/net.cpp                         4
15 files changed, 22 insertions(+), 66 deletions(-)
diff --git a/src/caffe/common.cpp b/src/caffe/common.cpp
index 1fce86a2..c254d70e 100644
--- a/src/caffe/common.cpp
+++ b/src/caffe/common.cpp
@@ -36,13 +36,13 @@ Caffe::Caffe()
// Try to create a curand handler.
if (curandCreateGenerator(&curand_generator_, CURAND_RNG_PSEUDO_DEFAULT)
!= CURAND_STATUS_SUCCESS ||
- curandSetPseudoRandomGeneratorSeed(curand_generator_, 1701ULL)
+ curandSetPseudoRandomGeneratorSeed(curand_generator_, time(NULL))
!= CURAND_STATUS_SUCCESS) {
LOG(ERROR) << "Cannot create Curand generator. Curand won't be available.";
}
// Try to create a vsl stream. This should almost always work, but we will
// check it anyway.
- if (vslNewStream(&vsl_stream_, VSL_BRNG_MT19937, 1701) != VSL_STATUS_OK) {
+ if (vslNewStream(&vsl_stream_, VSL_BRNG_MT19937, time(NULL)) != VSL_STATUS_OK) {
LOG(ERROR) << "Cannot create vsl stream. VSL random number generator "
<< "won't be available.";
}
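Note on this hunk: the fixed seed 1701 previously made every run draw the same
random stream; seeding from time(NULL) trades that reproducibility for
run-to-run variation. Below is a minimal standalone sketch of the curand half
of the pattern; printing the seed is an addition for illustration (not part of
the patch) so a nominally non-deterministic run can still be replayed, and the
VSL stream is seeded the same way.

#include <ctime>
#include <iostream>
#include <curand.h>

int main() {
  curandGenerator_t gen;
  // Wall-clock seed, as in the patch above.
  unsigned long long seed = static_cast<unsigned long long>(time(NULL));
  if (curandCreateGenerator(&gen, CURAND_RNG_PSEUDO_DEFAULT)
          != CURAND_STATUS_SUCCESS ||
      curandSetPseudoRandomGeneratorSeed(gen, seed)
          != CURAND_STATUS_SUCCESS) {
    std::cerr << "Cannot create curand generator.\n";
    return 1;
  }
  // Recording the seed keeps the run reproducible after the fact.
  std::cout << "curand seeded with " << seed << "\n";
  curandDestroyGenerator(gen);
  return 0;
}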
diff --git a/src/caffe/layer_factory.cpp b/src/caffe/layer_factory.cpp
deleted file mode 100644
index 6961bb3f..00000000
--- a/src/caffe/layer_factory.cpp
+++ /dev/null
@@ -1,62 +0,0 @@
-// Copyright 2013 Yangqing Jia
-
-#ifndef CAFFE_LAYER_FACTORY_HPP_
-#define CAFFE_LAYER_FACTORY_HPP_
-
-#include <string>
-
-#include "caffe/layer.hpp"
-#include "caffe/vision_layers.hpp"
-#include "caffe/proto/caffe.pb.h"
-
-
-namespace caffe {
-
-
-// A function to get a specific layer from the specification given in
-// LayerParameter. Ideally this would be replaced by a factory pattern,
-// but we will leave it this way for now.
-template <typename Dtype>
-Layer<Dtype>* GetLayer(const LayerParameter& param) {
- const std::string& type = param.type();
- if (type == "accuracy") {
- return new AccuracyLayer<Dtype>(param);
- } else if (type == "conv") {
- return new ConvolutionLayer<Dtype>(param);
- } else if (type == "data") {
- return new DataLayer<Dtype>(param);
- } else if (type == "dropout") {
- return new DropoutLayer<Dtype>(param);
- } else if (type == "euclidean_loss") {
- return new EuclideanLossLayer<Dtype>(param);
- } else if (type == "im2col") {
- return new Im2colLayer<Dtype>(param);
- } else if (type == "innerproduct") {
- return new InnerProductLayer<Dtype>(param);
- } else if (type == "lrn") {
- return new LRNLayer<Dtype>(param);
- } else if (type == "padding") {
- return new PaddingLayer<Dtype>(param);
- } else if (type == "pool") {
- return new PoolingLayer<Dtype>(param);
- } else if (type == "relu") {
- return new ReLULayer<Dtype>(param);
- } else if (type == "softmax") {
- return new SoftmaxLayer<Dtype>(param);
- } else if (type == "softmax_loss") {
- return new SoftmaxWithLossLayer<Dtype>(param);
- } else if (type == "multinomial_logistic_loss") {
- return new MultinomialLogisticLossLayer<Dtype>(param);
- } else {
- LOG(FATAL) << "Unknown layer name: " << type;
- }
- // just to suppress old compiler warnings.
- return (Layer<Dtype>*)(NULL);
-}
-
-template Layer<float>* GetLayer(const LayerParameter& param);
-template Layer<double>* GetLayer(const LayerParameter& param);
-
-} // namespace caffe
-
-#endif // CAFFE_LAYER_FACTORY_HPP_
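The deleted switch-style factory above is replaced by per-layer REGISTER_LAYER
calls (added in the hunks below) and a CreateLayer lookup declared in
caffe/layer_register.hpp, whose contents are not part of this diff. The
following is a minimal self-contained sketch of how such a macro-driven
registry could work; everything besides the names REGISTER_LAYER and
CreateLayer (LayerRegistry, LayerRegisterer, and the stand-in Layer and
LayerParameter types) is an assumption for illustration.

#include <iostream>
#include <map>
#include <string>

// Stand-ins so the sketch compiles alone; in Caffe these come from
// caffe/layer.hpp and caffe/proto/caffe.pb.h.
struct LayerParameter {
  std::string type_;
  const std::string& type() const { return type_; }
};

template <typename Dtype>
struct Layer {
  explicit Layer(const LayerParameter&) {}
  virtual ~Layer() {}
};

// One registry per Dtype, keyed by the layer's string type.
template <typename Dtype>
class LayerRegistry {
 public:
  typedef Layer<Dtype>* (*Creator)(const LayerParameter&);
  typedef std::map<std::string, Creator> CreatorMap;
  // Meyers singleton: the map is constructed on first use, so
  // registration from other translation units during static
  // initialization is safe.
  static CreatorMap& Registry() {
    static CreatorMap registry;
    return registry;
  }
};

// Registration is a side effect of constructing a static object.
template <typename Dtype>
struct LayerRegisterer {
  LayerRegisterer(const std::string& type,
                  typename LayerRegistry<Dtype>::Creator creator) {
    LayerRegistry<Dtype>::Registry()[type] = creator;
  }
};

// REGISTER_LAYER("conv", ConvolutionLayer) expands to a creator function
// plus one static registerer per instantiated Dtype.
#define REGISTER_LAYER(type, ClassName)                                 \
  template <typename Dtype>                                             \
  Layer<Dtype>* Create_##ClassName(const LayerParameter& param) {       \
    return new ClassName<Dtype>(param);                                 \
  }                                                                     \
  static LayerRegisterer<float> g_##ClassName##_f(                      \
      type, Create_##ClassName<float>);                                 \
  static LayerRegisterer<double> g_##ClassName##_d(                     \
      type, Create_##ClassName<double>);

// CreateLayer replaces the old if/else chain with a single map lookup.
template <typename Dtype>
Layer<Dtype>* CreateLayer(const LayerParameter& param) {
  typedef LayerRegistry<Dtype> Reg;
  typename Reg::CreatorMap::const_iterator it =
      Reg::Registry().find(param.type());
  if (it == Reg::Registry().end()) {
    std::cerr << "Unknown layer type: " << param.type() << "\n";
    return 0;  // the real code would LOG(FATAL) here
  }
  return it->second(param);
}

// Demo: register a toy layer and create it by name.
template <typename Dtype>
struct ReLULayer : Layer<Dtype> {
  explicit ReLULayer(const LayerParameter& p) : Layer<Dtype>(p) {}
};
REGISTER_LAYER("relu", ReLULayer)

int main() {
  LayerParameter param;
  param.type_ = "relu";
  Layer<float>* layer = CreateLayer<float>(param);
  std::cout << (layer ? "created a relu layer\n" : "lookup failed\n");
  delete layer;
  return 0;
}

The payoff of this design is visible in the rest of the diff: adding a layer
no longer touches a central factory file; each translation unit registers
itself at static-initialization time next to its INSTANTIATE_CLASS line.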
diff --git a/src/caffe/layers/conv_layer.cpp b/src/caffe/layers/conv_layer.cpp
index f2608be2..5531b2d4 100644
--- a/src/caffe/layers/conv_layer.cpp
+++ b/src/caffe/layers/conv_layer.cpp
@@ -250,5 +250,7 @@ Dtype ConvolutionLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(ConvolutionLayer);
+REGISTER_LAYER("conv", ConvolutionLayer);
+
} // namespace caffe
diff --git a/src/caffe/layers/data_layer.cpp b/src/caffe/layers/data_layer.cpp
index 9ed95165..a1b55c20 100644
--- a/src/caffe/layers/data_layer.cpp
+++ b/src/caffe/layers/data_layer.cpp
@@ -225,5 +225,7 @@ Dtype DataLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(DataLayer);
+REGISTER_LAYER("data", DataLayer);
+
} // namespace caffe
diff --git a/src/caffe/layers/dropout_layer.cu b/src/caffe/layers/dropout_layer.cu
index df94f2de..8b98dbc8 100644
--- a/src/caffe/layers/dropout_layer.cu
+++ b/src/caffe/layers/dropout_layer.cu
@@ -119,6 +119,7 @@ Dtype DropoutLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(DropoutLayer);
+REGISTER_LAYER("dropout", DropoutLayer);
} // namespace caffe
diff --git a/src/caffe/layers/im2col_layer.cpp b/src/caffe/layers/im2col_layer.cpp
index 976c8441..d0fa4e34 100644
--- a/src/caffe/layers/im2col_layer.cpp
+++ b/src/caffe/layers/im2col_layer.cpp
@@ -71,5 +71,6 @@ Dtype Im2colLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(Im2colLayer);
+REGISTER_LAYER("im2col", Im2colLayer);
} // namespace caffe
diff --git a/src/caffe/layers/inner_product_layer.cpp b/src/caffe/layers/inner_product_layer.cpp
index 18f1df0d..e3a21f99 100644
--- a/src/caffe/layers/inner_product_layer.cpp
+++ b/src/caffe/layers/inner_product_layer.cpp
@@ -140,5 +140,6 @@ Dtype InnerProductLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(InnerProductLayer);
+REGISTER_LAYER("innerproduct", InnerProductLayer);
} // namespace caffe
diff --git a/src/caffe/layers/loss_layer.cu b/src/caffe/layers/loss_layer.cu
index 18a8023d..f8437f9c 100644
--- a/src/caffe/layers/loss_layer.cu
+++ b/src/caffe/layers/loss_layer.cu
@@ -117,7 +117,11 @@ void AccuracyLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
}
INSTANTIATE_CLASS(MultinomialLogisticLossLayer);
+REGISTER_LAYER("multinomial_logistic_loss", MultinomialLogisticLossLayer);
INSTANTIATE_CLASS(EuclideanLossLayer);
+REGISTER_LAYER("euclidean_loss", EuclideanLossLayer);
INSTANTIATE_CLASS(AccuracyLayer);
+REGISTER_LAYER("accuracy", AccuracyLayer);
+
} // namespace caffe
diff --git a/src/caffe/layers/lrn_layer.cpp b/src/caffe/layers/lrn_layer.cpp
index 337b77b7..0876f51e 100644
--- a/src/caffe/layers/lrn_layer.cpp
+++ b/src/caffe/layers/lrn_layer.cpp
@@ -130,6 +130,7 @@ Dtype LRNLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(LRNLayer);
+REGISTER_LAYER("lrn", LRNLayer);
} // namespace caffe
diff --git a/src/caffe/layers/padding_layer.cu b/src/caffe/layers/padding_layer.cu
index 90f5508b..b0b21f6d 100644
--- a/src/caffe/layers/padding_layer.cu
+++ b/src/caffe/layers/padding_layer.cu
@@ -134,6 +134,7 @@ Dtype PaddingLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(PaddingLayer);
+REGISTER_LAYER("padding", PaddingLayer);
} // namespace caffe
diff --git a/src/caffe/layers/pooling_layer.cpp b/src/caffe/layers/pooling_layer.cpp
index 59ce3fe7..498abab2 100644
--- a/src/caffe/layers/pooling_layer.cpp
+++ b/src/caffe/layers/pooling_layer.cpp
@@ -182,6 +182,7 @@ Dtype PoolingLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
INSTANTIATE_CLASS(PoolingLayer);
+REGISTER_LAYER("pool", PoolingLayer);
} // namespace caffe
diff --git a/src/caffe/layers/relu_layer.cu b/src/caffe/layers/relu_layer.cu
index b0fc46ef..79945b58 100644
--- a/src/caffe/layers/relu_layer.cu
+++ b/src/caffe/layers/relu_layer.cu
@@ -84,6 +84,6 @@ Dtype ReLULayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(ReLULayer);
-
+REGISTER_LAYER("relu", ReLULayer);
} // namespace caffe
diff --git a/src/caffe/layers/softmax_layer.cu b/src/caffe/layers/softmax_layer.cu
index a7659697..635de685 100644
--- a/src/caffe/layers/softmax_layer.cu
+++ b/src/caffe/layers/softmax_layer.cu
@@ -176,6 +176,7 @@ Dtype SoftmaxLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(SoftmaxLayer);
+REGISTER_LAYER("softmax", SoftmaxLayer);
} // namespace caffe
diff --git a/src/caffe/layers/softmax_loss_layer.cu b/src/caffe/layers/softmax_loss_layer.cu
index 3a001c08..a90968a4 100644
--- a/src/caffe/layers/softmax_loss_layer.cu
+++ b/src/caffe/layers/softmax_loss_layer.cu
@@ -68,6 +68,7 @@ Dtype SoftmaxWithLossLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
}
INSTANTIATE_CLASS(SoftmaxWithLossLayer);
+REGISTER_LAYER("softmax_loss", SoftmaxWithLossLayer);
} // namespace caffe
diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
index 38a806df..f25dba24 100644
--- a/src/caffe/net.cpp
+++ b/src/caffe/net.cpp
@@ -7,6 +7,7 @@
#include "caffe/proto/caffe.pb.h"
#include "caffe/layer.hpp"
+#include "caffe/layer_register.hpp"
#include "caffe/net.hpp"
using std::pair;
@@ -47,7 +48,8 @@ Net<Dtype>::Net(const NetParameter& param,
for (int i = 0; i < param.layers_size(); ++i) {
const LayerConnection& layer_connection = param.layers(i);
const LayerParameter& layer_param = layer_connection.layer();
- layers_.push_back(shared_ptr<Layer<Dtype> >(GetLayer<Dtype>(layer_param)));
+ layers_.push_back(
+ shared_ptr<Layer<Dtype> >(CreateLayer<Dtype>(layer_param)));
layer_names_.push_back(layer_param.name());
LOG(INFO) << "Creating Layer " << layer_param.name();
bool need_backward = false;