author     Ronghang Hu <huronghang@hotmail.com>    2015-08-12 12:05:56 -0700
committer  Ronghang Hu <huronghang@hotmail.com>    2015-08-12 14:46:32 -0700
commit     6b50ed6fc1897ce1ccd673cf0287788b38b58a6d
tree       71217c1e2b3f75c5774db1b0e0afb9ac0d040b2d /src
parent     0d34d5ba0fbdc09ac8f372cb581ccaec599f10bc
Apply mutex only to shared layers and fix NVCC warning
Diffstat (limited to 'src')
-rw-r--r--  src/caffe/layer.cpp  | 27
-rw-r--r--  src/caffe/net.cpp    |  7
2 files changed, 31 insertions, 3 deletions
diff --git a/src/caffe/layer.cpp b/src/caffe/layer.cpp
new file mode 100644
index 00000000..3b912898
--- /dev/null
+++ b/src/caffe/layer.cpp
@@ -0,0 +1,27 @@
+#include <boost/thread.hpp>
+#include "caffe/layer.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void Layer<Dtype>::InitMutex() {
+  forward_mutex_.reset(new boost::mutex());
+}
+
+template <typename Dtype>
+void Layer<Dtype>::Lock() {
+  if (IsShared()) {
+    forward_mutex_->lock();
+  }
+}
+
+template <typename Dtype>
+void Layer<Dtype>::Unlock() {
+  if (IsShared()) {
+    forward_mutex_->unlock();
+  }
+}
+
+INSTANTIATE_CLASS(Layer);
+
+}  // namespace caffe
diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
index 14f8385c..7f5bdf7e 100644
--- a/src/caffe/net.cpp
+++ b/src/caffe/net.cpp
@@ -84,7 +84,7 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
   bottom_need_backward_.resize(param.layer_size());
   for (int layer_id = 0; layer_id < param.layer_size(); ++layer_id) {
     // For non-root solvers, whether this layer is shared from root_net_.
-    bool is_shared_layer = !Caffe::root_solver()
+    bool share_from_root = !Caffe::root_solver()
         && root_net_->layers_[layer_id]->ShareInParallel();
     // Inherit phase from net if unset.
     if (!param.layer(layer_id).has_phase()) {
@@ -98,9 +98,10 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
           << "propagate_down param must be specified "
           << "either 0 or bottom_size times ";
     }
-    if (is_shared_layer) {
+    if (share_from_root) {
       LOG(INFO) << "Sharing layer " << layer_param.name() << " from root net";
       layers_.push_back(root_net_->layers_[layer_id]);
+      layers_[layer_id]->SetShared(true);
     } else {
       layers_.push_back(LayerRegistry<Dtype>::CreateLayer(layer_param));
     }
@@ -137,7 +138,7 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
       }
     }
     // After this layer is connected, set it up.
-    if (is_shared_layer) {
+    if (share_from_root) {
       // Set up size of top blobs using root_net_
       const vector<Blob<Dtype>*>& base_top = root_net_->top_vecs_[layer_id];
       const vector<Blob<Dtype>*>& this_top = this->top_vecs_[layer_id];
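
Context for the change: the new Lock()/Unlock() pair in layer.cpp is meant to bracket the forward pass of a layer object that worker nets share with the root net, so only shared layers take the mutex and unshared layers pay no synchronization cost (in upstream Caffe the caller is Layer<Dtype>::Forward in include/caffe/layer.hpp, which this diff, limited to 'src', does not show). Below is a minimal standalone sketch of the same pattern, not CaffeOnACL code: it uses std::mutex in place of boost::mutex, and ToyLayer and its members are illustrative names only.

// Sketch: conditional locking for objects shared across worker threads.
#include <iostream>
#include <memory>
#include <mutex>
#include <thread>
#include <vector>

class ToyLayer {
 public:
  ToyLayer() : is_shared_(false), forward_mutex_(new std::mutex()) {}

  void SetShared(bool is_shared) { is_shared_ = is_shared; }
  bool IsShared() const { return is_shared_; }

  // Forward work is serialized only when this instance is shared.
  void Forward(int worker_id) {
    Lock();
    std::cout << "worker " << worker_id << " running forward on shared layer\n";
    Unlock();
  }

 private:
  // Mirrors the Lock()/Unlock() added in layer.cpp: the mutex is taken
  // only when the layer is marked as shared.
  void Lock() {
    if (IsShared()) { forward_mutex_->lock(); }
  }
  void Unlock() {
    if (IsShared()) { forward_mutex_->unlock(); }
  }

  bool is_shared_;
  std::unique_ptr<std::mutex> forward_mutex_;
};

int main() {
  ToyLayer layer;
  layer.SetShared(true);  // as Net::Init marks layers shared from the root net

  // Two "solver" threads drive the same layer object; the mutex keeps
  // their forward passes from overlapping.
  std::vector<std::thread> workers;
  for (int i = 0; i < 2; ++i) {
    workers.emplace_back([&layer, i] { layer.Forward(i); });
  }
  for (std::thread& t : workers) {
    t.join();
  }
  return 0;
}

The design choice shown in the commit is the same as in the sketch: the cost of the lock is pushed behind the IsShared() check, so single-net (root solver) training is unaffected and only layers reused across parallel nets serialize their forward passes.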