summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorEvan Shelhamer <shelhamer@imaginarynumber.net>2015-09-25 14:57:17 -0700
committerEvan Shelhamer <shelhamer@imaginarynumber.net>2015-09-25 14:57:17 -0700
commit3b854406a51693891a4cb5199d0fd8aa6c1c4fb6 (patch)
tree020210a255741b031ad6bba8aafc330d0115b46c
parentaa6900aa066281847eeacda3fc314f201907cc4a (diff)
parent1394cdc383e2f41d7435862442b15151e8ac1237 (diff)
downloadcaffeonacl-3b854406a51693891a4cb5199d0fd8aa6c1c4fb6.tar.gz
caffeonacl-3b854406a51693891a4cb5199d0fd8aa6c1c4fb6.tar.bz2
caffeonacl-3b854406a51693891a4cb5199d0fd8aa6c1c4fb6.zip
Merge pull request #3032 from ronghanghu/ban-pythonlayer-in-parallel
Disallow PythonLayer in Multi-GPU training
-rw-r--r--include/caffe/python_layer.hpp6
1 file changed, 6 insertions, 0 deletions
diff --git a/include/caffe/python_layer.hpp b/include/caffe/python_layer.hpp
index c43c1e8a..b839d526 100644
--- a/include/caffe/python_layer.hpp
+++ b/include/caffe/python_layer.hpp
@@ -18,6 +18,12 @@ class PythonLayer : public Layer<Dtype> {
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
+ // Disallow PythonLayer in MultiGPU training stage, due to GIL issues
+ // Details: https://github.com/BVLC/caffe/issues/2936
+ if (this->phase_ == TRAIN && Caffe::solver_count() > 1
+ && !ShareInParallel()) {
+ LOG(FATAL) << "PythonLayer is not implemented in Multi-GPU training";
+ }
self_.attr("param_str") = bp::str(
this->layer_param_.python_param().param_str());
self_.attr("setup")(bottom, top);