diff options
author | Jeff Donahue <jeff.donahue@gmail.com> | 2014-06-10 12:38:59 -0700 |
---|---|---|
committer | Evan Shelhamer <shelhamer@imaginarynumber.net> | 2014-06-26 11:34:19 -0700 |
commit | 74443591d62feb2755d98ed87fb4aecc22d78297 (patch) | |
tree | 0457d47ed4e48da19caa5684ad2b3fe815c488af /src/caffe/layers/bnll_layer.cu | |
parent | 2cd155f5459e47b713f3d8dc1d6ee9b555a89ceb (diff) | |
download | caffeonacl-74443591d62feb2755d98ed87fb4aecc22d78297.tar.gz caffeonacl-74443591d62feb2755d98ed87fb4aecc22d78297.tar.bz2 caffeonacl-74443591d62feb2755d98ed87fb4aecc22d78297.zip |
Change the Backward interface: propagate_down is now a vector — use it to fix a
long-standing issue with how this is handled in loss layers (esp.
EuclideanLossLayer).
Diffstat (limited to 'src/caffe/layers/bnll_layer.cu')
-rw-r--r-- | src/caffe/layers/bnll_layer.cu | 4 |
1 file changed, 2 insertions, 2 deletions
diff --git a/src/caffe/layers/bnll_layer.cu b/src/caffe/layers/bnll_layer.cu index 75bea00e..7849d11c 100644 --- a/src/caffe/layers/bnll_layer.cu +++ b/src/caffe/layers/bnll_layer.cu @@ -45,9 +45,9 @@ __global__ void BNLLBackward(const int n, const Dtype* in_diff, template <typename Dtype> void BNLLLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top, - const bool propagate_down, + const vector<bool>& propagate_down, vector<Blob<Dtype>*>* bottom) { - if (propagate_down) { + if (propagate_down[0]) { const Dtype* bottom_data = (*bottom)[0]->gpu_data(); const Dtype* top_diff = top[0]->gpu_diff(); Dtype* bottom_diff = (*bottom)[0]->mutable_gpu_diff(); |