diff options
author | Sergio <sguada@gmail.com> | 2014-06-27 18:54:08 -0700 |
---|---|---|
committer | Sergio <sguada@gmail.com> | 2014-06-27 18:54:08 -0700 |
commit | aa654a5daea5e6f6f5c5514687ed75662637901e (patch) | |
tree | 49f167c9ed70edf48e0fd87c44fae4eebd91c173 /src | |
parent | e954481f8fde0194e0a88b369952e6dd6737fc30 (diff) | |
download | caffeonacl-aa654a5daea5e6f6f5c5514687ed75662637901e.tar.gz caffeonacl-aa654a5daea5e6f6f5c5514687ed75662637901e.tar.bz2 caffeonacl-aa654a5daea5e6f6f5c5514687ed75662637901e.zip |
Modify Dropout to allow backward pass in TEST phase
Conflicts:
src/caffe/layers/dropout_layer.cpp
src/caffe/layers/dropout_layer.cu
Diffstat (limited to 'src')
-rw-r--r-- | src/caffe/layers/dropout_layer.cpp | 13 | ||||
-rw-r--r-- | src/caffe/layers/dropout_layer.cu | 19 |
2 files changed, 19 insertions, 13 deletions
diff --git a/src/caffe/layers/dropout_layer.cpp b/src/caffe/layers/dropout_layer.cpp index e9a1a524..a3501bf5 100644 --- a/src/caffe/layers/dropout_layer.cpp +++ b/src/caffe/layers/dropout_layer.cpp @@ -49,14 +49,17 @@ template <typename Dtype> void DropoutLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down, vector<Blob<Dtype>*>* bottom) { - CHECK(Caffe::phase() == Caffe::TRAIN); if (propagate_down[0]) { const Dtype* top_diff = top[0]->cpu_diff(); Dtype* bottom_diff = (*bottom)[0]->mutable_cpu_diff(); - const unsigned int* mask = rand_vec_->cpu_data(); - const int count = (*bottom)[0]->count(); - for (int i = 0; i < count; ++i) { - bottom_diff[i] = top_diff[i] * mask[i] * scale_; + if (Caffe::phase() == Caffe::TRAIN) { + const unsigned int* mask = rand_vec_->cpu_data(); + const int count = (*bottom)[0]->count(); + for (int i = 0; i < count; ++i) { + bottom_diff[i] = top_diff[i] * mask[i] * scale_; + } + } else { + caffe_copy(top[0]->count(), top_diff, bottom_diff); } } } diff --git a/src/caffe/layers/dropout_layer.cu b/src/caffe/layers/dropout_layer.cu index 2c722640..0040d263 100644 --- a/src/caffe/layers/dropout_layer.cu +++ b/src/caffe/layers/dropout_layer.cu @@ -58,17 +58,20 @@ template <typename Dtype> void DropoutLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down, vector<Blob<Dtype>*>* bottom) { - CHECK(Caffe::phase() == Caffe::TRAIN); if (propagate_down[0]) { const Dtype* top_diff = top[0]->gpu_diff(); Dtype* bottom_diff = (*bottom)[0]->mutable_gpu_diff(); - const unsigned int* mask = - static_cast<const unsigned int*>(rand_vec_->gpu_data()); - const int count = (*bottom)[0]->count(); - // NOLINT_NEXT_LINE(whitespace/operators) - DropoutBackward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>( - count, top_diff, mask, uint_thres_, scale_, bottom_diff); - CUDA_POST_KERNEL_CHECK; + if (Caffe::phase() == Caffe::TRAIN) { + const unsigned int* mask = + 
static_cast<const unsigned int*>(rand_vec_->gpu_data()); + const int count = (*bottom)[0]->count(); + // NOLINT_NEXT_LINE(whitespace/operators) + DropoutBackward<Dtype><<<CAFFE_GET_BLOCKS(count), CAFFE_CUDA_NUM_THREADS>>>( + count, top_diff, mask, uint_thres_, scale_, bottom_diff); + CUDA_POST_KERNEL_CHECK; + } else { + caffe_gpu_copy(top[0]->count(), top_diff, bottom_diff); + } } } |