Diffstat (limited to 'src/caffe/layers/cudnn_relu_layer.cu')
-rw-r--r--  src/caffe/layers/cudnn_relu_layer.cu | 24 +++++++++++++++++++-----
1 file changed, 19 insertions(+), 5 deletions(-)
diff --git a/src/caffe/layers/cudnn_relu_layer.cu b/src/caffe/layers/cudnn_relu_layer.cu
index 86250870..b9d0870a 100644
--- a/src/caffe/layers/cudnn_relu_layer.cu
+++ b/src/caffe/layers/cudnn_relu_layer.cu
@@ -17,9 +17,16 @@ void CuDNNReLULayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
   const Dtype* bottom_data = bottom[0]->gpu_data();
   Dtype* top_data = top[0]->mutable_gpu_data();
+
+  Dtype alpha = 1.0;
+  Dtype beta = 0.0;
+
   CUDNN_CHECK(cudnnActivationForward(this->handle_,
-      CUDNN_ACTIVATION_RELU,
-      this->bottom_desc_, bottom_data, this->top_desc_, top_data));
+      CUDNN_ACTIVATION_RELU,
+      reinterpret_cast<void *>(&alpha),
+      this->bottom_desc_, bottom_data,
+      reinterpret_cast<void *>(&beta),
+      this->top_desc_, top_data));
 }
 
 template <typename Dtype>
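
In the cuDNN API targeted by this patch, cudnnActivationForward takes host pointers to scaling factors and computes y = alpha * relu(x) + beta * y, so alpha = 1 and beta = 0 reproduce the previous unscaled behaviour (beta = 0 discards whatever the output buffer already held). Below is a minimal standalone sketch of the same call, assuming the pre-v5 mode-enum overload and float data; the names relu_forward_sketch, handle, desc, x_gpu, and y_gpu are illustrative and not part of this patch.

#include <cudnn.h>

// Sketch: forward ReLU with explicit alpha/beta blending factors.
// y = alpha * relu(x) + beta * y; alpha = 1, beta = 0 overwrites y.
cudnnStatus_t relu_forward_sketch(cudnnHandle_t handle,
                                  cudnnTensorDescriptor_t desc,
                                  const float* x_gpu, float* y_gpu) {
  float alpha = 1.0f;  // scale applied to the activation result
  float beta = 0.0f;   // a non-zero beta would blend with the existing y_gpu
  return cudnnActivationForward(handle,
      CUDNN_ACTIVATION_RELU,
      &alpha,          // passed by host pointer, as in the diff above
      desc, x_gpu,
      &beta,
      desc, y_gpu);
}
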
@@ -39,10 +46,17 @@ void CuDNNReLULayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
   const Dtype* top_diff = top[0]->gpu_diff();
   const Dtype* bottom_data = bottom[0]->gpu_data();
   Dtype* bottom_diff = bottom[0]->mutable_gpu_diff();
+
+  Dtype alpha = 1.0;
+  Dtype beta = 0.0;
+
   CUDNN_CHECK(cudnnActivationBackward(this->handle_,
-      CUDNN_ACTIVATION_RELU,
-      this->top_desc_, top_data, this->top_desc_, top_diff,
-      this->bottom_desc_, bottom_data, this->bottom_desc_, bottom_diff));
+      CUDNN_ACTIVATION_RELU,
+      reinterpret_cast<void *>(&alpha),
+      this->top_desc_, top_data, this->top_desc_, top_diff,
+      this->bottom_desc_, bottom_data,
+      reinterpret_cast<void *>(&beta),
+      this->bottom_desc_, bottom_diff));
 }
 
 INSTANTIATE_LAYER_GPU_FUNCS(CuDNNReLULayer);
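
The Backward_gpu hunk follows the same convention: with alpha = 1 and beta = 0, cudnnActivationBackward writes bottom_diff = relu'(bottom_data) * top_diff without blending in the previous contents of bottom_diff. Instead of declaring alpha/beta locals at every call site, the factors can be kept as per-type constants; Caffe's cuDNN wrapper in caffe/util/cudnn.hpp later centralizes them as cudnn::dataType<Dtype>::one and ::zero. A hedged sketch of that pattern follows; ScalingConst and relu_backward_sketch are illustrative names, not Caffe code.

#include <cudnn.h>

// Per-type scaling constants, so call sites can avoid local alpha/beta.
template <typename Dtype> struct ScalingConst {
  static const Dtype one;
  static const Dtype zero;
};
template <typename Dtype> const Dtype ScalingConst<Dtype>::one = Dtype(1);
template <typename Dtype> const Dtype ScalingConst<Dtype>::zero = Dtype(0);

// dx = alpha * relu'(x) * dy + beta * dx; alpha = 1, beta = 0 overwrites dx.
// Uses the pre-v5 mode-enum overload, matching the diff above.
template <typename Dtype>
cudnnStatus_t relu_backward_sketch(cudnnHandle_t handle,
    cudnnTensorDescriptor_t top_desc, const Dtype* top_data,
    const Dtype* top_diff,
    cudnnTensorDescriptor_t bottom_desc, const Dtype* bottom_data,
    Dtype* bottom_diff) {
  return cudnnActivationBackward(handle,
      CUDNN_ACTIVATION_RELU,
      &ScalingConst<Dtype>::one,
      top_desc, top_data, top_desc, top_diff,
      bottom_desc, bottom_data,
      &ScalingConst<Dtype>::zero,
      bottom_desc, bottom_diff);
}
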