author    Eric Tzeng <eric.s.tzeng@gmail.com>  2014-02-26 18:45:45 -0800
committer Eric Tzeng <eric.s.tzeng@gmail.com>  2014-02-26 18:45:45 -0800
commit    b17ac6620b4e6ae33d4d889b6cdbde1c447bb944 (patch)
tree      537d1c964fed4191b424c105328a1c6f72281c4a /src/caffe/layers/tanh_layer.cpp
parent    de4f7a419f176a49d77b2a0506c78333d988058c (diff)
Splitting source files between CUDA and CPU code.
Diffstat (limited to 'src/caffe/layers/tanh_layer.cpp')
-rw-r--r--  src/caffe/layers/tanh_layer.cpp | 48
1 file changed, 48 insertions, 0 deletions
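The commit message refers to splitting each layer's implementation between CPU (.cpp) and CUDA (.cu) translation units: the class declares both Forward_cpu/Backward_cpu and Forward_gpu/Backward_gpu, and each pair lives in its own source file. The sketch below illustrates that split pattern with a toy stand-in class rather than the real Caffe types (which need the full framework to compile); ToyTanHLayer and its members are hypothetical names, not taken from this commit.

// Illustrative sketch of the CPU/GPU source split -- not part of this commit.
// The CPU definition would sit in toy_tanh_layer.cpp (the side this commit
// adds for TanHLayer); the GPU definition would sit in toy_tanh_layer.cu and
// launch a CUDA kernel. Declaring Forward_gpu without defining it is fine
// here because nothing in this file calls it.
#include <cmath>
#include <cstddef>
#include <cstdio>
#include <vector>

// --- would live in the shared header ---
class ToyTanHLayer {
 public:
  // CPU path: defined in the .cpp translation unit.
  void Forward_cpu(const std::vector<float>& in, std::vector<float>* out);
  // GPU path: would be defined in the .cu translation unit.
  void Forward_gpu(const std::vector<float>& in, std::vector<float>* out);
};

// --- would live in toy_tanh_layer.cpp ---
void ToyTanHLayer::Forward_cpu(const std::vector<float>& in,
                               std::vector<float>* out) {
  out->resize(in.size());
  for (std::size_t i = 0; i < in.size(); ++i) {
    (*out)[i] = std::tanh(in[i]);  // elementwise activation
  }
}

int main() {
  ToyTanHLayer layer;
  std::vector<float> in;
  in.push_back(-2.0f);
  in.push_back(0.0f);
  in.push_back(2.0f);
  std::vector<float> out;
  layer.Forward_cpu(in, &out);
  for (std::size_t i = 0; i < out.size(); ++i) {
    std::printf("tanh(%g) = %g\n", in[i], out[i]);
  }
  return 0;
}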
diff --git a/src/caffe/layers/tanh_layer.cpp b/src/caffe/layers/tanh_layer.cpp
new file mode 100644
index 00000000..d6f99560
--- /dev/null
+++ b/src/caffe/layers/tanh_layer.cpp
@@ -0,0 +1,48 @@
+// Copyright 2014 Aravindh Mahendran
+// TanH neuron activation function layer.
+// Adapted from ReLU layer code written by Yangqing Jia
+
+#include <algorithm>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void TanHLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top) {
+ const Dtype* bottom_data = bottom[0]->cpu_data();
+ Dtype* top_data = (*top)[0]->mutable_cpu_data();
+ Dtype exp2x;
+ const int count = bottom[0]->count();
+ for (int i = 0; i < count; ++i) {
+ exp2x = exp(2*bottom_data[i]);
+ top_data[i] = (exp2x - Dtype(1))/(exp2x + Dtype(1));
+ }
+}
+
+template <typename Dtype>
+Dtype TanHLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+ const bool propagate_down,
+ vector<Blob<Dtype>*>* bottom) {
+ if (propagate_down) {
+ const Dtype* bottom_data = (*bottom)[0]->cpu_data();
+ const Dtype* top_diff = top[0]->cpu_diff();
+ Dtype* bottom_diff = (*bottom)[0]->mutable_cpu_diff();
+ const int count = (*bottom)[0]->count();
+ Dtype exp2x;
+ Dtype tanhx;
+ for (int i = 0; i < count; ++i) {
+ exp2x = exp(2*bottom_data[i]);
+ tanhx = (exp2x - Dtype(1))/(exp2x + Dtype(1));
+ bottom_diff[i] = top_diff[i] * (1 - tanhx*tanhx);
+ }
+ }
+ return Dtype(0);
+}
+
+INSTANTIATE_CLASS(TanHLayer);
+
+} // namespace caffe
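For reference, the arithmetic in Forward_cpu and Backward_cpu can be checked outside the framework: (e^(2x) - 1)/(e^(2x) + 1) is algebraically tanh(x), and d/dx tanh(x) = 1 - tanh^2(x), which is the factor the backward pass multiplies against top_diff. Below is a minimal standalone check, not part of the commit; it uses std::tanh as the reference and a finite difference for the gradient, and also illustrates that the exp(2x) form can overflow for large positive inputs (later Caffe code calls tanh() directly).

// Standalone numerical check of the identities used above -- not part of
// the commit. Compares the commit's exp-based formulas against std::tanh
// and a central finite difference for the gradient.
#include <cmath>
#include <cstdio>

// Forward formula as written in Forward_cpu: (e^{2x} - 1) / (e^{2x} + 1).
double tanh_via_exp(double x) {
  double exp2x = std::exp(2.0 * x);
  return (exp2x - 1.0) / (exp2x + 1.0);
}

// Backward factor as written in Backward_cpu: 1 - tanh(x)^2.
double tanh_grad(double x) {
  double t = std::tanh(x);
  return 1.0 - t * t;
}

int main() {
  const double xs[] = {-3.0, -0.5, 0.0, 0.5, 3.0};
  for (double x : xs) {
    // Forward: the exp form should match std::tanh to rounding error.
    double fwd_err = std::fabs(tanh_via_exp(x) - std::tanh(x));
    // Backward: compare 1 - tanh^2 against a central difference.
    const double h = 1e-5;
    double fd = (std::tanh(x + h) - std::tanh(x - h)) / (2.0 * h);
    double bwd_err = std::fabs(tanh_grad(x) - fd);
    std::printf("x=%5.2f  fwd_err=%.2e  bwd_err=%.2e\n", x, fwd_err, bwd_err);
  }
  // Caveat: exp(2x) overflows for large x, so the exp form yields NaN
  // where std::tanh correctly saturates to 1.
  std::printf("tanh_via_exp(400) = %g, std::tanh(400) = %g\n",
              tanh_via_exp(400.0), std::tanh(400.0));
  return 0;
}

Compile and run with any C++11 compiler, e.g. `g++ -std=c++11 check_tanh.cpp && ./a.out` (the file name is only a placeholder).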