author     Aravindh Mahendran <aravindh@aravindh-VirtualBox.(none)>   2014-02-16 10:43:34 -0500
committer  Evan Shelhamer <shelhamer@imaginarynumber.net>             2014-02-26 13:03:39 -0800
commit     8448708ba37c920ae6b126a3b6f0f4353e848b01 (patch)
tree       c817e9a0b5d7fd6dd288e04b790c5ca61893f520 /include
parent     f0b76ea244a07dd258671015d0e944da5deac7c6 (diff)
Added tanh activation function layer.
Diffstat (limited to 'include')
-rw-r--r--  include/caffe/vision_layers.hpp | 17
1 file changed, 17 insertions(+), 0 deletions(-)
diff --git a/include/caffe/vision_layers.hpp b/include/caffe/vision_layers.hpp
index 82e52cd5..47909a21 100644
--- a/include/caffe/vision_layers.hpp
+++ b/include/caffe/vision_layers.hpp
@@ -44,6 +44,23 @@ class ReLULayer : public NeuronLayer<Dtype> {
       const bool propagate_down, vector<Blob<Dtype>*>* bottom);
 };
 
+template <typename Dtype>
+class TanHLayer : public NeuronLayer<Dtype> {
+ public:
+  explicit TanHLayer(const LayerParameter& param)
+      : NeuronLayer<Dtype>(param) {}
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      vector<Blob<Dtype>*>* top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      vector<Blob<Dtype>*>* top);
+
+  virtual Dtype Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const bool propagate_down, vector<Blob<Dtype>*>* bottom);
+  virtual Dtype Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const bool propagate_down, vector<Blob<Dtype>*>* bottom);
+};
 
 template <typename Dtype>
 class SigmoidLayer : public NeuronLayer<Dtype> {
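
Note: the header above only declares TanHLayer's four methods; their definitions live in the layer's .cpp/.cu sources, which this diff does not include. As a rough sketch of what the CPU paths could look like under Caffe's layer API of this era (Forward_cpu/Backward_cpu signatures as declared above, Blob accessors cpu_data()/cpu_diff()/mutable_cpu_data()/mutable_cpu_diff(), and Backward returning a Dtype), not the commit's actual implementation:

#include <cmath>
#include <vector>

#include "caffe/vision_layers.hpp"

namespace caffe {

template <typename Dtype>
void TanHLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
    vector<Blob<Dtype>*>* top) {
  const Dtype* bottom_data = bottom[0]->cpu_data();
  Dtype* top_data = (*top)[0]->mutable_cpu_data();
  const int count = bottom[0]->count();
  for (int i = 0; i < count; ++i) {
    top_data[i] = std::tanh(bottom_data[i]);  // elementwise y = tanh(x)
  }
}

template <typename Dtype>
Dtype TanHLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
    const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
  if (propagate_down) {
    const Dtype* top_data = top[0]->cpu_data();
    const Dtype* top_diff = top[0]->cpu_diff();
    Dtype* bottom_diff = (*bottom)[0]->mutable_cpu_diff();
    const int count = (*bottom)[0]->count();
    for (int i = 0; i < count; ++i) {
      // tanh'(x) = 1 - tanh(x)^2 = 1 - y^2, so the gradient is computed
      // from the forward output alone; the input need not be kept.
      bottom_diff[i] = top_diff[i] * (Dtype(1) - top_data[i] * top_data[i]);
    }
  }
  return Dtype(0);
}

}  // namespace caffe

The Forward_gpu/Backward_gpu counterparts would do the same elementwise work in a CUDA kernel. Reusing the forward output for the derivative is the natural design here, since tanh's gradient depends only on its output value.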