diff options
author | Evan Shelhamer <shelhamer@imaginarynumber.net> | 2014-02-26 14:56:37 -0800 |
---|---|---|
committer | Evan Shelhamer <shelhamer@imaginarynumber.net> | 2014-02-26 14:56:37 -0800 |
commit | 527cfab7cf5b7fd7eff973851d96d6f6ea75807f (patch) | |
tree | d4d8a5f76283f2e88b357b95784e36ed4fb68f40 /include | |
parent | a8c74072716e5d8271ae75a0a4fc031cd5b879b8 (diff) | |
download | caffeonacl-527cfab7cf5b7fd7eff973851d96d6f6ea75807f.tar.gz caffeonacl-527cfab7cf5b7fd7eff973851d96d6f6ea75807f.tar.bz2 caffeonacl-527cfab7cf5b7fd7eff973851d96d6f6ea75807f.zip |
Define split layer (merge trick)
Originally authored by Jeff Donahue, but this commit was needed for
historical reasons.
Diffstat (limited to 'include')
-rw-r--r-- | include/caffe/vision_layers.hpp | 21 |
1 file changed, 21 insertions, 0 deletions
diff --git a/include/caffe/vision_layers.hpp b/include/caffe/vision_layers.hpp
index 47909a21..1861535d 100644
--- a/include/caffe/vision_layers.hpp
+++ b/include/caffe/vision_layers.hpp
@@ -126,6 +126,27 @@ class DropoutLayer : public NeuronLayer<Dtype> {
 template <typename Dtype>
+class SplitLayer : public Layer<Dtype> {
+ public:
+  explicit SplitLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
+      vector<Blob<Dtype>*>* top);
+
+ protected:
+  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      vector<Blob<Dtype>*>* top);
+  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+      vector<Blob<Dtype>*>* top);
+  virtual Dtype Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const bool propagate_down, vector<Blob<Dtype>*>* bottom);
+  virtual Dtype Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const bool propagate_down, vector<Blob<Dtype>*>* bottom);
+  int count_;
+};
+
+
+template <typename Dtype>
 class FlattenLayer : public Layer<Dtype> {
  public:
   explicit FlattenLayer(const LayerParameter& param)