author | Evan Shelhamer <shelhamer@imaginarynumber.net> | 2014-06-26 10:21:19 -0700 |
---|---|---|
committer | Evan Shelhamer <shelhamer@imaginarynumber.net> | 2014-06-26 10:21:19 -0700 |
commit | 61e1ef03537461f708bcb110e9decb69ca4de544 (patch) | |
tree | 52844f605782a033f401e43d31e35cbd732a4cb9 /include/caffe/vision_layers.hpp | |
parent | adab413fc65f760f9bc2097f379c2703e3641de4 (diff) | |
file SoftmaxWithLoss in with loss layers
Diffstat (limited to 'include/caffe/vision_layers.hpp')
-rw-r--r-- | include/caffe/vision_layers.hpp | 40 |
1 file changed, 0 insertions, 40 deletions
```diff
diff --git a/include/caffe/vision_layers.hpp b/include/caffe/vision_layers.hpp
index b7e1068c..3fd7e2f8 100644
--- a/include/caffe/vision_layers.hpp
+++ b/include/caffe/vision_layers.hpp
@@ -405,46 +405,6 @@ class SoftmaxLayer : public Layer<Dtype> {
   Blob<Dtype> scale_;
 };
 
-/* SoftmaxWithLossLayer
-  Implements softmax and computes the loss.
-
-  It is preferred over separate softmax + multinomiallogisticloss
-  layers due to more numerically stable gradients.
-
-  In test, this layer could be replaced by simple softmax layer.
-*/
-template <typename Dtype>
-class SoftmaxWithLossLayer : public Layer<Dtype> {
- public:
-  explicit SoftmaxWithLossLayer(const LayerParameter& param)
-      : Layer<Dtype>(param), softmax_layer_(new SoftmaxLayer<Dtype>(param)) {}
-  virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
-      vector<Blob<Dtype>*>* top);
-
-  virtual inline LayerParameter_LayerType type() const {
-    return LayerParameter_LayerType_SOFTMAX_LOSS;
-  }
-  virtual inline int ExactNumBottomBlobs() const { return 2; }
-  virtual inline int MaxTopBlobs() const { return 2; }
-
- protected:
-  virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
-      vector<Blob<Dtype>*>* top);
-  virtual Dtype Forward_gpu(const vector<Blob<Dtype>*>& bottom,
-      vector<Blob<Dtype>*>* top);
-  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
-      const bool propagate_down, vector<Blob<Dtype>*>* bottom);
-  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
-      const bool propagate_down, vector<Blob<Dtype>*>* bottom);
-
-  shared_ptr<SoftmaxLayer<Dtype> > softmax_layer_;
-  // prob stores the output probability of the layer.
-  Blob<Dtype> prob_;
-  // Vector holders to call the underlying softmax layer forward and backward.
-  vector<Blob<Dtype>*> softmax_bottom_vec_;
-  vector<Blob<Dtype>*> softmax_top_vec_;
-};
-
 /* SplitLayer */
 template <typename Dtype>
```
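The removed doc comment says the fused layer is preferred over separate softmax + multinomial logistic loss layers "due to more numerically stable gradients." The standalone C++ sketch below (illustrative only, not Caffe's implementation; the `SoftmaxWithLoss` function and its signature are assumptions for this example) shows the idea: compute the loss with the log-sum-exp trick and take the gradient as `prob - one_hot(label)`, avoiding the underflow and `log(0)` problems of the separate-layer path.

```cpp
// Sketch: why fusing softmax with the log-loss is numerically safer than
// chaining a Softmax layer into a MultinomialLogisticLoss layer.
#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

// Fused softmax + negative log-likelihood for one sample.
// Returns the loss and fills `grad` with d(loss)/d(scores).
double SoftmaxWithLoss(const std::vector<double>& scores, int label,
                       std::vector<double>* grad) {
  // Shift by the max score so exp() cannot overflow.
  const double max_score = *std::max_element(scores.begin(), scores.end());
  double sum_exp = 0.0;
  for (double s : scores) sum_exp += std::exp(s - max_score);
  const double log_sum_exp = max_score + std::log(sum_exp);

  // loss = -log softmax(scores)[label] = log_sum_exp - scores[label],
  // computed without ever forming a probability that could underflow to 0.
  const double loss = log_sum_exp - scores[label];

  // Gradient of the fused layer: softmax probability minus the one-hot target.
  grad->resize(scores.size());
  for (size_t i = 0; i < scores.size(); ++i) {
    const double prob = std::exp(scores[i] - log_sum_exp);
    (*grad)[i] = prob - (static_cast<int>(i) == label ? 1.0 : 0.0);
  }
  return loss;
}

int main() {
  // Large logits would drive the separate softmax -> log() path to log(0),
  // but the fused computation stays finite.
  std::vector<double> scores = {1000.0, 10.0, -5.0};
  std::vector<double> grad;
  const double loss = SoftmaxWithLoss(scores, /*label=*/1, &grad);
  std::printf("loss = %f\n", loss);        // ~990, finite
  std::printf("grad[0] = %f\n", grad[0]);  // ~1 (prob ~1, target 0)
  return 0;
}
```

In the separate-layer formulation, the softmax output for the correct class can round to 0 in floating point, so the downstream `-log(prob)` blows up; the fused form keeps both the loss and the `prob - target` gradient well behaved, which is the rationale the removed comment states.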