author | Jeff Donahue <jeff.donahue@gmail.com> | 2014-06-16 16:37:17 -0700
---|---|---
committer | Evan Shelhamer <shelhamer@imaginarynumber.net> | 2014-06-26 12:07:39 -0700
commit | a04834f6ceee13310b7f4e17ccf639c1e7abd459 (patch) |
tree | bdd42194e955373c5d37e461849ba8b8b29a917a | /include/caffe
parent | 74443591d62feb2755d98ed87fb4aecc22d78297 (diff) |
force_backward works properly with non-backproppable things
Diffstat (limited to 'include/caffe')
-rw-r--r-- | include/caffe/layer.hpp | 8
-rw-r--r-- | include/caffe/loss_layers.hpp | 15
2 files changed, 23 insertions(+), 0 deletions(-)
```diff
diff --git a/include/caffe/layer.hpp b/include/caffe/layer.hpp
index 12e7610d..690c36ba 100644
--- a/include/caffe/layer.hpp
+++ b/include/caffe/layer.hpp
@@ -84,6 +84,14 @@ class Layer {
 
   virtual inline int MinTopBlobs() const { return -1; }
   virtual inline int MaxTopBlobs() const { return -1; }
+  // Declare for each bottom blob whether to allow force_backward -- that is,
+  // if AllowForceBackward(i) == false, we will ignore the force_backward
+  // setting and backpropagate to blob i only if it needs gradient information
+  // (as is done when force_backward == false).
+  virtual inline bool AllowForceBackward(const int bottom_index) const {
+    return true;
+  }
+
  protected:
   // The protobuf that stores the layer parameters
   LayerParameter layer_param_;
diff --git a/include/caffe/loss_layers.hpp b/include/caffe/loss_layers.hpp
index b8adc996..bb03f637 100644
--- a/include/caffe/loss_layers.hpp
+++ b/include/caffe/loss_layers.hpp
@@ -38,6 +38,11 @@ class LossLayer : public Layer<Dtype> {
 
   virtual inline int ExactNumBottomBlobs() const { return 2; }
   virtual inline int MaxTopBlobs() const { return 1; }
+  // We usually cannot backpropagate to the labels; ignore force_backward for
+  // these inputs.
+  virtual inline bool AllowForceBackward(const int bottom_index) const {
+    return bottom_index != 1;
+  }
 };
 
 // Forward declare SoftmaxLayer for use in SoftmaxWithLossLayer.
@@ -63,6 +68,11 @@ class SoftmaxWithLossLayer : public Layer<Dtype> {
     return LayerParameter_LayerType_SOFTMAX_LOSS;
   }
   virtual inline int MaxTopBlobs() const { return 2; }
+  // We cannot backpropagate to the labels; ignore force_backward for these
+  // inputs.
+  virtual inline bool AllowForceBackward(const int bottom_index) const {
+    return bottom_index != 1;
+  }
 
  protected:
   virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
@@ -133,6 +143,11 @@ class EuclideanLossLayer : public LossLayer<Dtype> {
   virtual inline LayerParameter_LayerType type() const {
     return LayerParameter_LayerType_EUCLIDEAN_LOSS;
   }
+  // Unlike most loss layers, in the EuclideanLossLayer we can backpropagate
+  // to both inputs.
+  virtual inline bool AllowForceBackward(const int bottom_index) const {
+    return true;
+  }
 
  protected:
   virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
```
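To illustrate the contract this commit introduces, here is a minimal standalone sketch. It is not Caffe's actual `Net` setup code; the `Layer` and `SoftmaxWithLossLayer` stand-ins model only `AllowForceBackward()`, and the `need_backward` combination rule is an assumption based on the comments in the diff ("ignore the force_backward setting and backpropagate to blob i only if it needs gradient information").

```cpp
#include <iostream>

// Stand-ins for the classes touched by this diff; only AllowForceBackward()
// is modeled here, everything else about these layers is omitted.
struct Layer {
  virtual ~Layer() {}
  // Default from layer.hpp: any bottom blob may be force-backpropagated.
  virtual bool AllowForceBackward(const int /*bottom_index*/) const {
    return true;
  }
};

struct SoftmaxWithLossLayer : public Layer {
  // From loss_layers.hpp: bottom blob 1 holds the labels, which cannot
  // receive a gradient, so force_backward is ignored for it.
  virtual bool AllowForceBackward(const int bottom_index) const {
    return bottom_index != 1;
  }
};

int main() {
  SoftmaxWithLossLayer loss;
  const bool force_backward = true;    // net-level force_backward setting
  const bool blob_needs_grad = false;  // pretend no consumer needs this grad
  for (int i = 0; i < 2; ++i) {
    // Assumed combination rule, per the comments in the diff: honor
    // force_backward only where the layer allows it; otherwise fall back to
    // the usual need-based decision.
    const bool need_backward =
        blob_needs_grad || (force_backward && loss.AllowForceBackward(i));
    std::cout << "bottom " << i << ": backward = " << need_backward << "\n";
  }
  return 0;
}
```

With `force_backward` on, the sketch backpropagates to bottom 0 (the predictions) but not to bottom 1 (the labels). `EuclideanLossLayer` restores the default `return true` because both of its inputs are differentiable, so forcing backward to either one is legitimate there.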