path: root/src/caffe/layers/loss_layer.cpp
author    Jeff Donahue <jeff.donahue@gmail.com>  2014-07-11 01:55:17 -0700
committer Jeff Donahue <jeff.donahue@gmail.com>  2014-08-13 13:22:04 -0700
commit  512a626fc71c69ed4460024b31c5fe8dff1e668c (patch)
tree    f3d11beb593a4e64e779a99b82538ceee7fae21a /src/caffe/layers/loss_layer.cpp
parent  7a3ed9b8edf43895770b63cb4d9f5cacf0dba047 (diff)
Generalize loss by allowing any top blob to be used as a loss whose
elements are summed with a scalar coefficient. Forward for layers no longer returns a loss; instead, all loss layers must have top blobs. Existing loss layers are given a top blob automatically by Net::Init, with an associated top_loss_weight of 1 (set in LossLayer::FurtherSetUp). Because the amount of common SetUp logic has grown, the SetUp interface is changed so that subclasses normally override only FurtherSetUp, which SetUp calls.
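To make the new convention concrete, here is a minimal self-contained sketch (a hypothetical helper, not Caffe's actual Net forward code) of how a total loss could be accumulated as a weighted sum over top-blob elements, with a weight of zero marking a non-loss blob:

    #include <cstddef>
    #include <vector>

    // Sketch only: each "top blob" is flattened to a std::vector<Dtype>,
    // and its elements contribute to the loss scaled by its loss weight.
    template <typename Dtype>
    Dtype WeightedLoss(const std::vector<std::vector<Dtype> >& tops,
                       const std::vector<Dtype>& loss_weights) {
      Dtype total = 0;
      for (std::size_t i = 0; i < tops.size(); ++i) {
        if (loss_weights[i] == Dtype(0)) continue;  // not a loss output
        Dtype sum = 0;
        for (std::size_t j = 0; j < tops[i].size(); ++j) {
          sum += tops[i][j];
        }
        total += loss_weights[i] * sum;
      }
      return total;
    }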
Diffstat (limited to 'src/caffe/layers/loss_layer.cpp')
-rw-r--r--  src/caffe/layers/loss_layer.cpp  |  11
1 file changed, 5 insertions(+), 6 deletions(-)
diff --git a/src/caffe/layers/loss_layer.cpp b/src/caffe/layers/loss_layer.cpp
index 48665221..89d8c91e 100644
--- a/src/caffe/layers/loss_layer.cpp
+++ b/src/caffe/layers/loss_layer.cpp
@@ -11,16 +11,15 @@
 namespace caffe {
 
 template <typename Dtype>
-void LossLayer<Dtype>::SetUp(
+void LossLayer<Dtype>::LayerSetUp(
     const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
-  Layer<Dtype>::SetUp(bottom, top);
   CHECK_EQ(bottom[0]->num(), bottom[1]->num())
       << "The data and label should have the same number.";
-  if (top->size() == 1) {
-    // Layers should copy the loss in the top blob
-    (*top)[0]->Reshape(1, 1, 1, 1);
+  (*top)[0]->Reshape(1, 1, 1, 1);
+  // LossLayers have a non-zero (1) loss by default.
+  if (this->layer_param_.loss_weight_size() == 0) {
+    this->layer_param_.add_loss_weight(Dtype(1));
   }
-  FurtherSetUp(bottom, top);
 }
 
 INSTANTIATE_CLASS(LossLayer);
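The diff above also reflects the revised setup contract from the commit message: common checks live in the base class, and subclasses override only the layer-specific hook. A simplified, hypothetical sketch of that pattern (names and types reduced from Caffe's actual Layer hierarchy):

    #include <vector>

    // Sketch only: SetUp runs the shared logic once, then delegates to
    // the subclass hook, so loss layers override just LayerSetUp.
    template <typename Dtype>
    class ToyLayer {
     public:
      virtual ~ToyLayer() {}
      void SetUp(const std::vector<Dtype>& bottom, std::vector<Dtype>* top) {
        // ...common setup shared by every layer goes here...
        LayerSetUp(bottom, top);  // layer-specific setup
      }
     protected:
      virtual void LayerSetUp(const std::vector<Dtype>& bottom,
                              std::vector<Dtype>* top) = 0;
    };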