summary refs log tree commit diff
path: root/src/caffe/test/test_hinge_loss_layer.cpp
diff options
context:
space:
mode:
authorJeff Donahue <jeff.donahue@gmail.com>2014-07-11 01:55:17 -0700
committerJeff Donahue <jeff.donahue@gmail.com>2014-08-13 13:22:04 -0700
commit512a626fc71c69ed4460024b31c5fe8dff1e668c (patch)
treef3d11beb593a4e64e779a99b82538ceee7fae21a /src/caffe/test/test_hinge_loss_layer.cpp
parent7a3ed9b8edf43895770b63cb4d9f5cacf0dba047 (diff)
downloadcaffeonacl-512a626fc71c69ed4460024b31c5fe8dff1e668c.tar.gz
caffeonacl-512a626fc71c69ed4460024b31c5fe8dff1e668c.tar.bz2
caffeonacl-512a626fc71c69ed4460024b31c5fe8dff1e668c.zip
Generalize loss by allowing any top blob to be used as a loss in which
its elements are summed with a scalar coefficient. Forward for layers no longer returns a loss; instead all loss layers must have top blobs. Existing loss layers are given a top blob automatically by Net::Init, with an associated top_loss_weight of 1 (set in LossLayer::FurtherSetUp). Due to the increased amount of common SetUp logic, the SetUp interface is modified such that all subclasses should normally override FurtherSetUp only, which is called by SetUp.
Diffstat (limited to 'src/caffe/test/test_hinge_loss_layer.cpp')
-rw-r--r-- src/caffe/test/test_hinge_loss_layer.cpp | 21
1 file changed, 11 insertions(+), 10 deletions(-)
diff --git a/src/caffe/test/test_hinge_loss_layer.cpp b/src/caffe/test/test_hinge_loss_layer.cpp
index 8f6f6f78..3c11b9ac 100644
--- a/src/caffe/test/test_hinge_loss_layer.cpp
+++ b/src/caffe/test/test_hinge_loss_layer.cpp
@@ -22,7 +22,8 @@ class HingeLossLayerTest : public MultiDeviceTest<TypeParam> {
protected:
HingeLossLayerTest()
: blob_bottom_data_(new Blob<Dtype>(10, 5, 1, 1)),
- blob_bottom_label_(new Blob<Dtype>(10, 1, 1, 1)) {
+ blob_bottom_label_(new Blob<Dtype>(10, 1, 1, 1)),
+ blob_top_loss_(new Blob<Dtype>()) {
// fill the values
Caffe::set_random_seed(1701);
FillerParameter filler_param;
@@ -34,13 +35,16 @@ class HingeLossLayerTest : public MultiDeviceTest<TypeParam> {
blob_bottom_label_->mutable_cpu_data()[i] = caffe_rng_rand() % 5;
}
blob_bottom_vec_.push_back(blob_bottom_label_);
+ blob_top_vec_.push_back(blob_top_loss_);
}
virtual ~HingeLossLayerTest() {
delete blob_bottom_data_;
delete blob_bottom_label_;
+ delete blob_top_loss_;
}
Blob<Dtype>* const blob_bottom_data_;
Blob<Dtype>* const blob_bottom_label_;
+ Blob<Dtype>* const blob_top_loss_;
vector<Blob<Dtype>*> blob_bottom_vec_;
vector<Blob<Dtype>*> blob_top_vec_;
};
@@ -52,10 +56,9 @@ TYPED_TEST(HingeLossLayerTest, TestGradientL1) {
typedef typename TypeParam::Dtype Dtype;
LayerParameter layer_param;
HingeLossLayer<Dtype> layer(layer_param);
- layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);
- GradientChecker<Dtype> checker(1e-2, 1e-3, 1701, 1, 0.01);
- checker.CheckGradientSingle(&layer, &(this->blob_bottom_vec_),
- &(this->blob_top_vec_), 0, -1, -1);
+ GradientChecker<Dtype> checker(1e-2, 2e-3, 1701, 1, 0.01);
+ checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+ &(this->blob_top_vec_), 0);
}
TYPED_TEST(HingeLossLayerTest, TestGradientL2) {
@@ -65,11 +68,9 @@ TYPED_TEST(HingeLossLayerTest, TestGradientL2) {
HingeLossParameter* hinge_loss_param = layer_param.mutable_hinge_loss_param();
hinge_loss_param->set_norm(HingeLossParameter_Norm_L2);
HingeLossLayer<Dtype> layer(layer_param);
- layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);
- GradientChecker<Dtype> checker(1e-2, 2e-3, 1701);
- checker.CheckGradientSingle(&layer, &(this->blob_bottom_vec_),
- &(this->blob_top_vec_), 0, -1, -1);
+ GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+ checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+ &(this->blob_top_vec_), 0);
}
-
} // namespace caffe