author    manuele <manuele.tamburrano@gmail.com>  2015-05-15 11:17:00 +0200
committer manuele <manuele.tamburrano@gmail.com>  2015-05-15 11:17:00 +0200
commit    c7c4c648c48177171c358f8c8c805ebe7e7cea9b (patch)
tree      0b9a344dd1ce60350d6ed1e4c08a9e608e0f2bf2 /src/caffe/test/test_net.cpp
parent    77ab8f649f78844dfbbd7d91e984428c637df499 (diff)
Added "propagate_down" param to LayerParameter
Diffstat (limited to 'src/caffe/test/test_net.cpp')
 src/caffe/test/test_net.cpp | 145 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 145 insertions(+), 0 deletions(-)
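The new field is a repeated bool on LayerParameter, paired with the layer's bottom blobs in order, that lets a net definition switch backpropagation off for individual bottoms. As a minimal sketch of reading the flag back through the generated protobuf API (the 'prediction'/'label' blob names are illustrative, not from this patch; assumes a Caffe build that includes this change):

    // Parse a layer definition that uses the new per-bottom flag and print
    // which bottoms it will backpropagate to. 'prediction' and 'label' are
    // hypothetical blob names chosen for the example.
    #include <iostream>
    #include <string>
    #include <google/protobuf/text_format.h>
    #include "caffe/proto/caffe.pb.h"

    int main() {
      caffe::LayerParameter layer;
      const std::string text =
          "name: 'loss' "
          "type: 'SigmoidCrossEntropyLoss' "
          "bottom: 'prediction' "
          "bottom: 'label' "
          "propagate_down: [true, false] ";  // skip backprop to 'label'
      if (!google::protobuf::TextFormat::ParseFromString(text, &layer)) {
        return 1;
      }
      for (int i = 0; i < layer.propagate_down_size(); ++i) {
        std::cout << layer.bottom(i) << ": "
                  << (layer.propagate_down(i) ? "backprop" : "skip")
                  << std::endl;
      }
      return 0;
    }

The test added below builds a small net twice, once with both flags true and once with the label bottom set to false, and checks the resulting backward bookkeeping.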
diff --git a/src/caffe/test/test_net.cpp b/src/caffe/test/test_net.cpp
index 08106e79..782a96bc 100644
--- a/src/caffe/test/test_net.cpp
+++ b/src/caffe/test/test_net.cpp
@@ -613,6 +613,103 @@ class NetTest : public MultiDeviceTest<TypeParam> {
     InitNetFromProtoString(proto);
   }
 
+  virtual void InitSkipPropNet(bool test_skip_true) {
+    string proto =
+        "name: 'SkipPropTestNetwork' "
+        "layer { "
+        "  name: 'data' "
+        "  type: 'DummyData' "
+        "  dummy_data_param { "
+        "    shape { "
+        "      dim: 5 "
+        "      dim: 2 "
+        "      dim: 3 "
+        "      dim: 4 "
+        "    } "
+        "    data_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "    shape { "
+        "      dim: 5 "
+        "    } "
+        "    data_filler { "
+        "      type: 'constant' "
+        "      value: 0 "
+        "    } "
+        "  } "
+        "  top: 'data' "
+        "  top: 'label' "
+        "} "
+        "layer { "
+        "  name: 'silence' "
+        "  bottom: 'label' "
+        "  type: 'Silence' "
+        "} "
+        "layer { "
+        "  name: 'innerproduct' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 1 "
+        "    weight_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "    bias_filler { "
+        "      type: 'constant' "
+        "      value: 0 "
+        "    } "
+        "  } "
+        "  param { "
+        "    lr_mult: 1 "
+        "    decay_mult: 1 "
+        "  } "
+        "  param { "
+        "    lr_mult: 2 "
+        "    decay_mult: 0 "
+        "  } "
+        "  bottom: 'data' "
+        "  top: 'innerproduct' "
+        "} "
+        "layer { "
+        "  name: 'ip_fake_labels' "
+        "  type: 'InnerProduct' "
+        "  inner_product_param { "
+        "    num_output: 1 "
+        "    weight_filler { "
+        "      type: 'gaussian' "
+        "      std: 0.01 "
+        "    } "
+        "    bias_filler { "
+        "      type: 'constant' "
+        "      value: 0 "
+        "    } "
+        "  } "
+        "  bottom: 'data' "
+        "  top: 'fake_labels' "
+        "} "
+        "layer { "
+        "  name: 'argmax' "
+        "  bottom: 'fake_labels' "
+        "  top: 'label_argmax' "
+        "  type: 'ArgMax' "
+        "} "
+        "layer { "
+        "  name: 'loss' "
+        "  bottom: 'innerproduct' "
+        "  bottom: 'label_argmax' ";
+    if (test_skip_true)
+      proto += "  propagate_down: [true, false] ";
+    else
+      proto += "  propagate_down: [true, true] ";
+    proto +=
+        "  top: 'cross_entropy_loss' "
+        "  type: 'SigmoidCrossEntropyLoss' "
+        "  loss_weight: 0.1 "
+        "} ";
+    InitNetFromProtoString(proto);
+  }
+
   int seed_;
   shared_ptr<Net<Dtype> > net_;
 };
@@ -2224,4 +2321,52 @@ TYPED_TEST(NetTest, TestReshape) {
   }
 }
 
+TYPED_TEST(NetTest, TestSkipPropagateDown) {
+  // check bottom_need_backward if propagate_down is true
+  this->InitSkipPropNet(false);
+  vector<bool> vec_layer_need_backward = this->net_->layer_need_backward();
+  for (int layer_id = 0; layer_id < this->net_->layers().size(); ++layer_id) {
+    string layer_name = this->net_->layer_names()[layer_id];
+    if (layer_name == "loss") {
+      // access the bottom_need_backward flag corresponding to the label blob
+      bool need_back = this->net_->bottom_need_backward()[layer_id][1];
+      // if propagate_down is true, the loss layer will try to
+      // backpropagate on labels
+      EXPECT_TRUE(need_back) << "bottom_need_backward should be True";
+    }
+    // layer_need_backward should be True except for data and silence layers
+    if (layer_name.find("data") != std::string::npos ||
+        layer_name == "silence") {
+      EXPECT_FALSE(vec_layer_need_backward[layer_id])
+          << "layer_need_backward for " << layer_name << " should be False";
+    } else {
+      EXPECT_TRUE(vec_layer_need_backward[layer_id])
+          << "layer_need_backward for " << layer_name << " should be True";
+    }
+  }
+  // check bottom_need_backward if propagate_down is false
+  this->InitSkipPropNet(true);
+  vec_layer_need_backward.clear();
+  vec_layer_need_backward = this->net_->layer_need_backward();
+  for (int layer_id = 0; layer_id < this->net_->layers().size(); ++layer_id) {
+    string layer_name = this->net_->layer_names()[layer_id];
+    if (layer_name == "loss") {
+      // access the bottom_need_backward flag corresponding to the label blob
+      bool need_back = this->net_->bottom_need_backward()[layer_id][1];
+      // if propagate_down is false, the loss layer will not try to
+      // backpropagate on labels
+      EXPECT_FALSE(need_back) << "bottom_need_backward should be False";
+    }
+    // layer_need_backward should be False except for innerproduct and
+    // loss layers
+    if (layer_name == "innerproduct" || layer_name == "loss") {
+      EXPECT_TRUE(vec_layer_need_backward[layer_id])
+          << "layer_need_backward for " << layer_name << " should be True";
+    } else {
+      EXPECT_FALSE(vec_layer_need_backward[layer_id])
+          << "layer_need_backward for " << layer_name << " should be False";
+    }
+  }
+}
+
 }  // namespace caffe
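The test exercises exactly the flags a caller can inspect on any initialized net. A short usage sketch of the same accessors (the prototxt filename is a placeholder, and the two-argument Net constructor with a caffe::TRAIN phase is an assumption about the surrounding Caffe version, not part of this patch):

    // After Net initialization, report which bottom blobs each layer will
    // backpropagate to; bottoms marked propagate_down: false in the net
    // definition should show up as "skipped".
    #include <iostream>
    #include <vector>
    #include "caffe/net.hpp"

    int main() {
      // "skip_prop_net.prototxt" is a hypothetical net definition file.
      caffe::Net<float> net("skip_prop_net.prototxt", caffe::TRAIN);
      const std::vector<std::vector<bool> >& need = net.bottom_need_backward();
      for (size_t l = 0; l < net.layers().size(); ++l) {
        for (size_t b = 0; b < need[l].size(); ++b) {
          std::cout << net.layer_names()[l] << " bottom " << b << ": "
                    << (need[l][b] ? "backward" : "skipped") << std::endl;
        }
      }
      return 0;
    }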