diff options
author | Jeff Donahue <jeff.donahue@gmail.com> | 2014-07-14 23:49:36 -0700 |
---|---|---|
committer | Jeff Donahue <jeff.donahue@gmail.com> | 2014-07-14 23:49:36 -0700 |
commit | 304be2fec6440aef246ac19b4832d3d5e72fc6c9 (patch) | |
tree | 4b5e4123261049a1f7b0a7a3f439d60595b74cb0 | |
parent | 12fe64846120db80ed2cfeda057382993c23c067 (diff) | |
download | caffeonacl-304be2fec6440aef246ac19b4832d3d5e72fc6c9.tar.gz caffeonacl-304be2fec6440aef246ac19b4832d3d5e72fc6c9.tar.bz2 caffeonacl-304be2fec6440aef246ac19b4832d3d5e72fc6c9.zip |
Fix SoftmaxLayerTest: forgot to change this one to use DtypesAndDevices;
was causing Travis build to randomly fail if a previous test had set the
mode to GPU (which no test that is run by 'make runtestnogpu' should, so
I guess there's another bug somewhere).
-rw-r--r-- | src/caffe/test/test_softmax_layer.cpp | 19 |
1 file changed, 11 insertions(+), 8 deletions(-)
diff --git a/src/caffe/test/test_softmax_layer.cpp b/src/caffe/test/test_softmax_layer.cpp index 4a9c0f28..f0be279b 100644 --- a/src/caffe/test/test_softmax_layer.cpp +++ b/src/caffe/test/test_softmax_layer.cpp @@ -18,8 +18,9 @@ namespace caffe { extern cudaDeviceProp CAFFE_TEST_CUDA_PROP; -template <typename Dtype> -class SoftmaxLayerTest : public ::testing::Test { +template <typename TypeParam> +class SoftmaxLayerTest : public MultiDeviceTest<TypeParam> { + typedef typename TypeParam::Dtype Dtype; protected: SoftmaxLayerTest() : blob_bottom_(new Blob<Dtype>(2, 10, 1, 1)), @@ -38,16 +39,17 @@ class SoftmaxLayerTest : public ::testing::Test { vector<Blob<Dtype>*> blob_top_vec_; }; -TYPED_TEST_CASE(SoftmaxLayerTest, TestDtypes); +TYPED_TEST_CASE(SoftmaxLayerTest, TestDtypesAndDevices); TYPED_TEST(SoftmaxLayerTest, TestForward) { + typedef typename TypeParam::Dtype Dtype; LayerParameter layer_param; - SoftmaxLayer<TypeParam> layer(layer_param); + SoftmaxLayer<Dtype> layer(layer_param); layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_)); layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_)); // Test sum for (int i = 0; i < this->blob_bottom_->num(); ++i) { - TypeParam sum = 0; + Dtype sum = 0; for (int j = 0; j < this->blob_top_->channels(); ++j) { sum += this->blob_top_->data_at(i, j, 0, 0); } @@ -56,7 +58,7 @@ TYPED_TEST(SoftmaxLayerTest, TestForward) { } // Test exact values for (int i = 0; i < this->blob_bottom_->num(); ++i) { - TypeParam scale = 0; + Dtype scale = 0; for (int j = 0; j < this->blob_bottom_->channels(); ++j) { scale += exp(this->blob_bottom_->data_at(i, j, 0, 0)); } @@ -72,9 +74,10 @@ TYPED_TEST(SoftmaxLayerTest, TestForward) { } TYPED_TEST(SoftmaxLayerTest, TestGradient) { + typedef typename TypeParam::Dtype Dtype; LayerParameter layer_param; - SoftmaxLayer<TypeParam> layer(layer_param); - GradientChecker<TypeParam> checker(1e-2, 1e-3); + SoftmaxLayer<Dtype> layer(layer_param); + GradientChecker<Dtype> checker(1e-2, 1e-3); 
checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_), &(this->blob_top_vec_)); } |