Diffstat (limited to 'src/caffe/test/test_argmax_layer.cpp')
-rw-r--r--  src/caffe/test/test_argmax_layer.cpp | 137
1 file changed, 132 insertions(+), 5 deletions(-)
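This change grows the test bottom blob from (10, 20, 1, 1) to (10, 10, 20, 20), adds ArgMaxLayer tests for the new axis parameter (setup tests for axis 0, a negative axis, and axis combined with out_max_val, plus forward tests for argmax and top-k along an axis), and switches the flattened TopK checks to raw cpu_data() indexing. The new setup tests all assert the same shape rule, sketched below for illustration only (the helper name is hypothetical, not taken from the layer implementation): a negative axis counts back from the last axis, the argmax axis is replaced by top_k, and every other dimension is carried through; with an axis set, out_max_val leaves the shape unchanged, unlike the legacy no-axis case where TestSetupMaxVal expects channels() == 2.

#include <vector>

// Hypothetical helper mirroring the shape rule the TestSetupAxis* cases
// assert: a negative axis counts back from the last axis, the argmax axis
// is replaced by top_k, and every other bottom dimension is preserved.
std::vector<int> ExpectedArgMaxTopShape(const std::vector<int>& bottom_shape,
                                        int axis, int top_k) {
  const int num_axes = static_cast<int>(bottom_shape.size());
  const int canonical_axis = (axis < 0) ? axis + num_axes : axis;  // e.g. -2 -> 2
  std::vector<int> top_shape(bottom_shape);
  top_shape[canonical_axis] = top_k;
  return top_shape;
}

For the (10, 10, 20, 20) bottom used here, axis = -2 with the default top_k of 1 would give (10, 10, 1, 20), matching TestSetupAxisNegativeIndexing.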
diff --git a/src/caffe/test/test_argmax_layer.cpp b/src/caffe/test/test_argmax_layer.cpp
index 895c3d37..bbf19099 100644
--- a/src/caffe/test/test_argmax_layer.cpp
+++ b/src/caffe/test/test_argmax_layer.cpp
@@ -16,7 +16,7 @@ template <typename Dtype>
class ArgMaxLayerTest : public CPUDeviceTest<Dtype> {
protected:
ArgMaxLayerTest()
- : blob_bottom_(new Blob<Dtype>(10, 20, 1, 1)),
+ : blob_bottom_(new Blob<Dtype>(10, 10, 20, 20)),
blob_top_(new Blob<Dtype>()),
top_k_(5) {
Caffe::set_random_seed(1701);
@@ -55,6 +55,43 @@ TYPED_TEST(ArgMaxLayerTest, TestSetupMaxVal) {
EXPECT_EQ(this->blob_top_->channels(), 2);
}

+TYPED_TEST(ArgMaxLayerTest, TestSetupAxis) {
+ LayerParameter layer_param;
+ ArgMaxParameter* argmax_param = layer_param.mutable_argmax_param();
+ argmax_param->set_axis(0);
+ ArgMaxLayer<TypeParam> layer(layer_param);
+ layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+ EXPECT_EQ(this->blob_top_->shape(0), argmax_param->top_k());
+ EXPECT_EQ(this->blob_top_->shape(1), this->blob_bottom_->shape(0));
+ EXPECT_EQ(this->blob_top_->shape(2), this->blob_bottom_->shape(2));
+ EXPECT_EQ(this->blob_top_->shape(3), this->blob_bottom_->shape(3));
+}
+
+TYPED_TEST(ArgMaxLayerTest, TestSetupAxisNegativeIndexing) {
+ LayerParameter layer_param;
+ ArgMaxParameter* argmax_param = layer_param.mutable_argmax_param();
+ argmax_param->set_axis(-2);
+ ArgMaxLayer<TypeParam> layer(layer_param);
+ layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+ EXPECT_EQ(this->blob_top_->shape(0), this->blob_bottom_->shape(0));
+ EXPECT_EQ(this->blob_top_->shape(1), this->blob_bottom_->shape(1));
+ EXPECT_EQ(this->blob_top_->shape(2), argmax_param->top_k());
+ EXPECT_EQ(this->blob_top_->shape(3), this->blob_bottom_->shape(3));
+}
+
+TYPED_TEST(ArgMaxLayerTest, TestSetupAxisMaxVal) {
+ LayerParameter layer_param;
+ ArgMaxParameter* argmax_param = layer_param.mutable_argmax_param();
+ argmax_param->set_axis(2);
+ argmax_param->set_out_max_val(true);
+ ArgMaxLayer<TypeParam> layer(layer_param);
+ layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+ EXPECT_EQ(this->blob_top_->shape(0), this->blob_bottom_->shape(0));
+ EXPECT_EQ(this->blob_top_->shape(1), this->blob_bottom_->shape(1));
+ EXPECT_EQ(this->blob_top_->shape(2), argmax_param->top_k());
+ EXPECT_EQ(this->blob_top_->shape(3), this->blob_bottom_->shape(3));
+}
+
TYPED_TEST(ArgMaxLayerTest, TestCPU) {
LayerParameter layer_param;
ArgMaxLayer<TypeParam> layer(layer_param);
@@ -112,6 +149,7 @@ TYPED_TEST(ArgMaxLayerTest, TestCPUTopK) {
layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
// Now, check values
+ const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
int max_ind;
TypeParam max_val;
int num = this->blob_bottom_->num();
@@ -121,10 +159,10 @@ TYPED_TEST(ArgMaxLayerTest, TestCPUTopK) {
EXPECT_LE(this->blob_top_->data_at(i, 0, 0, 0), dim);
for (int j = 0; j < this->top_k_; ++j) {
max_ind = this->blob_top_->data_at(i, 0, j, 0);
- max_val = this->blob_bottom_->data_at(i, max_ind, 0, 0);
+ max_val = bottom_data[i * dim + max_ind];
int count = 0;
for (int k = 0; k < dim; ++k) {
- if (this->blob_bottom_->data_at(i, k, 0, 0) > max_val) {
+ if (bottom_data[i * dim + k] > max_val) {
++count;
}
}
@@ -142,6 +180,7 @@ TYPED_TEST(ArgMaxLayerTest, TestCPUMaxValTopK) {
layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
// Now, check values
+ const TypeParam* bottom_data = this->blob_bottom_->cpu_data();
int max_ind;
TypeParam max_val;
int num = this->blob_bottom_->num();
@@ -152,10 +191,10 @@ TYPED_TEST(ArgMaxLayerTest, TestCPUMaxValTopK) {
for (int j = 0; j < this->top_k_; ++j) {
max_ind = this->blob_top_->data_at(i, 0, j, 0);
max_val = this->blob_top_->data_at(i, 1, j, 0);
- EXPECT_EQ(this->blob_bottom_->data_at(i, max_ind, 0, 0), max_val);
+ EXPECT_EQ(bottom_data[i * dim + max_ind], max_val);
int count = 0;
for (int k = 0; k < dim; ++k) {
- if (this->blob_bottom_->data_at(i, k, 0, 0) > max_val) {
+ if (bottom_data[i * dim + k] > max_val) {
++count;
}
}
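The hunks above for TestCPUTopK and TestCPUMaxValTopK replace data_at(i, max_ind, 0, 0) with raw cpu_data() indexing. With the bottom blob now shaped (10, 10, 20, 20), these legacy tests still treat each item as one flat vector of dim = channels * height * width = 4000 values, so a returned index can exceed the channel count and can no longer be passed to data_at as a channel. A rough sketch of how the flat per-item offset relates to blob coordinates, assuming Caffe's row-major layout offset(n, c, h, w) = ((n * C + c) * H + h) * W + w; the helper name is illustrative, not part of the patch:

#include "caffe/blob.hpp"

// Illustrative only: recover the data_at() coordinates behind the flat
// per-item offset used by these tests.
template <typename Dtype>
Dtype FlatItemValue(const caffe::Blob<Dtype>& blob, int i, int k) {
  const int H = blob.height();
  const int W = blob.width();
  const int c = k / (H * W);        // channel index
  const int h = (k / W) % H;        // row
  const int w = k % W;              // column
  return blob.data_at(i, c, h, w);  // same value as blob.cpu_data()[i * dim + k]
}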
@@ -164,5 +203,93 @@ TYPED_TEST(ArgMaxLayerTest, TestCPUMaxValTopK) {
}
}

+TYPED_TEST(ArgMaxLayerTest, TestCPUAxis) {
+ LayerParameter layer_param;
+ ArgMaxParameter* argmax_param = layer_param.mutable_argmax_param();
+ argmax_param->set_axis(0);
+ ArgMaxLayer<TypeParam> layer(layer_param);
+ layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+ layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+ // Now, check values
+ int max_ind;
+ TypeParam max_val;
+ std::vector<int> shape = this->blob_bottom_->shape();
+ for (int i = 0; i < shape[1]; ++i) {
+ for (int j = 0; j < shape[2]; ++j) {
+ for (int k = 0; k < shape[3]; ++k) {
+ max_ind = this->blob_top_->data_at(0, i, j, k);
+ max_val = this->blob_bottom_->data_at(max_ind, i, j, k);
+ EXPECT_GE(max_ind, 0);
+ EXPECT_LE(max_ind, shape[0]);
+ for (int l = 0; l < shape[0]; ++l) {
+ EXPECT_LE(this->blob_bottom_->data_at(l, i, j, k), max_val);
+ }
+ }
+ }
+ }
+}
+
+TYPED_TEST(ArgMaxLayerTest, TestCPUAxisTopK) {
+ LayerParameter layer_param;
+ ArgMaxParameter* argmax_param = layer_param.mutable_argmax_param();
+ argmax_param->set_axis(2);
+ argmax_param->set_top_k(this->top_k_);
+ ArgMaxLayer<TypeParam> layer(layer_param);
+ layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+ layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+ // Now, check values
+ int max_ind;
+ TypeParam max_val;
+ std::vector<int> shape = this->blob_bottom_->shape();
+ for (int i = 0; i < shape[0]; ++i) {
+ for (int j = 0; j < shape[1]; ++j) {
+ for (int k = 0; k < shape[3]; ++k) {
+ for (int m = 0; m < this->top_k_; ++m) {
+ max_ind = this->blob_top_->data_at(i, j, m, k);
+ max_val = this->blob_bottom_->data_at(i, j, max_ind, k);
+ EXPECT_GE(max_ind, 0);
+ EXPECT_LE(max_ind, shape[2]);
+ int count = 0;
+ for (int l = 0; l < shape[2]; ++l) {
+ if (this->blob_bottom_->data_at(i, j, l, k) > max_val) {
+ ++count;
+ }
+ }
+ EXPECT_EQ(m, count);
+ }
+ }
+ }
+ }
+}
+
+TYPED_TEST(ArgMaxLayerTest, TestCPUAxisMaxValTopK) {
+ LayerParameter layer_param;
+ ArgMaxParameter* argmax_param = layer_param.mutable_argmax_param();
+ argmax_param->set_axis(-1);
+ argmax_param->set_top_k(this->top_k_);
+ argmax_param->set_out_max_val(true);
+ ArgMaxLayer<TypeParam> layer(layer_param);
+ layer.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+ layer.Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+ // Now, check values
+ TypeParam max_val;
+ std::vector<int> shape = this->blob_bottom_->shape();
+ for (int i = 0; i < shape[0]; ++i) {
+ for (int j = 0; j < shape[1]; ++j) {
+ for (int k = 0; k < shape[2]; ++k) {
+ for (int m = 0; m < this->top_k_; ++m) {
+ max_val = this->blob_top_->data_at(i, j, k, m);
+ int count = 0;
+ for (int l = 0; l < shape[3]; ++l) {
+ if (this->blob_bottom_->data_at(i, j, k, l) > max_val) {
+ ++count;
+ }
+ }
+ EXPECT_EQ(m, count);
+ }
+ }
+ }
+ }
+}

} // namespace caffe
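The top-k forward checks (TestCPUTopK, TestCPUMaxValTopK, TestCPUAxisTopK, TestCPUAxisMaxValTopK) assert a ranking property rather than comparing against a precomputed reference: the m-th returned entry must have exactly m strictly greater bottom values, so, barring ties in the randomly filled bottom blob, results come back sorted by descending value. A standalone sketch of a reference that satisfies this property, with hypothetical names and a plain vector standing in for one slice along the argmax axis:

#include <algorithm>
#include <vector>

// Illustrative reference only: indices of the top_k largest entries of one
// slice, largest first. With distinct values, entry m has exactly m strictly
// greater values in the slice, which is the count the tests verify.
std::vector<int> TopKIndices(const std::vector<float>& slice, int top_k) {
  std::vector<int> indices(slice.size());
  for (int i = 0; i < static_cast<int>(indices.size()); ++i) indices[i] = i;
  std::partial_sort(indices.begin(), indices.begin() + top_k, indices.end(),
                    [&slice](int a, int b) { return slice[a] > slice[b]; });
  indices.resize(top_k);
  return indices;
}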