author    Jeff Donahue <jeff.donahue@gmail.com>  2013-12-04 20:16:25 -0800
committer Jeff Donahue <jeff.donahue@gmail.com>  2013-12-04 20:16:25 -0800
commit    b9063277e3ae65e5a302cc3e36c938ab77d36c02
tree      09b55d99ad88b49828fbcfdbbd1cc5470ab7dd15 /src
parent    8c96ac2e87ba1c370ee376152dce99236aac5bfd
fix really stupid bug in flatten layer (and add test that shows the
failure case; not sure why CheckGradientExhaustive didn't catch it)
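In short: SetUp stored the per-image flattened size (channels * height * width) in channels_out_, and the forward and backward passes used that as the element count for caffe_copy / caffe_gpu_copy, so for any batch with num > 1 only the first image was ever copied. A minimal standalone sketch of the failure, with plain memcpy standing in for caffe_copy and illustrative shapes matching the test below (not Caffe's actual API):

  #include <cstring>
  #include <iostream>
  #include <vector>

  int main() {
    // Hypothetical bottom blob of shape (num=2, channels=3, height=6, width=5).
    const int num = 2, channels = 3, height = 6, width = 5;
    const int channels_out = channels * height * width;  // 90: one image, flattened
    const int count = num * channels_out;                // 180: the whole batch

    std::vector<float> bottom(count, 1.0f), top(count, 0.0f);

    // The buggy Forward_cpu copied channels_out elements, i.e. only image 0:
    std::memcpy(top.data(), bottom.data(), channels_out * sizeof(float));

    std::cout << top[0] << "\n";          // 1: image 0 was flattened correctly
    std::cout << top[count - 1] << "\n";  // 0: image 1 was never copied
    return 0;
  }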
Diffstat (limited to 'src')
-rw-r--r--  src/caffe/layers/flatten_layer.cpp    | 16 +++++++++-------
-rw-r--r--  src/caffe/test/test_flatten_layer.cpp |  2 ++
2 files changed, 11 insertions(+), 7 deletions(-)
diff --git a/src/caffe/layers/flatten_layer.cpp b/src/caffe/layers/flatten_layer.cpp
index 9ffe4d24..f2467444 100644
--- a/src/caffe/layers/flatten_layer.cpp
+++ b/src/caffe/layers/flatten_layer.cpp
@@ -13,18 +13,20 @@ void FlattenLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top) {
CHECK_EQ(bottom.size(), 1) << "Flatten Layer takes a single blob as input.";
CHECK_EQ(top->size(), 1) << "Flatten Layer takes a single blob as output.";
- channels_out_ = bottom[0]->channels() * bottom[0]->height()
+ int channels_out = bottom[0]->channels() * bottom[0]->height()
* bottom[0]->width();
- (*top)[0]->Reshape(bottom[0]->num(), channels_out_, 1, 1);
+ (*top)[0]->Reshape(bottom[0]->num(), channels_out, 1, 1);
+ count_ = bottom[0]->num() * channels_out;
+ CHECK_EQ(count_, bottom[0]->count());
+ CHECK_EQ(count_, (*top)[0]->count());
};
template <typename Dtype>
void FlattenLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top) {
-
const Dtype* bottom_data = bottom[0]->cpu_data();
Dtype* top_data = (*top)[0]->mutable_cpu_data();
- caffe_copy(channels_out_, bottom_data, top_data);
+ caffe_copy(count_, bottom_data, top_data);
}
template <typename Dtype>
@@ -32,7 +34,7 @@ void FlattenLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top) {
const Dtype* bottom_data = bottom[0]->gpu_data();
Dtype* top_data = (*top)[0]->mutable_gpu_data();
- caffe_gpu_copy(channels_out_, bottom_data, top_data);
+ caffe_gpu_copy(count_, bottom_data, top_data);
}
template <typename Dtype>
@@ -40,7 +42,7 @@ Dtype FlattenLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
const Dtype* top_diff = top[0]->cpu_diff();
Dtype* bottom_diff = (*bottom)[0]->mutable_cpu_diff();
- caffe_copy(channels_out_, top_diff, bottom_diff);
+ caffe_copy(count_, top_diff, bottom_diff);
}
@@ -49,7 +51,7 @@ Dtype FlattenLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
const Dtype* top_diff = top[0]->gpu_diff();
Dtype* bottom_diff = (*bottom)[0]->mutable_gpu_diff();
- caffe_gpu_copy(channels_out_, top_diff, bottom_diff);
+ caffe_gpu_copy(count_, top_diff, bottom_diff);
}
INSTANTIATE_CLASS(FlattenLayer);
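Beyond swapping channels_out_ for count_, the SetUp changes cache the total element count once and assert the invariant that flattening only reshapes: the (num, channels, height, width) bottom and the (num, channels*height*width, 1, 1) top must hold the same number of elements. A quick standalone check of that arithmetic (illustrative shapes, not Caffe's Blob API):

  #include <cassert>

  // Element count of a 4D blob, in the spirit of Blob::count().
  int count(int n, int c, int h, int w) { return n * c * h * w; }

  int main() {
    const int num = 2, channels = 3, height = 6, width = 5;
    const int channels_out = channels * height * width;
    const int count_ = num * channels_out;
    assert(count_ == count(num, channels, height, width));  // bottom blob
    assert(count_ == count(num, channels_out, 1, 1));       // reshaped top blob
    return 0;
  }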
diff --git a/src/caffe/test/test_flatten_layer.cpp b/src/caffe/test/test_flatten_layer.cpp
index 23bce9d9..b97e56aa 100644
--- a/src/caffe/test/test_flatten_layer.cpp
+++ b/src/caffe/test/test_flatten_layer.cpp
@@ -58,6 +58,8 @@ TYPED_TEST(FlattenLayerTest, TestCPU) {
for (int c = 0; c < 3 * 6 * 5; ++c) {
EXPECT_EQ(this->blob_top_->data_at(0, c, 0, 0),
this->blob_bottom_->data_at(0, c / (6 * 5), (c / 5) % 6, c % 5));
+ EXPECT_EQ(this->blob_top_->data_at(1, c, 0, 0),
+ this->blob_bottom_->data_at(1, c / (6 * 5), (c / 5) % 6, c % 5));
}
}
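The added assertions extend the existing check to the second image in the batch (n = 1), which is exactly the data the old channels_out_-sized copy never touched. The index arithmetic maps a flattened index c back to (channel, h, w) coordinates in a 3x6x5 image; a standalone check of that round trip, assuming the row-major (C, H, W) layout the test relies on:

  #include <cassert>

  int main() {
    const int C = 3, H = 6, W = 5;
    for (int c = 0; c < C * H * W; ++c) {
      const int channel = c / (H * W);  // which 6x5 plane
      const int h = (c / W) % H;        // row within the plane
      const int w = c % W;              // column within the row
      assert(c == (channel * H + h) * W + w);  // recovers the flat index
    }
    return 0;
  }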