summary | refs | log | tree | commit | diff
path: root/include
diff options
context:
space:
mode:
author    Jeff Donahue <jeff.donahue@gmail.com>  2013-12-04 20:16:25 -0800
committer Jeff Donahue <jeff.donahue@gmail.com>  2013-12-04 20:16:25 -0800
commit  b9063277e3ae65e5a302cc3e36c938ab77d36c02 (patch)
tree    09b55d99ad88b49828fbcfdbbd1cc5470ab7dd15 /include
parent  8c96ac2e87ba1c370ee376152dce99236aac5bfd (diff)
download  caffeonacl-b9063277e3ae65e5a302cc3e36c938ab77d36c02.tar.gz
caffeonacl-b9063277e3ae65e5a302cc3e36c938ab77d36c02.tar.bz2
caffeonacl-b9063277e3ae65e5a302cc3e36c938ab77d36c02.zip
fix really stupid bug in flatten layer (and add test that shows the
failure case; not sure why CheckGradientExhaustive didn't catch it)
Diffstat (limited to 'include')
-rw-r--r--  include/caffe/vision_layers.hpp | 2
1 files changed, 1 insertions, 1 deletions
diff --git a/include/caffe/vision_layers.hpp b/include/caffe/vision_layers.hpp
index 111d56e7..2c23b456 100644
--- a/include/caffe/vision_layers.hpp
+++ b/include/caffe/vision_layers.hpp
@@ -106,7 +106,7 @@ class FlattenLayer : public Layer<Dtype> {
const bool propagate_down, vector<Blob<Dtype>*>* bottom);
virtual Dtype Backward_gpu(const vector<Blob<Dtype>*>& top,
const bool propagate_down, vector<Blob<Dtype>*>* bottom);
- int channels_out_;
+ int count_;
};