author    Sergey Karayev <sergeykarayev@gmail.com>  2014-03-17 16:07:39 -0700
committer Sergey Karayev <sergeykarayev@gmail.com>  2014-03-17 16:07:39 -0700
commit    e6055c167ca354e88690c491ec877d76477982ff (patch)
tree      a4bbb772873c48c00205ba84e1176445dcd79e33 /src
parent    f7160844e238885f3c1263e71f35e66309ce9974 (diff)
Lint errors fixed, except for remaining stream usage.
Diffstat (limited to 'src')
-rw-r--r--  src/caffe/layers/hdf5_data_layer.cpp     | 11
-rw-r--r--  src/caffe/test/test_hdf5data_layer.cpp   | 18
-rw-r--r--  src/caffe/util/io.cpp                    | 16
3 files changed, 25 insertions(+), 20 deletions(-)
diff --git a/src/caffe/layers/hdf5_data_layer.cpp b/src/caffe/layers/hdf5_data_layer.cpp
index 98873cb1..60e25d01 100644
--- a/src/caffe/layers/hdf5_data_layer.cpp
+++ b/src/caffe/layers/hdf5_data_layer.cpp
@@ -1,3 +1,4 @@
+// Copyright 2014 BVLC.
/*
Contributors:
- Sergey Karayev, 2014.
@@ -8,11 +9,11 @@ TODO:
- can be smarter about the memcpy call instead of doing it row-by-row
:: use util functions caffe_copy, and Blob->offset()
:: don't forget to update hdf5_data_layer.cu accordingly
+- add ability to shuffle filenames if flag is set
*/
#include <stdint.h>
#include <string>
#include <vector>
-#include <iostream>
#include <fstream>
#include "hdf5.h"
@@ -22,8 +23,6 @@ TODO:
#include "caffe/util/io.hpp"
#include "caffe/vision_layers.hpp"
-using std::string;
-
namespace caffe {
template <typename Dtype>
@@ -42,12 +41,12 @@ void HDF5DataLayer<Dtype>::load_hdf5_file_data(const char* filename) {
const int MIN_DATA_DIM = 2;
const int MAX_DATA_DIM = 4;
hdf5_load_nd_dataset(
- file_id, "data", MIN_DATA_DIM, MAX_DATA_DIM, data_blob_);
+ file_id, "data", MIN_DATA_DIM, MAX_DATA_DIM, &data_blob_);
const int MIN_LABEL_DIM = 1;
const int MAX_LABEL_DIM = 2;
hdf5_load_nd_dataset(
- file_id, "label", MIN_LABEL_DIM, MAX_LABEL_DIM, label_blob_);
+ file_id, "label", MIN_LABEL_DIM, MAX_LABEL_DIM, &label_blob_);
herr_t status = H5Fclose(file_id);
CHECK_EQ(data_blob_.num(), label_blob_.num());
@@ -65,7 +64,7 @@ void HDF5DataLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
hdf_filenames_.clear();
std::ifstream myfile(this->layer_param_.source().c_str());
if (myfile.is_open()) {
- string line = "";
+ std::string line;
while (myfile >> line) {
hdf_filenames_.push_back(line);
}
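The substantive change in this file, beyond the copyright header and the dropped <iostream> include and using-declaration, is that hdf5_load_nd_dataset now receives its output blob as a pointer rather than a non-const reference, matching the Google C++ style rule (enforced by cpplint) that output parameters be pointers. A minimal sketch of a call site under the new signature; the file path is hypothetical, and the dimension bounds mirror load_hdf5_file_data above:

    #include "hdf5.h"
    #include "caffe/blob.hpp"
    #include "caffe/util/io.hpp"

    // Sketch only: load the "data" dataset into a blob via the new
    // pointer-based signature. CHECK_GE comes in through the caffe headers.
    void LoadSampleData(caffe::Blob<float>* data_blob) {
      hid_t file_id = H5Fopen("/tmp/sample.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
      CHECK_GE(file_id, 0) << "Failed to open HDF5 file";
      // The helper reshapes *data_blob to the dataset's dimensions and
      // then fills its CPU memory.
      caffe::hdf5_load_nd_dataset(file_id, "data", 2, 4, data_blob);
      CHECK_GE(H5Fclose(file_id), 0) << "Failed to close HDF5 file";
    }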
diff --git a/src/caffe/test/test_hdf5data_layer.cpp b/src/caffe/test/test_hdf5data_layer.cpp
index 59aee0c5..0b0b97ee 100644
--- a/src/caffe/test/test_hdf5data_layer.cpp
+++ b/src/caffe/test/test_hdf5data_layer.cpp
@@ -64,9 +64,9 @@ TYPED_TEST(HDF5DataLayerTest, TestRead) {
int num_cols = 8;
int height = 5;
int width = 5;
- HDF5DataLayer<TypeParam> layer(param);
// Test that the layer setup got the correct parameters.
+ HDF5DataLayer<TypeParam> layer(param);
layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);
EXPECT_EQ(this->blob_top_data_->num(), batchsize);
EXPECT_EQ(this->blob_top_data_->channels(), num_cols);
@@ -78,16 +78,18 @@ TYPED_TEST(HDF5DataLayerTest, TestRead) {
EXPECT_EQ(this->blob_top_label_->height(), 1);
EXPECT_EQ(this->blob_top_label_->width(), 1);
- for (int t=0; t<2; ++t) {
+ for (int t = 0; t < 2; ++t) {
+ // TODO: make this a TypedTest instead of this silly loop.
if (t == 0) {
Caffe::set_mode(Caffe::CPU);
} else {
Caffe::set_mode(Caffe::GPU);
}
+ layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);
- // Go through the data 100 times (50 batches).
+ // Go through the data 10 times (5 batches).
const int data_size = num_cols * height * width;
- for (int iter = 0; iter < 100; ++iter) {
+ for (int iter = 0; iter < 5; ++iter) {
layer.Forward(this->blob_bottom_vec_, &this->blob_top_vec_);
// On even iterations, we're reading the first half of the data.
@@ -109,11 +111,15 @@ TYPED_TEST(HDF5DataLayerTest, TestRead) {
for (int j = 0; j < num_cols; ++j) {
for (int h = 0; h < height; ++h) {
for (int w = 0; w < width; ++w) {
- int idx = i * num_cols * height * width + j * height * width + h * width + w;
+ int idx = (
+ i * num_cols * height * width +
+ j * height * width +
+ h * width + w);
EXPECT_EQ(
file_offset + data_offset + idx,
this->blob_top_data_->cpu_data()[idx])
- << "debug: i " << i << " j " << j << " iter " << iter;
+ << "debug: i " << i << " j " << j
+ << " iter " << iter << " t " << t;
}
}
}
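The reindented idx expression is the usual row-major flat offset into a num x channels x height x width blob. As a worked check with the test's own sizes (num_cols = 8, height = 5, width = 5): element (i = 1, j = 2, h = 3, w = 4) maps to idx = 1*8*5*5 + 2*5*5 + 3*5 + 4 = 269, and the assertion expects the value stored at that position to be file_offset + data_offset + 269.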
diff --git a/src/caffe/util/io.cpp b/src/caffe/util/io.cpp
index 72ceb8d2..3ac69f97 100644
--- a/src/caffe/util/io.cpp
+++ b/src/caffe/util/io.cpp
@@ -103,7 +103,8 @@ bool ReadImageToDatum(const string& filename, const int label,
// Verifies format of data stored in HDF5 file and reshapes blob accordingly.
template <typename Dtype>
void hdf5_load_nd_dataset_helper(
- hid_t file_id, const char* dataset_name_, int min_dim, int max_dim, Blob<Dtype>& blob) {
+ hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
+ Blob<Dtype>* blob) {
// Verify that the number of dimensions is in the accepted range.
herr_t status;
int ndims;
@@ -118,28 +119,27 @@ void hdf5_load_nd_dataset_helper(
file_id, dataset_name_, dims.data(), &class_, NULL);
CHECK_EQ(class_, H5T_FLOAT) << "Expected float or double data";
- blob.Reshape(
+ blob->Reshape(
dims[0],
(dims.size() > 1) ? dims[1] : 1,
(dims.size() > 2) ? dims[2] : 1,
- (dims.size() > 3) ? dims[3] : 1
- );
+ (dims.size() > 3) ? dims[3] : 1);
}
template <>
void hdf5_load_nd_dataset<float>(hid_t file_id, const char* dataset_name_,
- int min_dim, int max_dim, Blob<float>& blob) {
+ int min_dim, int max_dim, Blob<float>* blob) {
hdf5_load_nd_dataset_helper(file_id, dataset_name_, min_dim, max_dim, blob);
herr_t status = H5LTread_dataset_float(
- file_id, dataset_name_, blob.mutable_cpu_data());
+ file_id, dataset_name_, blob->mutable_cpu_data());
}
template <>
void hdf5_load_nd_dataset<double>(hid_t file_id, const char* dataset_name_,
- int min_dim, int max_dim, Blob<double>& blob) {
+ int min_dim, int max_dim, Blob<double>* blob) {
hdf5_load_nd_dataset_helper(file_id, dataset_name_, min_dim, max_dim, blob);
herr_t status = H5LTread_dataset_double(
- file_id, dataset_name_, blob.mutable_cpu_data());
+ file_id, dataset_name_, blob->mutable_cpu_data());
}
} // namespace caffe
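One point worth noting for callers of the now pointer-based loaders: the helper's Reshape pads any missing trailing dimensions with 1, so the H5LTread_dataset_float/_double calls always write into a buffer of exactly dims[0] * dims[1] * dims[2] * dims[3] elements obtained from blob->mutable_cpu_data(). A hedged sketch of the resulting blob shape for a 2-D dataset; the dataset name and its (100, 10) shape are assumptions for illustration:

    #include "hdf5.h"
    #include "caffe/blob.hpp"
    #include "caffe/util/io.hpp"

    // Sketch: a 2-D HDF5 dataset of shape (100, 10), loaded through the
    // pointer-based API, comes back as a 100 x 10 x 1 x 1 blob.
    void CheckLabelShape(hid_t file_id) {
      caffe::Blob<double> label_blob;
      caffe::hdf5_load_nd_dataset<double>(file_id, "label", 1, 2, &label_blob);
      CHECK_EQ(label_blob.num(), 100);
      CHECK_EQ(label_blob.channels(), 10);
      CHECK_EQ(label_blob.height(), 1);
      CHECK_EQ(label_blob.width(), 1);
    }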