summaryrefslogtreecommitdiff
path: root/src/caffe/util/io.cpp
diff options
context:
space:
mode:
author Sergey Karayev <sergeykarayev@gmail.com> 2014-03-17 16:07:39 -0700
committer Sergey Karayev <sergeykarayev@gmail.com> 2014-03-17 16:07:39 -0700
commit e6055c167ca354e88690c491ec877d76477982ff (patch)
tree a4bbb772873c48c00205ba84e1176445dcd79e33 /src/caffe/util/io.cpp
parent f7160844e238885f3c1263e71f35e66309ce9974 (diff)
downloadcaffeonacl-e6055c167ca354e88690c491ec877d76477982ff.tar.gz
caffeonacl-e6055c167ca354e88690c491ec877d76477982ff.tar.bz2
caffeonacl-e6055c167ca354e88690c491ec877d76477982ff.zip
Lint errors fixed, except still using stream.
Diffstat (limited to 'src/caffe/util/io.cpp')
-rw-r--r-- src/caffe/util/io.cpp | 16
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/src/caffe/util/io.cpp b/src/caffe/util/io.cpp
index 72ceb8d2..3ac69f97 100644
--- a/src/caffe/util/io.cpp
+++ b/src/caffe/util/io.cpp
@@ -103,7 +103,8 @@ bool ReadImageToDatum(const string& filename, const int label,
// Verifies format of data stored in HDF5 file and reshapes blob accordingly.
template <typename Dtype>
void hdf5_load_nd_dataset_helper(
- hid_t file_id, const char* dataset_name_, int min_dim, int max_dim, Blob<Dtype>& blob) {
+ hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
+ Blob<Dtype>* blob) {
// Verify that the number of dimensions is in the accepted range.
herr_t status;
int ndims;
@@ -118,28 +119,27 @@ void hdf5_load_nd_dataset_helper(
file_id, dataset_name_, dims.data(), &class_, NULL);
CHECK_EQ(class_, H5T_FLOAT) << "Expected float or double data";
- blob.Reshape(
+ blob->Reshape(
dims[0],
(dims.size() > 1) ? dims[1] : 1,
(dims.size() > 2) ? dims[2] : 1,
- (dims.size() > 3) ? dims[3] : 1
- );
+ (dims.size() > 3) ? dims[3] : 1);
}
template <>
void hdf5_load_nd_dataset<float>(hid_t file_id, const char* dataset_name_,
- int min_dim, int max_dim, Blob<float>& blob) {
+ int min_dim, int max_dim, Blob<float>* blob) {
hdf5_load_nd_dataset_helper(file_id, dataset_name_, min_dim, max_dim, blob);
herr_t status = H5LTread_dataset_float(
- file_id, dataset_name_, blob.mutable_cpu_data());
+ file_id, dataset_name_, blob->mutable_cpu_data());
}
template <>
void hdf5_load_nd_dataset<double>(hid_t file_id, const char* dataset_name_,
- int min_dim, int max_dim, Blob<double>& blob) {
+ int min_dim, int max_dim, Blob<double>* blob) {
hdf5_load_nd_dataset_helper(file_id, dataset_name_, min_dim, max_dim, blob);
herr_t status = H5LTread_dataset_double(
- file_id, dataset_name_, blob.mutable_cpu_data());
+ file_id, dataset_name_, blob->mutable_cpu_data());
}
} // namespace caffe