author     Kai Li <kaili_kloud@163.com>  2014-02-26 22:07:48 +0800
committer  Kai Li <kaili_kloud@163.com>  2014-03-19 23:04:42 +0800
commit     5bcdebdafbaba6e35a01f45b54ec6ee78dbb1ffa (patch)
tree       0cfd53f6e9634d1f4a5ff1a56836d2850cc18f9b /tools
parent     8e7153b4db7ae46e4e7a4ec579dc11f58b99cb2e (diff)
Fix cpplint errors for Net, its tests, and 3 feature-related examples
Diffstat (limited to 'tools')
-rw-r--r--  tools/binarize_features.cpp  24
-rw-r--r--  tools/extract_features.cpp   22
-rw-r--r--  tools/retrieve_images.cpp    38
3 files changed, 51 insertions, 33 deletions
diff --git a/tools/binarize_features.cpp b/tools/binarize_features.cpp
index 881755a9..e15e125f 100644
--- a/tools/binarize_features.cpp
+++ b/tools/binarize_features.cpp
@@ -1,8 +1,10 @@
// Copyright 2014 kloudkl@github
-#include <cmath> // for std::signbit
#include <cuda_runtime.h>
#include <google/protobuf/text_format.h>
+#include <cmath>  // for std::signbit
+#include <string>
+#include <vector>
#include "caffe/blob.hpp"
#include "caffe/common.hpp"
@@ -11,7 +13,7 @@
#include "caffe/proto/caffe.pb.h"
#include "caffe/util/io.hpp"
-using namespace caffe;
+using namespace caffe; // NOLINT(build/namespaces)
template<typename Dtype>
void binarize(const vector<shared_ptr<Blob<Dtype> > >& feature_blob_vector,
@@ -31,8 +33,9 @@ int features_binarization_pipeline(int argc, char** argv) {
if (argc < num_required_args) {
LOG(ERROR)<<
"This program compresses real valued features into compact binary codes.\n"
- "Usage: demo_binarize_features real_valued_feature_prototxt feature_blob_name"
- " save_binarized_feature_binaryproto_file num_mini_batches [CPU/GPU] [DEVICE_ID=0]";
+ "Usage: demo_binarize_features real_valued_feature_prototxt"
+ " feature_blob_name save_binarized_feature_binaryproto_file"
+ " num_mini_batches [CPU/GPU] [DEVICE_ID=0]";
return 1;
}
int arg_pos = num_required_args;
@@ -57,7 +60,8 @@ int features_binarization_pipeline(int argc, char** argv) {
arg_pos = 0; // the name of the executable
- // Expected prototxt contains at least one data layer as the real valued features.
+ // Expected prototxt contains at least one data layer as the real valued
+ // features.
/*
layers {
layer {
@@ -79,8 +83,8 @@ int features_binarization_pipeline(int argc, char** argv) {
string feature_blob_name(argv[++arg_pos]);
CHECK(real_valued_feature_net->has_blob(feature_blob_name))
- << "Unknown feature blob name " << feature_blob_name << " in the network "
- << real_valued_feature_prototxt;
+ << "Unknown feature blob name " << feature_blob_name
+ << " in the network " << real_valued_feature_prototxt;
string save_binarized_feature_binaryproto_file(argv[++arg_pos]);
@@ -101,11 +105,13 @@ int features_binarization_pipeline(int argc, char** argv) {
BlobProto blob_proto;
feature_binary_codes->ToProto(&blob_proto);
WriteProtoToBinaryFile(blob_proto, save_binarized_feature_binaryproto_file);
- LOG(ERROR)<< "Successfully binarized " << feature_binary_codes->num() << " features!";
+ LOG(ERROR) << "Successfully binarized " << feature_binary_codes->num()
+ << " features!";
return 0;
}
-// http://scikit-learn.org/stable/modules/preprocessing.html#feature-binarization
+// http://scikit-learn.org/stable/modules/preprocessing.html
+// #feature-binarization
template<typename Dtype>
void binarize(const vector<shared_ptr<Blob<Dtype> > >& feature_blob_vector,
shared_ptr<Blob<Dtype> > binary_codes) {
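For context on what this file computes: the binarize template declared above compresses real-valued features into binary codes by sign (the file pulls in <cmath> for std::signbit and cites scikit-learn's feature-binarization page). A minimal standalone sketch of sign-based binarization, independent of Caffe's Blob types (the function name pack_signs and the 64-bit packing below are illustrative assumptions, not part of this patch):

// Sketch only: packs the sign bit of each feature into 64-bit binary codes.
#include <cmath>    // for std::signbit
#include <cstdint>
#include <vector>

std::vector<uint64_t> pack_signs(const std::vector<float>& features) {
  const int kBitsPerWord = 64;
  std::vector<uint64_t> codes(
      (features.size() + kBitsPerWord - 1) / kBitsPerWord, 0);
  for (size_t i = 0; i < features.size(); ++i) {
    if (!std::signbit(features[i])) {  // non-negative feature -> bit set to 1
      codes[i / kBitsPerWord] |= static_cast<uint64_t>(1) << (i % kBitsPerWord);
    }
  }
  return codes;
}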
diff --git a/tools/extract_features.cpp b/tools/extract_features.cpp
index 1902aad8..0766eea6 100644
--- a/tools/extract_features.cpp
+++ b/tools/extract_features.cpp
@@ -1,10 +1,12 @@
// Copyright 2014 kloudkl@github
-#include <stdio.h> // for snprintf
+#include <stdio.h>  // for snprintf
#include <cuda_runtime.h>
#include <google/protobuf/text_format.h>
#include <leveldb/db.h>
#include <leveldb/write_batch.h>
+#include <string>
+#include <vector>
#include "caffe/blob.hpp"
#include "caffe/common.hpp"
@@ -13,7 +15,7 @@
#include "caffe/proto/caffe.pb.h"
#include "caffe/util/io.hpp"
-using namespace caffe;
+using namespace caffe; // NOLINT(build/namespaces)
template<typename Dtype>
int feature_extraction_pipeline(int argc, char** argv);
@@ -89,7 +91,6 @@ int feature_extraction_pipeline(int argc, char** argv) {
}
*/
NetParameter feature_extraction_net_param;
- ;
string feature_extraction_proto(argv[++arg_pos]);
ReadProtoFromTextFile(feature_extraction_proto,
&feature_extraction_net_param);
@@ -120,8 +121,8 @@ int feature_extraction_pipeline(int argc, char** argv) {
Datum datum;
leveldb::WriteBatch* batch = new leveldb::WriteBatch();
- const int max_key_str_length = 100;
- char key_str[max_key_str_length];
+ const int kMaxKeyStrLength = 100;
+ char key_str[kMaxKeyStrLength];
int num_bytes_of_binary_code = sizeof(Dtype);
vector<Blob<float>*> input_vec;
int image_index = 0;
@@ -138,18 +139,20 @@ int feature_extraction_pipeline(int argc, char** argv) {
datum.set_channels(1);
datum.clear_data();
datum.clear_float_data();
- feature_blob_data = feature_blob->mutable_cpu_data() + feature_blob->offset(n);
+ feature_blob_data = feature_blob->mutable_cpu_data() +
+ feature_blob->offset(n);
for (int d = 0; d < dim_features; ++d) {
datum.add_float_data(feature_blob_data[d]);
}
string value;
datum.SerializeToString(&value);
- snprintf(key_str, max_key_str_length, "%d", image_index);
+ snprintf(key_str, kMaxKeyStrLength, "%d", image_index);
batch->Put(string(key_str), value);
++image_index;
if (image_index % 1000 == 0) {
db->Write(leveldb::WriteOptions(), batch);
- LOG(ERROR)<< "Extracted features of " << image_index << " query images.";
+ LOG(ERROR)<< "Extracted features of " << image_index <<
+ " query images.";
delete batch;
batch = new leveldb::WriteBatch();
}
@@ -158,7 +161,8 @@ int feature_extraction_pipeline(int argc, char** argv) {
// write the last batch
if (image_index % 1000 != 0) {
db->Write(leveldb::WriteOptions(), batch);
- LOG(ERROR)<< "Extracted features of " << image_index << " query images.";
+ LOG(ERROR)<< "Extracted features of " << image_index <<
+ " query images.";
delete batch;
batch = new leveldb::WriteBatch();
}
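The batching logic reflowed in this hunk follows a standard LevelDB pattern: serialize each record, key it with a fixed-size snprintf buffer, and flush the WriteBatch every 1000 entries, then write the remainder. A self-contained sketch of that pattern under assumed inputs (the database path, record payload, and record count are placeholders, not taken from the patch):

#include <stdio.h>  // for snprintf
#include <leveldb/db.h>
#include <leveldb/write_batch.h>
#include <string>

int main() {
  leveldb::Options options;
  options.create_if_missing = true;
  leveldb::DB* db = NULL;
  leveldb::Status status =
      leveldb::DB::Open(options, "/tmp/example_features_db", &db);
  if (!status.ok()) {
    return 1;
  }
  const int kMaxKeyStrLength = 100;
  char key_str[kMaxKeyStrLength];
  leveldb::WriteBatch batch;
  const int num_records = 2500;  // placeholder record count
  for (int i = 0; i < num_records; ++i) {
    snprintf(key_str, kMaxKeyStrLength, "%d", i);
    batch.Put(std::string(key_str), "serialized datum placeholder");
    if ((i + 1) % 1000 == 0) {
      db->Write(leveldb::WriteOptions(), &batch);  // flush a full batch
      batch.Clear();
    }
  }
  if (num_records % 1000 != 0) {
    db->Write(leveldb::WriteOptions(), &batch);  // write the last partial batch
  }
  delete db;
  return 0;
}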
diff --git a/tools/retrieve_images.cpp b/tools/retrieve_images.cpp
index f3399818..dddff691 100644
--- a/tools/retrieve_images.cpp
+++ b/tools/retrieve_images.cpp
@@ -1,9 +1,12 @@
// Copyright 2014 kloudkl@github
-#include <fstream> // for std::ofstream
-#include <queue> // for std::priority_queue
#include <cuda_runtime.h>
#include <google/protobuf/text_format.h>
+#include <stdio.h>
+#include <queue>  // for std::priority_queue
+#include <string>
+#include <utility>  // for pair
+#include <vector>
#include "caffe/blob.hpp"
#include "caffe/common.hpp"
@@ -13,7 +16,7 @@
#include "caffe/util/io.hpp"
#include "caffe/util/math_functions.hpp"
-using namespace caffe;
+using namespace caffe; // NOLINT(build/namespaces)
template<typename Dtype>
void similarity_search(
@@ -92,8 +95,8 @@ int image_retrieval_pipeline(int argc, char** argv) {
string save_retrieval_result_filename(argv[++arg_pos]);
LOG(ERROR)<< "Opening result file " << save_retrieval_result_filename;
- std::ofstream retrieval_result_ofs(save_retrieval_result_filename.c_str(),
- std::ofstream::out);
+ FILE * result_fileid = fopen(save_retrieval_result_filename.c_str(),
+ "w");
LOG(ERROR)<< "Retrieving images";
vector<vector<std::pair<int, int> > > retrieval_results;
@@ -104,16 +107,18 @@ int image_retrieval_pipeline(int argc, char** argv) {
&retrieval_results);
int num_results = retrieval_results.size();
for (int i = 0; i < num_results; ++i) {
- retrieval_result_ofs << query_image_index++;
+ fprintf(result_fileid, "%d", query_image_index++);
for (int j = 0; j < retrieval_results[i].size(); ++j) {
- retrieval_result_ofs << " " << retrieval_results[i][j].first << ":"
- << retrieval_results[i][j].second;
+ fprintf(result_fileid, " %d:%d", retrieval_results[i][j].first,
+ retrieval_results[i][j].second);
}
- retrieval_result_ofs << "\n";
+ fprintf(result_fileid, "\n");
}
-
- retrieval_result_ofs.close();
- LOG(ERROR)<< "Successfully retrieved similar images for " << num_results << " queries!";
+ if (result_fileid != NULL) {
+ fclose(result_fileid);
+ }
+ LOG(ERROR) << "Successfully retrieved similar images for " << num_results
+ << " queries!";
return 0;
}
@@ -134,7 +139,8 @@ void similarity_search(
int num_samples = sample_images_feature_blob->num();
int num_queries = query_binary_feature_blob->num();
int dim = query_binary_feature_blob->count() / num_queries;
- LOG(ERROR)<< "num_samples " << num_samples << ", num_queries " << num_queries << ", dim " << dim;
+ LOG(ERROR)<< "num_samples " << num_samples << ", num_queries " <<
+ num_queries << ", dim " << dim;
int hamming_dist;
int neighbor_index;
retrieval_results->resize(num_queries);
@@ -152,7 +158,8 @@ void similarity_search(
hamming_dist = caffe_hamming_distance(dim, query_data, sample_data);
if (results.size() < top_k_results) {
results.push(std::make_pair(-hamming_dist, k));
- } else if (-hamming_dist > results.top().first) { // smaller hamming dist, nearer neighbor
+ } else if (-hamming_dist > results.top().first) {
+ // smaller hamming dist, nearer neighbor
results.pop();
results.push(std::make_pair(-hamming_dist, k));
}
@@ -161,7 +168,8 @@ void similarity_search(
for (int k = results.size() - 1; k >= 0; --k) {
hamming_dist = -results.top().first;
neighbor_index = results.top().second;
- retrieval_results->at(i)[k] = std::make_pair<int, int>(neighbor_index, hamming_dist);
+ retrieval_results->at(i)[k] = std::make_pair(neighbor_index,
+ hamming_dist);
results.pop();
}
} // for (int i = 0; i < num_queries; ++i) {
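The similarity_search hunks above keep the top_k nearest samples with a priority_queue of (negated Hamming distance, index) pairs, evicting the current worst entry when a nearer neighbor appears and then unloading the queue backwards into ascending-distance order. A standalone sketch of the same top-k idea over plain 64-bit codes; __builtin_popcountll stands in for caffe_hamming_distance, and the min-ordered queue is an assumption since the queue's declaration sits outside these hunks:

#include <cstdint>
#include <functional>  // for std::greater
#include <queue>       // for std::priority_queue
#include <utility>     // for std::pair
#include <vector>

typedef std::pair<int, int> DistIndex;  // (negated Hamming distance, index)

// Returns the top_k nearest samples to query as (sample index, Hamming
// distance) pairs, nearest first.
std::vector<std::pair<int, int> > top_k_neighbors(
    uint64_t query, const std::vector<uint64_t>& samples, size_t top_k) {
  std::priority_queue<DistIndex, std::vector<DistIndex>,
                      std::greater<DistIndex> > results;
  for (size_t k = 0; k < samples.size(); ++k) {
    int hamming_dist = __builtin_popcountll(query ^ samples[k]);
    if (results.size() < top_k) {
      results.push(std::make_pair(-hamming_dist, static_cast<int>(k)));
    } else if (-hamming_dist > results.top().first) {
      // Smaller Hamming distance, nearer neighbor: evict the current worst.
      results.pop();
      results.push(std::make_pair(-hamming_dist, static_cast<int>(k)));
    }
  }
  // The queue pops its worst-kept entry first, so fill the output backwards
  // to get ascending distance order.
  std::vector<std::pair<int, int> > neighbors(results.size());
  for (int k = static_cast<int>(results.size()) - 1; k >= 0; --k) {
    neighbors[k] = std::make_pair(results.top().second, -results.top().first);
    results.pop();
  }
  return neighbors;
}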