author    Przemysław Dolata <snowball91b@gmail.com>  2018-08-21 14:51:57 +0200
committer Wook Song <wook16.song@samsung.com>        2020-02-10 19:14:46 +0900
commit    d1ffbeebd32d387dee5453af5b1a74eb3b9bfc6c (patch)
tree      70a822b6c7279ee39855e23830a06dfcb66d5597
parent    b8e927ac6fe4f7ada69358879d82e7aebbb4f0f7 (diff)
parent    4b085652ea03d147f22ad11ebe6343568be3356f (diff)
Merge pull request #6461 from open-cv/patch_1
fix typos and add passing by reference
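
For context on the second half of the commit title: passing std::string by value copies the whole buffer on every call, while passing by const reference binds to the caller's object without copying. A minimal sketch of the difference (the function and variable names are illustrative, not taken from the patch):

#include <iostream>
#include <string>

// By value: the argument is copied into 'name' on every call.
void greet_by_value(const std::string name) {
  std::cout << "got " << name.size() << " bytes\n";
}

// By const reference: no copy; the function reads the caller's string.
void greet_by_ref(const std::string& name) {
  std::cout << "got " << name.size() << " bytes\n";
}

int main() {
  std::string who(1 << 20, 'x');  // a large string makes the copy cost visible
  greet_by_value(who);            // copies roughly 1 MB
  greet_by_ref(who);              // copies nothing
  return 0;
}

The typo fixes below are behavior-neutral; the signature changes are the ones that affect generated code.
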
-rw-r--r--  cmake/Modules/FindMKL.cmake          2
-rw-r--r--  include/caffe/net.hpp                6
-rw-r--r--  include/caffe/solver.hpp             4
-rw-r--r--  include/caffe/util/signal_handler.h  2
-rw-r--r--  python/caffe/_caffe.cpp              2
-rw-r--r--  src/caffe/layers/pooling_layer.cpp   2
-rw-r--r--  src/caffe/net.cpp                    8
-rw-r--r--  src/caffe/proto/caffe.proto          6
-rw-r--r--  src/caffe/solver.cpp                 6
-rw-r--r--  src/caffe/util/signal_handler.cpp    2
10 files changed, 20 insertions, 20 deletions
diff --git a/cmake/Modules/FindMKL.cmake b/cmake/Modules/FindMKL.cmake
index 5ab93b2d..ef0c3bf1 100644
--- a/cmake/Modules/FindMKL.cmake
+++ b/cmake/Modules/FindMKL.cmake
@@ -9,7 +9,7 @@
# This module defines the following variables:
#
# MKL_FOUND : True mkl is found
-# MKL_INCLUDE_DIR : unclude directory
+# MKL_INCLUDE_DIR : include directory
# MKL_LIBRARIES : the libraries to link against.
diff --git a/include/caffe/net.hpp b/include/caffe/net.hpp
index d3c9306e..143d5d28 100644
--- a/include/caffe/net.hpp
+++ b/include/caffe/net.hpp
@@ -111,9 +111,9 @@ class Net {
* another Net.
*/
void CopyTrainedLayersFrom(const NetParameter& param);
- void CopyTrainedLayersFrom(const string trained_filename);
- void CopyTrainedLayersFromBinaryProto(const string trained_filename);
- void CopyTrainedLayersFromHDF5(const string trained_filename);
+ void CopyTrainedLayersFrom(const string& trained_filename);
+ void CopyTrainedLayersFromBinaryProto(const string& trained_filename);
+ void CopyTrainedLayersFromHDF5(const string& trained_filename);
/// @brief Writes the net to a proto.
void ToProto(NetParameter* param, bool write_diff = false) const;
/// @brief Writes the net to an HDF5 file.
diff --git a/include/caffe/solver.hpp b/include/caffe/solver.hpp
index 75560f9f..7a0d7777 100644
--- a/include/caffe/solver.hpp
+++ b/include/caffe/solver.hpp
@@ -55,7 +55,7 @@ class Solver {
// The main entry of the solver function. In default, iter will be zero. Pass
// in a non-zero iter number to resume training for a pre-trained net.
virtual void Solve(const char* resume_file = NULL);
- inline void Solve(const string resume_file) { Solve(resume_file.c_str()); }
+ inline void Solve(const string& resume_file) { Solve(resume_file.c_str()); }
void Step(int iters);
// The Restore method simply dispatches to one of the
// RestoreSolverStateFrom___ protected methods. You should implement these
@@ -98,7 +98,7 @@ class Solver {
virtual void ApplyUpdate() = 0;

protected:
- string SnapshotFilename(const string extension);
+ string SnapshotFilename(const string& extension);
string SnapshotToBinaryProto();
string SnapshotToHDF5();
// The test routine
diff --git a/include/caffe/util/signal_handler.h b/include/caffe/util/signal_handler.h
index fb84c65b..52463325 100644
--- a/include/caffe/util/signal_handler.h
+++ b/include/caffe/util/signal_handler.h
@@ -8,7 +8,7 @@ namespace caffe {

class SignalHandler {
public:
- // Contructor. Specify what action to take when a signal is received.
+ // Constructor. Specify what action to take when a signal is received.
SignalHandler(SolverAction::Enum SIGINT_action,
SolverAction::Enum SIGHUP_action);
~SignalHandler();
diff --git a/python/caffe/_caffe.cpp b/python/caffe/_caffe.cpp
index 9e7f6140..82bf21e6 100644
--- a/python/caffe/_caffe.cpp
+++ b/python/caffe/_caffe.cpp
@@ -416,7 +416,7 @@ BOOST_PYTHON_MODULE(_caffe) {
.def("reshape", &Net<Dtype>::Reshape)
.def("clear_param_diffs", &Net<Dtype>::ClearParamDiffs)
// The cast is to select a particular overload.
- .def("copy_from", static_cast<void (Net<Dtype>::*)(const string)>(
+ .def("copy_from", static_cast<void (Net<Dtype>::*)(const string&)>(
&Net<Dtype>::CopyTrainedLayersFrom))
.def("share_with", &Net<Dtype>::ShareTrainedLayersWith)
.add_property("_blob_loss_weights", bp::make_function(
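
Because CopyTrainedLayersFrom is overloaded, &Net<Dtype>::CopyTrainedLayersFrom by itself is ambiguous; the static_cast to a pointer-to-member type selects one overload, so its parameter type must be updated to const string& to keep matching the new signature. A self-contained sketch of the same technique (the Widget class and module name are invented for illustration):

#include <boost/python.hpp>
#include <string>

struct Widget {
  void load(const std::string& path) { /* read from a file */ }
  void load(int fd) { /* read from a descriptor */ }
};

BOOST_PYTHON_MODULE(widget) {
  namespace bp = boost::python;
  bp::class_<Widget>("Widget")
      // &Widget::load alone names two functions; the cast picks the
      // string overload, as the diff above does for copy_from.
      .def("load",
           static_cast<void (Widget::*)(const std::string&)>(&Widget::load));
}
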
diff --git a/src/caffe/layers/pooling_layer.cpp b/src/caffe/layers/pooling_layer.cpp
index f2a08857..32dc0482 100644
--- a/src/caffe/layers/pooling_layer.cpp
+++ b/src/caffe/layers/pooling_layer.cpp
@@ -145,7 +145,7 @@ void PoolingLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
const int top_count = top[0]->count();
// We'll output the mask to top[1] if it's of size >1.
const bool use_top_mask = top.size() > 1;
- int* mask = NULL; // suppress warnings about uninitalized variables
+ int* mask = NULL; // suppress warnings about uninitialized variables
Dtype* top_mask = NULL;
// Different pooling methods. We explicitly do the switch outside the for
// loop to save time, although this results in more code.
diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
index 73adcc6d..5e844b03 100644
--- a/src/caffe/net.cpp
+++ b/src/caffe/net.cpp
@@ -166,7 +166,7 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
// loss. We can skip backward computation for blobs that don't contribute
// to the loss.
// Also checks if all bottom blobs don't need backward computation (possible
- // because the skip_propagate_down param) and so we can skip bacward
+ // because the skip_propagate_down param) and so we can skip backward
// computation for the entire layer
set<string> blobs_under_loss;
set<string> blobs_skip_backp;
@@ -770,7 +770,7 @@ void Net<Dtype>::CopyTrainedLayersFrom(const NetParameter& param) {
}

template <typename Dtype>
-void Net<Dtype>::CopyTrainedLayersFrom(const string trained_filename) {
+void Net<Dtype>::CopyTrainedLayersFrom(const string& trained_filename) {
if (H5Fis_hdf5(trained_filename.c_str())) {
CopyTrainedLayersFromHDF5(trained_filename);
} else {
@@ -780,14 +780,14 @@ void Net<Dtype>::CopyTrainedLayersFrom(const string trained_filename) {

template <typename Dtype>
void Net<Dtype>::CopyTrainedLayersFromBinaryProto(
- const string trained_filename) {
+ const string& trained_filename) {
NetParameter param;
ReadNetParamsFromBinaryFileOrDie(trained_filename, &param);
CopyTrainedLayersFrom(param);
}

template <typename Dtype>
-void Net<Dtype>::CopyTrainedLayersFromHDF5(const string trained_filename) {
+void Net<Dtype>::CopyTrainedLayersFromHDF5(const string& trained_filename) {
#ifdef USE_HDF5
hid_t file_hid = H5Fopen(trained_filename.c_str(), H5F_ACC_RDONLY,
H5P_DEFAULT);
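
The dispatch in this hunk hinges on HDF5's file-signature probe: H5Fis_hdf5 returns a positive value for HDF5 files, zero otherwise, and negative on error, which lets one entry point accept either an HDF5 snapshot or a binary protobuf. A minimal sketch of that probe, assuming an HDF5 development install (the helper name is hypothetical):

#include <hdf5.h>
#include <string>

// True when the file carries the HDF5 signature; errors count as "no".
bool looks_like_hdf5(const std::string& path) {
  return H5Fis_hdf5(path.c_str()) > 0;
}
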
diff --git a/src/caffe/proto/caffe.proto b/src/caffe/proto/caffe.proto
index 5c235c6f..3dcad697 100644
--- a/src/caffe/proto/caffe.proto
+++ b/src/caffe/proto/caffe.proto
@@ -187,7 +187,7 @@ message SolverParameter {
optional int32 snapshot = 14 [default = 0]; // The snapshot interval
// The prefix for the snapshot.
- // If not set then is replaced by prototxt file path without extention.
+ // If not set then is replaced by prototxt file path without extension.
// If is set to directory then is augmented by prototxt file name
// without extention.
optional string snapshot_prefix = 15;
@@ -248,8 +248,8 @@ message SolverParameter {
// Path to caffemodel file(s) with pretrained weights to initialize finetuning.
// Tha same as command line --weights parameter for caffe train command.
- // If command line --weights parameter if specified, it has higher priority
- // and owerwrites this one(s).
+ // If command line --weights parameter is specified, it has higher priority
+ // and overwrites this one(s).
// If --snapshot command line parameter is specified, this one(s) are ignored.
// If several model files are expected, they can be listed in a one
// weights parameter separated by ',' (like in a command string) or
diff --git a/src/caffe/solver.cpp b/src/caffe/solver.cpp
index bf27beee..842312e0 100644
--- a/src/caffe/solver.cpp
+++ b/src/caffe/solver.cpp
@@ -78,7 +78,7 @@ template <typename Dtype>
void Solver<Dtype>::InitTrainNet() {
const int num_train_nets = param_.has_net() + param_.has_net_param() +
param_.has_train_net() + param_.has_train_net_param();
- const string& field_names = "net, net_param, train_net, train_net_param";
+ const string field_names = "net, net_param, train_net, train_net_param";
CHECK_GE(num_train_nets, 1) << "SolverParameter must specify a train net "
<< "using one of these fields: " << field_names;
CHECK_LE(num_train_nets, 1) << "SolverParameter must not contain more than "
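
This hunk goes the opposite way from the rest of the patch: for a local constant initialized from a string literal, the reference buys nothing. Binding a const reference to the temporary is legal C++ (the temporary's lifetime is extended to the reference's), but a plain const value states the intent directly. A small illustration, not taken from the patch:

#include <string>

int main() {
  // Legal but roundabout: the reference keeps a lifetime-extended temporary alive.
  const std::string& by_ref = "net, net_param, train_net, train_net_param";
  // Clearer: an ordinary local string, same contents, same cost.
  const std::string by_val = "net, net_param, train_net, train_net_param";
  return by_ref == by_val ? 0 : 1;
}
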
@@ -447,13 +447,13 @@ void Solver<Dtype>::CheckSnapshotWritePermissions() {
} else {
LOG(FATAL) << "Cannot write to snapshot prefix '"
<< param_.snapshot_prefix() << "'. Make sure "
- << "that the directory exists and is writeable.";
+ << "that the directory exists and is writable.";
}
}
}

template <typename Dtype>
-string Solver<Dtype>::SnapshotFilename(const string extension) {
+string Solver<Dtype>::SnapshotFilename(const string& extension) {
return param_.snapshot_prefix() + "_iter_" + caffe::format_int(iter_)
+ extension;
}
diff --git a/src/caffe/util/signal_handler.cpp b/src/caffe/util/signal_handler.cpp
index 5d764ec5..9658fb39 100644
--- a/src/caffe/util/signal_handler.cpp
+++ b/src/caffe/util/signal_handler.cpp
@@ -48,7 +48,7 @@ namespace {
void UnhookHandler() {
if (already_hooked_up) {
struct sigaction sa;
- // Setup the sighub handler
+ // Setup the sighup handler
sa.sa_handler = SIG_DFL;
// Restart the system call, if at all possible
sa.sa_flags = SA_RESTART;
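
For reference, the pattern this file follows is symmetric: hook installs a custom disposition with sigaction, and unhook restores SIG_DFL the same way. A minimal POSIX sketch with hypothetical names, eliding the bookkeeping the real handler does:

#include <csignal>
#include <cstring>

static void on_sighup(int) { /* record that SIGHUP arrived */ }

static void set_sighup_handler(void (*handler)(int)) {
  struct sigaction sa;
  std::memset(&sa, 0, sizeof(sa));
  sa.sa_handler = handler;
  sa.sa_flags = SA_RESTART;   // restart interrupted system calls
  sigemptyset(&sa.sa_mask);
  sigaction(SIGHUP, &sa, nullptr);
}

int main() {
  set_sighup_handler(on_sighup);  // hook: custom handler
  set_sighup_handler(SIG_DFL);    // unhook: default disposition
  return 0;
}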