summaryrefslogtreecommitdiff
path: root/src/caffe/util
diff options
context:
space:
mode:
authorEvan Shelhamer <shelhamer@imaginarynumber.net>2016-02-29 21:17:21 -0800
committerEvan Shelhamer <shelhamer@imaginarynumber.net>2016-02-29 21:24:29 -0800
commitff6c6e487534e4e738b301da2169bd369344b7f0 (patch)
treea4322ed5d9885a152a9cac2ad9d0df6769a89cbd /src/caffe/util
parenteffa9411ca270f32730400861c08dd2aa3f03ffa (diff)
downloadcaffeonacl-ff6c6e487534e4e738b301da2169bd369344b7f0.tar.gz
caffeonacl-ff6c6e487534e4e738b301da2169bd369344b7f0.tar.bz2
caffeonacl-ff6c6e487534e4e738b301da2169bd369344b7f0.zip
fix input field -> input layer net upgrade: only convert full defs
convert inputs in legacy definitions (prototxt), but simply strip inputs from legacy weights (caffemodel). fix #3750
Diffstat (limited to 'src/caffe/util')
-rw-r--r--src/caffe/util/upgrade_proto.cpp46
1 file changed, 26 insertions, 20 deletions
diff --git a/src/caffe/util/upgrade_proto.cpp b/src/caffe/util/upgrade_proto.cpp
index 775285f1..511a2dea 100644
--- a/src/caffe/util/upgrade_proto.cpp
+++ b/src/caffe/util/upgrade_proto.cpp
@@ -953,29 +953,35 @@ bool NetNeedsInputUpgrade(const NetParameter& net_param) {
}
void UpgradeNetInput(NetParameter* net_param) {
- LayerParameter* layer_param = net_param->add_layer();
- layer_param->set_name("input");
- layer_param->set_type("Input");
- InputParameter* input_param = layer_param->mutable_input_param();
+ // Collect inputs and convert to Input layer definitions.
+ // If the NetParameter holds an input alone, without shape/dim, then
+ // it's a legacy caffemodel and simply stripping the input field is enough.
bool has_shape = net_param->input_shape_size() > 0;
- // Convert input fields into a layer.
- for (int i = 0; i < net_param->input_size(); ++i) {
- layer_param->add_top(net_param->input(i));
- if (has_shape) {
- input_param->add_shape()->CopyFrom(net_param->input_shape(i));
- } else {
- // Turn legacy input dimensions into shape.
- BlobShape* shape = input_param->add_shape();
- int first_dim = i*4;
- int last_dim = first_dim + 4;
- for (int j = first_dim; j < last_dim; j++) {
- shape->add_dim(net_param->input_dim(j));
+ bool has_dim = net_param->input_dim_size() > 0;
+ if (has_shape || has_dim) {
+ LayerParameter* layer_param = net_param->add_layer();
+ layer_param->set_name("input");
+ layer_param->set_type("Input");
+ InputParameter* input_param = layer_param->mutable_input_param();
+ // Convert input fields into a layer.
+ for (int i = 0; i < net_param->input_size(); ++i) {
+ layer_param->add_top(net_param->input(i));
+ if (has_shape) {
+ input_param->add_shape()->CopyFrom(net_param->input_shape(i));
+ } else {
+ // Turn legacy input dimensions into shape.
+ BlobShape* shape = input_param->add_shape();
+ int first_dim = i*4;
+ int last_dim = first_dim + 4;
+ for (int j = first_dim; j < last_dim; j++) {
+ shape->add_dim(net_param->input_dim(j));
+ }
}
}
- }
- // Swap input layer to beginning of net to satisfy layer dependencies.
- for (int i = net_param->layer_size() - 1; i > 0; --i) {
- net_param->mutable_layer(i-1)->Swap(net_param->mutable_layer(i));
+ // Swap input layer to beginning of net to satisfy layer dependencies.
+ for (int i = net_param->layer_size() - 1; i > 0; --i) {
+ net_param->mutable_layer(i-1)->Swap(net_param->mutable_layer(i));
+ }
}
// Clear inputs.
net_param->clear_input();