automagic upgrade for v1->v2
jeffdonahue committed Feb 5, 2015 (1 parent bb5ba1b, commit 2e6a82c)
Showing 6 changed files with 410 additions and 78 deletions.
include/caffe/util/upgrade_proto.hpp (25 changes: 20 additions & 5 deletions)
@@ -8,9 +8,12 @@

namespace caffe {

// Return true iff the net is not the current version.
bool NetNeedsUpgrade(const NetParameter& net_param);

// Return true iff any layer contains parameters specified using
// deprecated V0LayerParameter.
bool NetNeedsUpgrade(const NetParameter& net_param);
bool NetNeedsV0ToV1Upgrade(const NetParameter& net_param);

// Perform all necessary transformations to upgrade a V0NetParameter into a
// NetParameter (including upgrading padding layers and LayerParameters).
@@ -23,9 +26,9 @@ bool UpgradeV0Net(const NetParameter& v0_net_param, NetParameter* net_param);
void UpgradeV0PaddingLayers(const NetParameter& param,
NetParameter* param_upgraded_pad);

// Upgrade a single V0LayerConnection to the new LayerParameter format.
bool UpgradeLayerParameter(const V1LayerParameter& v0_layer_connection,
V1LayerParameter* layer_param);
// Upgrade a single V0LayerConnection to the V1LayerParameter format.
bool UpgradeV0LayerParameter(const V1LayerParameter& v0_layer_connection,
V1LayerParameter* layer_param);

V1LayerParameter_LayerType UpgradeV0LayerType(const string& type);

@@ -36,13 +39,25 @@ bool NetNeedsDataUpgrade(const NetParameter& net_param);
// into a TransformationParameter.
void UpgradeNetDataTransformation(NetParameter* net_param);

// Return true iff the Net contains any layers specified as V1LayerParameters.
bool NetNeedsV1ToV2Upgrade(const NetParameter& net_param);

// Perform all necessary transformations to upgrade a NetParameter with
// deprecated V1LayerParameters.
bool UpgradeV1Net(const NetParameter& v1_net_param, NetParameter* net_param);

bool UpgradeV1LayerParameter(const V1LayerParameter& v1_layer_param,
LayerParameter* layer_param);

const char* UpgradeV1LayerType(const V1LayerParameter_LayerType type);

// Convert a NetParameter to NetParameterPrettyPrint used for dumping to
// proto text files.
void NetParameterToPrettyPrint(const NetParameter& param,
NetParameterPrettyPrint* pretty_param);

// Check for deprecations and upgrade the NetParameter as needed.
void UpgradeNetAsNeeded(NetParameter* param);
bool UpgradeNetAsNeeded(const string& param_file, NetParameter* param);

// Read parameters from a file into a NetParameter proto message.
void ReadNetParamsFromTextFileOrDie(const string& param_file,
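
Taken together, the declarations above describe a two-stage upgrade path: V0LayerParameter to V1LayerParameter, then V1LayerParameter to the new LayerParameter. The following is a minimal sketch, assuming only the signatures declared in this header, of how UpgradeNetAsNeeded might chain the two stages; the helper name, control flow, and log messages are illustrative assumptions, not the committed implementation.

#include <string>

#include <glog/logging.h>

#include "caffe/proto/caffe.pb.h"
#include "caffe/util/upgrade_proto.hpp"

namespace caffe {

// Sketch only: chain the V0->V1 and V1->V2 upgrades declared above.
// (The real upgrade also handles the data-transformation fields; omitted here.)
bool UpgradeNetAsNeededSketch(const std::string& param_file,
                              NetParameter* param) {
  bool success = true;
  if (NetNeedsV0ToV1Upgrade(*param)) {
    LOG(INFO) << "Upgrading V0 net '" << param_file << "' to V1 format.";
    NetParameter original = *param;  // upgrade from a copy, writing back into param
    if (!UpgradeV0Net(original, param)) { success = false; }
  }
  if (NetNeedsV1ToV2Upgrade(*param)) {
    LOG(INFO) << "Upgrading V1 net '" << param_file << "' to the current format.";
    NetParameter original = *param;
    if (!UpgradeV1Net(original, param)) { success = false; }
  }
  return success;
}

}  // namespace caffe
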
src/caffe/net.cpp (13 changes: 7 additions & 6 deletions)
@@ -138,9 +138,10 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
CHECK(param_size == num_param_blobs || param_size == 0)
<< "Incorrect param size: should be either 0 or the same as "
"the number of the layer's parameter blobs: " << num_param_blobs;
const int blob_share_mode_size = layer_param.blob_share_mode_size();
CHECK(blob_share_mode_size == num_param_blobs || blob_share_mode_size == 0)
<< "Incorrect blob_share_mode size: should be either 0 or the same as "
const int param_share_mode_size = layer_param.param_share_mode_size();
CHECK(param_share_mode_size == num_param_blobs ||
param_share_mode_size == 0)
<< "Incorrect param_share_mode size: should be either 0 or the same as "
"the number of the layer's parameter blobs: " << num_param_blobs;
for (int param_id = 0; param_id < num_param_blobs; ++param_id) {
AppendParam(param, layer_id, param_id);
@@ -441,9 +442,9 @@ void Net<Dtype>::AppendParam(const NetParameter& param, const int layer_id,
Blob<Dtype>* this_blob = layers_[layer_id]->blobs()[param_id].get();
Blob<Dtype>* owner_blob =
layers_[owner_layer_id]->blobs()[owner_param_id].get();
const int blob_share_mode_size = layer_param.blob_share_mode_size();
if (blob_share_mode_size > param_id &&
(layer_param.blob_share_mode(param_id) ==
const int param_share_mode_size = layer_param.param_share_mode_size();
if (param_share_mode_size > param_id &&
(layer_param.param_share_mode(param_id) ==
LayerParameter_DimCheckMode_PERMISSIVE)) {
// Permissive dimension checking -- only check counts are the same.
CHECK_EQ(this_blob->count(), owner_blob->count())
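
For context, the renamed check above applies per parameter blob: STRICT (the default) requires shared blobs to agree in every dimension, while PERMISSIVE only requires the element counts to match. Below is a standalone sketch of that logic, not the actual Net<Dtype>::AppendParam code; the helper name is hypothetical and the pre-ND Blob accessors (num, channels, height, width) are assumed.

#include <glog/logging.h>

#include "caffe/blob.hpp"
#include "caffe/proto/caffe.pb.h"

// Hypothetical helper illustrating param_share_mode semantics.
template <typename Dtype>
void CheckSharedParamShapes(const caffe::LayerParameter& layer_param,
                            const int param_id,
                            const caffe::Blob<Dtype>& this_blob,
                            const caffe::Blob<Dtype>& owner_blob) {
  const bool permissive =
      layer_param.param_share_mode_size() > param_id &&
      layer_param.param_share_mode(param_id) ==
          caffe::LayerParameter_DimCheckMode_PERMISSIVE;
  if (permissive) {
    // PERMISSIVE: only the total element counts must match.
    CHECK_EQ(this_blob.count(), owner_blob.count())
        << "Shared parameter blobs must have the same count.";
  } else {
    // STRICT (default): every dimension must match.
    CHECK_EQ(this_blob.num(), owner_blob.num());
    CHECK_EQ(this_blob.channels(), owner_blob.channels());
    CHECK_EQ(this_blob.height(), owner_blob.height());
    CHECK_EQ(this_blob.width(), owner_blob.width());
  }
}
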
src/caffe/proto/caffe.proto (105 changes: 54 additions & 51 deletions)
@@ -218,77 +218,80 @@ message NetStateRule {
//
// LayerParameter next available ID: 43 (last added: loss_param)
message LayerParameter {
repeated string bottom = 2; // the name of the bottom blobs
repeated string top = 3; // the name of the top blobs
optional string name = 4; // the layer name

// Rules controlling whether and when a layer is included in the network,
// based on the current NetState. You may specify a non-zero number of rules
// to include OR exclude, but not both. If no include or exclude rules are
// specified, the layer is always included. If the current NetState meets
// ANY (i.e., one or more) of the specified rules, the layer is
// included/excluded.
repeated NetStateRule include = 32;
repeated NetStateRule exclude = 33;
optional string name = 1; // the layer name
optional string type = 2; // the layer type
repeated string bottom = 3; // the name of each bottom blob
repeated string top = 4; // the name of each top blob

optional string type = 5; // the layer type from the enum above
// The amount of weight to assign each top blob in the objective.
// Each layer assigns a default value, usually of either 0 or 1,
// to each top blob.
repeated float loss_weight = 5;

// The blobs containing the numeric parameters of the layer
repeated BlobProto blobs = 6;

// The names of the parameter blobs -- useful for sharing parameters among
// layers (but never required).
repeated string param = 1001;
repeated string param = 7;

// Whether to require shared weights to have the same shape, or just the same
// count -- defaults to STRICT if unspecified.
repeated DimCheckMode blob_share_mode = 1002;
repeated DimCheckMode param_share_mode = 8;
enum DimCheckMode {
// STRICT (default) requires that num, channels, height, width each match.
STRICT = 0;
// PERMISSIVE requires only the count (num*channels*height*width) to match.
PERMISSIVE = 1;
}

// The ratio that is multiplied on the global learning rate. If you want to
// set the learning ratio for one blob, you need to set it for all blobs.
repeated float blobs_lr = 7;
// The weight decay that is multiplied on the global weight decay.
repeated float weight_decay = 8;
repeated float blobs_lr = 9;

// The amount of weight to assign each top blob in the objective.
// Each layer assigns a default value, usually of either 0 or 1,
// to each top blob.
repeated float loss_weight = 35;
// The weight decay that is multiplied on the global weight decay.
repeated float weight_decay = 10;

optional AccuracyParameter accuracy_param = 27;
optional ArgMaxParameter argmax_param = 23;
optional ConcatParameter concat_param = 9;
optional ContrastiveLossParameter contrastive_loss_param = 40;
optional ConvolutionParameter convolution_param = 10;
optional DataParameter data_param = 11;
optional DropoutParameter dropout_param = 12;
optional DummyDataParameter dummy_data_param = 26;
optional EltwiseParameter eltwise_param = 24;
optional ExpParameter exp_param = 41;
optional HDF5DataParameter hdf5_data_param = 13;
optional HDF5OutputParameter hdf5_output_param = 14;
optional HingeLossParameter hinge_loss_param = 29;
optional ImageDataParameter image_data_param = 15;
optional InfogainLossParameter infogain_loss_param = 16;
optional InnerProductParameter inner_product_param = 17;
optional LRNParameter lrn_param = 18;
optional MemoryDataParameter memory_data_param = 22;
optional MVNParameter mvn_param = 34;
optional PoolingParameter pooling_param = 19;
optional PowerParameter power_param = 21;
optional ReLUParameter relu_param = 30;
optional SigmoidParameter sigmoid_param = 38;
optional SoftmaxParameter softmax_param = 39;
optional SliceParameter slice_param = 31;
optional TanHParameter tanh_param = 37;
optional ThresholdParameter threshold_param = 25;
optional WindowDataParameter window_data_param = 20;
// Rules controlling whether and when a layer is included in the network,
// based on the current NetState. You may specify a non-zero number of rules
// to include OR exclude, but not both. If no include or exclude rules are
// specified, the layer is always included. If the current NetState meets
// ANY (i.e., one or more) of the specified rules, the layer is
// included/excluded.
repeated NetStateRule include = 11;
repeated NetStateRule exclude = 12;

// Parameters for data pre-processing.
optional TransformationParameter transform_param = 36;
optional TransformationParameter transform_param = 13;

optional AccuracyParameter accuracy_param = 14;
optional ArgMaxParameter argmax_param = 15;
optional ConcatParameter concat_param = 16;
optional ContrastiveLossParameter contrastive_loss_param = 17;
optional ConvolutionParameter convolution_param = 18;
optional DataParameter data_param = 19;
optional DropoutParameter dropout_param = 20;
optional DummyDataParameter dummy_data_param = 21;
optional EltwiseParameter eltwise_param = 22;
optional ExpParameter exp_param = 23;
optional HDF5DataParameter hdf5_data_param = 24;
optional HDF5OutputParameter hdf5_output_param = 25;
optional HingeLossParameter hinge_loss_param = 26;
optional ImageDataParameter image_data_param = 27;
optional InfogainLossParameter infogain_loss_param = 28;
optional InnerProductParameter inner_product_param = 29;
optional LRNParameter lrn_param = 30;
optional MemoryDataParameter memory_data_param = 31;
optional MVNParameter mvn_param = 32;
optional PoolingParameter pooling_param = 33;
optional PowerParameter power_param = 34;
optional ReLUParameter relu_param = 35;
optional SigmoidParameter sigmoid_param = 36;
optional SoftmaxParameter softmax_param = 37;
optional SliceParameter slice_param = 38;
optional TanHParameter tanh_param = 39;
optional ThresholdParameter threshold_param = 40;
optional WindowDataParameter window_data_param = 41;

// Parameters shared by loss layers.
optional LossParameter loss_param = 42;
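
The net effect of the reshuffled message is that a V2 layer is identified by a free-form string type and the common fields sit at low tag numbers. Below is a purely illustrative construction via the generated C++ protobuf API, assuming only the fields shown above; the InnerProduct layer named "fc1" is an arbitrary example.

#include "caffe/proto/caffe.pb.h"

// Build one layer in the new (V2) LayerParameter format.
caffe::LayerParameter MakeExampleLayer() {
  caffe::LayerParameter layer;
  layer.set_name("fc1");               // name = 1
  layer.set_type("InnerProduct");      // type = 2, now a plain string
  layer.add_bottom("data");            // bottom = 3
  layer.add_top("fc1");                // top = 4
  layer.add_blobs_lr(1.0f);            // LR multiplier for the weights
  layer.add_blobs_lr(2.0f);            // LR multiplier for the bias
  layer.mutable_inner_product_param()->set_num_output(10);
  return layer;
}

In prototxt form this corresponds to a layer { ... } block with type: "InnerProduct", in place of the old enum-typed layers { type: INNER_PRODUCT } definitions.
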