Merge pull request BVLC#4785 from bwilbertz/slightly_relax_batch_norm_check

slightly relax batch norm check
shelhamer authored Sep 30, 2016
2 parents 2c34393 + ce6ac83 commit a7f950b
1 changed file: src/caffe/layers/batch_norm_layer.cpp (8 additions, 4 deletions)
@@ -36,11 +36,15 @@ void BatchNormLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
   }
   // Mask statistics from optimization by setting local learning rates
   // for mean, variance, and the bias correction to zero.
-  CHECK_EQ(this->layer_param_.param_size(), 0)
-      << "Cannot configure batch normalization statistics as layer parameters.";
   for (int i = 0; i < this->blobs_.size(); ++i) {
-    ParamSpec* fixed_param_spec = this->layer_param_.add_param();
-    fixed_param_spec->set_lr_mult(0.);
+    if (this->layer_param_.param_size() == i) {
+      ParamSpec* fixed_param_spec = this->layer_param_.add_param();
+      fixed_param_spec->set_lr_mult(0.f);
+    } else {
+      CHECK_EQ(this->layer_param_.param(i).lr_mult(), 0.f)
+          << "Cannot configure batch normalization statistics as layer "
+          << "parameters.";
+    }
   }
 }
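
What the relaxed check buys in practice: previously LayerSetUp rejected any param spec on a BatchNorm layer outright; after this change, explicit param entries are accepted as long as each one keeps lr_mult at 0, and any missing entries are still appended automatically with a zero learning rate. A minimal prototxt sketch of a layer definition the relaxed check now accepts (layer and blob names here are placeholders, not part of this commit):

layer {
  name: "bn1"          # hypothetical layer name
  type: "BatchNorm"
  bottom: "conv1"      # hypothetical input blob
  top: "bn1"
  # The three statistics blobs (mean, variance, bias correction) may now be
  # listed explicitly, provided their learning rates stay fixed at zero.
  param { lr_mult: 0 }
  param { lr_mult: 0 }
  param { lr_mult: 0 }
}

Giving any of these params a nonzero lr_mult still trips the CHECK_EQ above, so the running statistics remain masked from the solver.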
