diff --git a/src/caffe/layers/concat_layer.cpp b/src/caffe/layers/concat_layer.cpp
index 4bbce133c51..8036bdab675 100644
--- a/src/caffe/layers/concat_layer.cpp
+++ b/src/caffe/layers/concat_layer.cpp
@@ -12,14 +12,17 @@ template <typename Dtype>
 void ConcatLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top) {
   CHECK_GT(bottom.size(), 1) <<
-    "Concat Layer takes at least two blobs as input.";
+    "ConcatLayer takes at least two blobs as input.";
   CHECK_EQ(top->size(), 1) <<
-    "Concat Layer takes a single blob as output.";
+    "ConcatLayer takes a single blob as output.";
+
   concat_dim_ = this->layer_param_.concat_param().concat_dim();
-  CHECK_GE(concat_dim_, 0) << "concat_dim should be >= 0";
+  CHECK_GE(concat_dim_, 0) <<
+    "concat_dim should be >= 0";
   CHECK_LE(concat_dim_, 1) <<
     "For now concat_dim <=1, it can only concat num and channels";
-  // Intialize with the first blob
+
+  // Initialize with the first blob.
   count_ = bottom[0]->count();
   num_ = bottom[0]->num();
   channels_ = bottom[0]->channels();
@@ -64,10 +67,7 @@ Dtype ConcatLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
           top_data+(*top)[0]->offset(n, offset_channel));
       }
       offset_channel += bottom[i]->channels();
-    }
-  } else {
-    LOG(FATAL) << "concat_dim along dim" << concat_dim_ <<
-      " not implemented yet";
+    }  // concat_dim_ is guaranteed to be 0 or 1 by SetUp.
   }
   return Dtype(0.);
 }
@@ -97,10 +97,7 @@ void ConcatLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
       }
       offset_channel += blob->channels();
     }
-  } else {
-    LOG(FATAL) << "concat_dim along dim" << concat_dim_ <<
-      " not implemented yet";
-  }
+  }  // concat_dim_ is guaranteed to be 0 or 1 by SetUp.
 }
 
 INSTANTIATE_CLASS(ConcatLayer);
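
Note for reviewers: below is a minimal, self-contained sketch of the channel-wise copy that the concat_dim_ == 1 branch of Forward_cpu performs. This is not Caffe code; the toy Blob struct and the ConcatChannels helper are hypothetical stand-ins for Blob<Dtype> and its offset(n, c) arithmetic, shown only to illustrate the running offset_channel bookkeeping that this patch leaves in place.

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <vector>

// Toy blob: row-major (num, channels, height, width) storage.
struct Blob {
  int num, channels, height, width;
  std::vector<float> data;
  Blob(int n, int c, int h, int w)
      : num(n), channels(c), height(h), width(w),
        data(static_cast<std::size_t>(n) * c * h * w) {}
  // Linear offset of element (n, c, 0, 0), mirroring Blob<Dtype>::offset(n, c).
  std::size_t offset(int n, int c = 0) const {
    return (static_cast<std::size_t>(n) * channels + c) * height * width;
  }
};

// Channel-wise concatenation, following the concat_dim_ == 1 branch of
// Forward_cpu: for each input, copy every sample's chunk into the output
// at a running channel offset, then advance the offset by that input's
// channel count.
Blob ConcatChannels(const std::vector<Blob>& bottoms) {
  int total_channels = 0;
  for (const Blob& b : bottoms) total_channels += b.channels;
  Blob top(bottoms[0].num, total_channels, bottoms[0].height, bottoms[0].width);
  int offset_channel = 0;
  for (const Blob& b : bottoms) {
    const std::size_t num_elem =
        static_cast<std::size_t>(b.channels) * b.height * b.width;
    for (int n = 0; n < b.num; ++n) {
      std::copy(b.data.begin() + b.offset(n),
                b.data.begin() + b.offset(n) + num_elem,
                top.data.begin() + top.offset(n, offset_channel));
    }
    offset_channel += b.channels;
  }
  return top;
}

int main() {
  Blob a(2, 3, 1, 1), b(2, 2, 1, 1);
  std::fill(a.data.begin(), a.data.end(), 1.0f);
  std::fill(b.data.begin(), b.data.end(), 2.0f);
  Blob top = ConcatChannels({a, b});  // shape (2, 5, 1, 1)
  for (float v : top.data) std::cout << v << ' ';  // prints: 1 1 1 2 2 1 1 1 2 2
  std::cout << '\n';
  return 0;
}

Because SetUp now enforces 0 <= concat_dim_ <= 1, the removed LOG(FATAL) branches were unreachable, which is why the patch can replace them with a comment rather than a runtime check.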