Give back to layer what is layer's, and to factory what is factory's #1270

Merged: 1 commit, Oct 13, 2014
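In short: layer_factory.cpp now holds only the Get*Layer() creator functions for layers with multiple computational engines (CAFFE vs. CUDNN), each registered via REGISTER_LAYER_CREATOR directly after its definition. Every layer whose constructor is its default creator is instead registered in its own .cpp file via REGISTER_LAYER_CLASS, next to its INSTANTIATE_CLASS call.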
93 changes: 31 additions & 62 deletions src/caffe/layer_factory.cpp
@@ -7,9 +7,6 @@

namespace caffe {

// GetLayer() defines the overall layer factory. The Get*Layer() functions
// define factories for layers with multiple computational engines.

// Get convolution layer according to engine.
template <typename Dtype>
Layer<Dtype>* GetConvolutionLayer(
@@ -32,6 +29,8 @@ Layer<Dtype>* GetConvolutionLayer(
}
}

REGISTER_LAYER_CREATOR(CONVOLUTION, GetConvolutionLayer);

// Get pooling layer according to engine.
template <typename Dtype>
Layer<Dtype>* GetPoolingLayer(const LayerParameter& param) {
@@ -60,6 +59,8 @@ Layer<Dtype>* GetPoolingLayer(const LayerParameter& param) {
}
}

REGISTER_LAYER_CREATOR(POOLING, GetPoolingLayer);

// Get relu layer according to engine.
template <typename Dtype>
Layer<Dtype>* GetReLULayer(const LayerParameter& param) {
@@ -81,6 +82,8 @@ Layer<Dtype>* GetReLULayer(const LayerParameter& param) {
}
}

REGISTER_LAYER_CREATOR(RELU, GetReLULayer);

// Get sigmoid layer according to engine.
template <typename Dtype>
Layer<Dtype>* GetSigmoidLayer(const LayerParameter& param) {
@@ -102,26 +105,7 @@ Layer<Dtype>* GetSigmoidLayer(const LayerParameter& param) {
}
}

// Get tanh layer according to engine.
template <typename Dtype>
Layer<Dtype>* GetTanHLayer(const LayerParameter& param) {
TanHParameter_Engine engine = param.tanh_param().engine();
if (engine == TanHParameter_Engine_DEFAULT) {
engine = TanHParameter_Engine_CAFFE;
#ifdef USE_CUDNN
engine = TanHParameter_Engine_CUDNN;
#endif
}
if (engine == TanHParameter_Engine_CAFFE) {
return new TanHLayer<Dtype>(param);
#ifdef USE_CUDNN
} else if (engine == TanHParameter_Engine_CUDNN) {
return new CuDNNTanHLayer<Dtype>(param);
#endif
} else {
LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
}
}
REGISTER_LAYER_CREATOR(SIGMOID, GetSigmoidLayer);

// Get softmax layer according to engine.
template <typename Dtype>
@@ -144,46 +128,31 @@ Layer<Dtype>* GetSoftmaxLayer(const LayerParameter& param) {
}
}

// Layers that have a specific creator function.
REGISTER_LAYER_CREATOR(CONVOLUTION, GetConvolutionLayer);
REGISTER_LAYER_CREATOR(POOLING, GetPoolingLayer);
REGISTER_LAYER_CREATOR(RELU, GetReLULayer);
REGISTER_LAYER_CREATOR(SIGMOID, GetSigmoidLayer);
REGISTER_LAYER_CREATOR(SOFTMAX, GetSoftmaxLayer);
REGISTER_LAYER_CREATOR(TANH, GetTanHLayer);

// Layers that use their constructor as their default creator.
REGISTER_LAYER_CLASS(ACCURACY, AccuracyLayer);
REGISTER_LAYER_CLASS(ABSVAL, AbsValLayer);
REGISTER_LAYER_CLASS(ARGMAX, ArgMaxLayer);
REGISTER_LAYER_CLASS(BNLL, BNLLLayer);
REGISTER_LAYER_CLASS(CONCAT, ConcatLayer);
REGISTER_LAYER_CLASS(CONTRASTIVE_LOSS, ContrastiveLossLayer);
REGISTER_LAYER_CLASS(DATA, DataLayer);
REGISTER_LAYER_CLASS(DROPOUT, DropoutLayer);
REGISTER_LAYER_CLASS(DUMMY_DATA, DummyDataLayer);
REGISTER_LAYER_CLASS(EUCLIDEAN_LOSS, EuclideanLossLayer);
REGISTER_LAYER_CLASS(ELTWISE, EltwiseLayer);
REGISTER_LAYER_CLASS(EXP, ExpLayer);
REGISTER_LAYER_CLASS(FLATTEN, FlattenLayer);
REGISTER_LAYER_CLASS(HDF5_DATA, HDF5DataLayer);
REGISTER_LAYER_CLASS(HDF5_OUTPUT, HDF5OutputLayer);
REGISTER_LAYER_CLASS(HINGE_LOSS, HingeLossLayer);
REGISTER_LAYER_CLASS(IMAGE_DATA, ImageDataLayer);
REGISTER_LAYER_CLASS(IM2COL, Im2colLayer);
REGISTER_LAYER_CLASS(INFOGAIN_LOSS, InfogainLossLayer);
REGISTER_LAYER_CLASS(INNER_PRODUCT, InnerProductLayer);
REGISTER_LAYER_CLASS(LRN, LRNLayer);
REGISTER_LAYER_CLASS(MEMORY_DATA, MemoryDataLayer);
REGISTER_LAYER_CLASS(MVN, MVNLayer);
REGISTER_LAYER_CLASS(MULTINOMIAL_LOGISTIC_LOSS, MultinomialLogisticLossLayer);
REGISTER_LAYER_CLASS(POWER, PowerLayer);
REGISTER_LAYER_CLASS(SILENCE, SilenceLayer);
REGISTER_LAYER_CLASS(SIGMOID_CROSS_ENTROPY_LOSS, SigmoidCrossEntropyLossLayer);
REGISTER_LAYER_CLASS(SLICE, SliceLayer);
REGISTER_LAYER_CLASS(SOFTMAX_LOSS, SoftmaxWithLossLayer);
REGISTER_LAYER_CLASS(SPLIT, SplitLayer);
REGISTER_LAYER_CLASS(THRESHOLD, ThresholdLayer);
REGISTER_LAYER_CLASS(WINDOW_DATA, WindowDataLayer);
// Get tanh layer according to engine.
template <typename Dtype>
Layer<Dtype>* GetTanHLayer(const LayerParameter& param) {
TanHParameter_Engine engine = param.tanh_param().engine();
if (engine == TanHParameter_Engine_DEFAULT) {
engine = TanHParameter_Engine_CAFFE;
#ifdef USE_CUDNN
engine = TanHParameter_Engine_CUDNN;
#endif
}
if (engine == TanHParameter_Engine_CAFFE) {
return new TanHLayer<Dtype>(param);
#ifdef USE_CUDNN
} else if (engine == TanHParameter_Engine_CUDNN) {
return new CuDNNTanHLayer<Dtype>(param);
#endif
} else {
LOG(FATAL) << "Layer " << param.name() << " has unknown engine.";
}
}

REGISTER_LAYER_CREATOR(TANH, GetTanHLayer);

// Layers that use their constructor as their default creator should be
// registered in their corresponding cpp files. Do not register them here.
} // namespace caffe
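The REGISTER_LAYER_CREATOR and REGISTER_LAYER_CLASS macros themselves are defined in layer_factory.hpp, which this diff does not show. As a rough sketch of the mechanism they rely on, the following illustrates a static registry keyed by the layer-type enum; the LayerRegistry name, its members, and param.type() returning the enum are illustrative assumptions here, not necessarily the real Caffe API:

#include <map>

namespace caffe {

// Sketch only: one registry per Dtype, mapping the layer-type enum from the
// prototxt to a creator function. Each REGISTER_* macro adds an entry at
// static-initialization time, so lookup works before any net is constructed.
// (Layer, LayerParameter, and LayerParameter_LayerType come from the Caffe
// headers and the protobuf-generated code.)
template <typename Dtype>
class LayerRegistry {
 public:
  typedef Layer<Dtype>* (*Creator)(const LayerParameter&);

  static std::map<LayerParameter_LayerType, Creator>& Registry() {
    static std::map<LayerParameter_LayerType, Creator> registry;
    return registry;
  }

  static void AddCreator(LayerParameter_LayerType type, Creator creator) {
    Registry()[type] = creator;  // last registration wins in this sketch
  }

  static Layer<Dtype>* CreateLayer(const LayerParameter& param) {
    // Assumes param.type() yields the declared layer-type enum.
    return Registry()[param.type()](param);
  }
};

}  // namespace caffe

With a registry like this, REGISTER_LAYER_CLASS(TYPE, ClassName) only needs to define a creator that returns new ClassName<Dtype>(param) and hand it to AddCreator, which is why each layer's .cpp file can now register itself right next to its INSTANTIATE_CLASS call, as the rest of this diff does.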
3 changes: 1 addition & 2 deletions src/caffe/layers/absval_layer.cpp
@@ -41,6 +41,5 @@ STUB_GPU(AbsValLayer);
#endif

INSTANTIATE_CLASS(AbsValLayer);


REGISTER_LAYER_CLASS(ABSVAL, AbsValLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/accuracy_layer.cpp
@@ -64,5 +64,5 @@ void AccuracyLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
}

INSTANTIATE_CLASS(AccuracyLayer);

REGISTER_LAYER_CLASS(ACCURACY, AccuracyLayer);
} // namespace caffe
1 change: 1 addition & 0 deletions src/caffe/layers/argmax_layer.cpp
@@ -58,5 +58,6 @@ void ArgMaxLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
}

INSTANTIATE_CLASS(ArgMaxLayer);
REGISTER_LAYER_CLASS(ARGMAX, ArgMaxLayer);

} // namespace caffe
3 changes: 1 addition & 2 deletions src/caffe/layers/bnll_layer.cpp
@@ -43,6 +43,5 @@ STUB_GPU(BNLLLayer);
#endif

INSTANTIATE_CLASS(BNLLLayer);


REGISTER_LAYER_CLASS(BNLL, BNLLLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/concat_layer.cpp
@@ -105,5 +105,5 @@ STUB_GPU(ConcatLayer);
#endif

INSTANTIATE_CLASS(ConcatLayer);

REGISTER_LAYER_CLASS(CONCAT, ConcatLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/contrastive_loss_layer.cpp
@@ -97,5 +97,5 @@ STUB_GPU(ContrastiveLossLayer);
#endif

INSTANTIATE_CLASS(ContrastiveLossLayer);

REGISTER_LAYER_CLASS(CONTRASTIVE_LOSS, ContrastiveLossLayer);
} // namespace caffe
1 change: 0 additions & 1 deletion src/caffe/layers/conv_layer.cpp
@@ -267,5 +267,4 @@ STUB_GPU(ConvolutionLayer);
#endif

INSTANTIATE_CLASS(ConvolutionLayer);

} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/data_layer.cpp
@@ -204,5 +204,5 @@ void DataLayer<Dtype>::InternalThreadEntry() {
}

INSTANTIATE_CLASS(DataLayer);

REGISTER_LAYER_CLASS(DATA, DataLayer);
} // namespace caffe
3 changes: 1 addition & 2 deletions src/caffe/layers/dropout_layer.cpp
@@ -73,6 +73,5 @@ STUB_GPU(DropoutLayer);
#endif

INSTANTIATE_CLASS(DropoutLayer);


REGISTER_LAYER_CLASS(DROPOUT, DropoutLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/dummy_data_layer.cpp
@@ -93,5 +93,5 @@ void DummyDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
}

INSTANTIATE_CLASS(DummyDataLayer);

REGISTER_LAYER_CLASS(DUMMY_DATA, DummyDataLayer);
} // namespace caffe
3 changes: 1 addition & 2 deletions src/caffe/layers/eltwise_layer.cpp
@@ -163,6 +163,5 @@ STUB_GPU(EltwiseLayer);
#endif

INSTANTIATE_CLASS(EltwiseLayer);


REGISTER_LAYER_CLASS(ELTWISE, EltwiseLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/euclidean_loss_layer.cpp
@@ -54,5 +54,5 @@ STUB_GPU(EuclideanLossLayer);
#endif

INSTANTIATE_CLASS(EuclideanLossLayer);

REGISTER_LAYER_CLASS(EUCLIDEAN_LOSS, EuclideanLossLayer);
} // namespace caffe
3 changes: 1 addition & 2 deletions src/caffe/layers/exp_layer.cpp
@@ -64,6 +64,5 @@ STUB_GPU(ExpLayer);
#endif

INSTANTIATE_CLASS(ExpLayer);


REGISTER_LAYER_CLASS(EXP, ExpLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/flatten_layer.cpp
@@ -34,5 +34,5 @@ STUB_GPU(FlattenLayer);
#endif

INSTANTIATE_CLASS(FlattenLayer);

REGISTER_LAYER_CLASS(FLATTEN, FlattenLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/hdf5_data_layer.cpp
@@ -119,5 +119,5 @@ STUB_GPU_FORWARD(HDF5DataLayer, Forward);
#endif

INSTANTIATE_CLASS(HDF5DataLayer);

REGISTER_LAYER_CLASS(HDF5_DATA, HDF5DataLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/hdf5_output_layer.cpp
@@ -70,5 +70,5 @@ STUB_GPU(HDF5OutputLayer);
#endif

INSTANTIATE_CLASS(HDF5OutputLayer);

REGISTER_LAYER_CLASS(HDF5_OUTPUT, HDF5OutputLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/hinge_loss_layer.cpp
@@ -77,5 +77,5 @@ void HingeLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
}

INSTANTIATE_CLASS(HingeLossLayer);

REGISTER_LAYER_CLASS(HINGE_LOSS, HingeLossLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/im2col_layer.cpp
@@ -88,5 +88,5 @@ STUB_GPU(Im2colLayer);
#endif

INSTANTIATE_CLASS(Im2colLayer);

REGISTER_LAYER_CLASS(IM2COL, Im2colLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/image_data_layer.cpp
@@ -131,5 +131,5 @@ void ImageDataLayer<Dtype>::InternalThreadEntry() {
}

INSTANTIATE_CLASS(ImageDataLayer);

REGISTER_LAYER_CLASS(IMAGE_DATA, ImageDataLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/infogain_loss_layer.cpp
@@ -106,5 +106,5 @@ void InfogainLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
}

INSTANTIATE_CLASS(InfogainLossLayer);

REGISTER_LAYER_CLASS(INFOGAIN_LOSS, InfogainLossLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/inner_product_layer.cpp
@@ -104,5 +104,5 @@ STUB_GPU(InnerProductLayer);
#endif

INSTANTIATE_CLASS(InnerProductLayer);

REGISTER_LAYER_CLASS(INNER_PRODUCT, InnerProductLayer);
} // namespace caffe
3 changes: 1 addition & 2 deletions src/caffe/layers/lrn_layer.cpp
@@ -252,6 +252,5 @@ STUB_GPU_BACKWARD(LRNLayer, CrossChannelBackward);
#endif

INSTANTIATE_CLASS(LRNLayer);


REGISTER_LAYER_CLASS(LRN, LRNLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/memory_data_layer.cpp
@@ -72,5 +72,5 @@ void MemoryDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
}

INSTANTIATE_CLASS(MemoryDataLayer);

REGISTER_LAYER_CLASS(MEMORY_DATA, MemoryDataLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/multinomial_logistic_loss_layer.cpp
@@ -62,5 +62,5 @@ void MultinomialLogisticLossLayer<Dtype>::Backward_cpu(
}

INSTANTIATE_CLASS(MultinomialLogisticLossLayer);

REGISTER_LAYER_CLASS(MULTINOMIAL_LOGISTIC_LOSS, MultinomialLogisticLossLayer);
} // namespace caffe
3 changes: 1 addition & 2 deletions src/caffe/layers/mvn_layer.cpp
@@ -159,6 +159,5 @@ STUB_GPU(MVNLayer);
#endif

INSTANTIATE_CLASS(MVNLayer);


REGISTER_LAYER_CLASS(MVN, MVNLayer);
} // namespace caffe
3 changes: 1 addition & 2 deletions src/caffe/layers/power_layer.cpp
@@ -99,6 +99,5 @@ STUB_GPU(PowerLayer);
#endif

INSTANTIATE_CLASS(PowerLayer);


REGISTER_LAYER_CLASS(POWER, PowerLayer);
} // namespace caffe
3 changes: 1 addition & 2 deletions src/caffe/layers/sigmoid_cross_entropy_loss_layer.cpp
@@ -75,6 +75,5 @@ STUB_GPU(SigmoidCrossEntropyLossLayer);
#endif

INSTANTIATE_CLASS(SigmoidCrossEntropyLossLayer);


REGISTER_LAYER_CLASS(SIGMOID_CROSS_ENTROPY_LOSS, SigmoidCrossEntropyLossLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/silence_layer.cpp
@@ -22,5 +22,5 @@ STUB_GPU(SilenceLayer);
#endif

INSTANTIATE_CLASS(SilenceLayer);

REGISTER_LAYER_CLASS(SILENCE, SilenceLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/slice_layer.cpp
@@ -137,5 +137,5 @@ STUB_GPU(SliceLayer);
#endif

INSTANTIATE_CLASS(SliceLayer);

REGISTER_LAYER_CLASS(SLICE, SliceLayer);
} // namespace caffe
2 changes: 0 additions & 2 deletions src/caffe/layers/softmax_layer.cpp
@@ -92,6 +92,4 @@ STUB_GPU(SoftmaxLayer);
#endif

INSTANTIATE_CLASS(SoftmaxLayer);


} // namespace caffe
3 changes: 1 addition & 2 deletions src/caffe/layers/softmax_loss_layer.cpp
@@ -90,6 +90,5 @@ STUB_GPU(SoftmaxWithLossLayer);
#endif

INSTANTIATE_CLASS(SoftmaxWithLossLayer);


REGISTER_LAYER_CLASS(SOFTMAX_LOSS, SoftmaxWithLossLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/split_layer.cpp
@@ -56,5 +56,5 @@ STUB_GPU(SplitLayer);
#endif

INSTANTIATE_CLASS(SplitLayer);

REGISTER_LAYER_CLASS(SPLIT, SplitLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/threshold_layer.cpp
@@ -29,5 +29,5 @@ STUB_GPU_FORWARD(ThresholdLayer, Forward);
#endif

INSTANTIATE_CLASS(ThresholdLayer);

REGISTER_LAYER_CLASS(THRESHOLD, ThresholdLayer);
} // namespace caffe
2 changes: 1 addition & 1 deletion src/caffe/layers/window_data_layer.cpp
@@ -402,5 +402,5 @@ void WindowDataLayer<Dtype>::InternalThreadEntry() {
}

INSTANTIATE_CLASS(WindowDataLayer);

REGISTER_LAYER_CLASS(WINDOW_DATA, WindowDataLayer);
} // namespace caffe