Merge pull request #2482 from longjon/clean-message-comments
Clean up redundant protobuf message comments
shelhamer committed May 18, 2015
2 parents 352aef4 + dbd8319 commit 17e57ae
Showing 1 changed file with 3 additions and 30 deletions.
33 changes: 3 additions & 30 deletions src/caffe/proto/caffe.proto
@@ -368,7 +368,9 @@ message LossParameter {
optional bool normalize = 2 [default = true];
}

-// Message that stores parameters used by AccuracyLayer
+// Messages that store parameters used by individual layer types follow, in
+// alphabetical order.
+
message AccuracyParameter {
// When computing accuracy, count as correct by comparing the true label to
// the top k scoring classes. By default, only compare to the top scoring
@@ -386,14 +388,12 @@ message AccuracyParameter {
optional int32 ignore_label = 3;
}
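
For illustration only (not part of this commit), a hypothetical prototxt fragment exercising these fields; layer and blob names are invented, and treating label 0 as a class to skip is an assumption:

    layer {
      name: "accuracy_top5"        # hypothetical layer name
      type: "Accuracy"
      bottom: "fc8"                # class scores (invented blob name)
      bottom: "label"              # ground-truth labels
      top: "accuracy_top5"
      accuracy_param {
        top_k: 5          # correct if the true label is among the top 5 scores
        ignore_label: 0   # assumes label 0 marks instances to skip
      }
    }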

-// Message that stores parameters used by ArgMaxLayer
message ArgMaxParameter {
// If true produce pairs (argmax, maxval)
optional bool out_max_val = 1 [default = false];
optional uint32 top_k = 2 [default = 1];
}
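
A hypothetical prototxt sketch of these two options (layer and blob names invented):

    layer {
      name: "argmax"      # hypothetical layer name
      type: "ArgMax"
      bottom: "prob"
      top: "argmax"
      argmax_param {
        out_max_val: true   # emit (argmax, maxval) pairs rather than indices alone
        top_k: 3            # keep the three highest-scoring classes
      }
    }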

-// Message that stores parameters used by ConcatLayer
message ConcatParameter {
// The axis along which to concatenate -- may be negative to index from the
// end (e.g., -1 for the last axis). Other axes must have the
@@ -405,7 +405,6 @@ message ConcatParameter {
optional uint32 concat_dim = 1 [default = 1];
}
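
A hypothetical prototxt sketch (names invented) concatenating two feature blobs along the channel axis:

    layer {
      name: "concat"        # hypothetical layer name
      type: "Concat"
      bottom: "feat_a"      # invented blob names; all non-concat axes must match
      bottom: "feat_b"
      top: "concat"
      concat_param {
        axis: 1   # concatenate along channels (axis 1 in N x C x H x W)
      }
    }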

-// Message that stores parameters used by ContrastiveLossLayer
message ContrastiveLossParameter {
// margin for dissimilar pair
optional float margin = 1 [default = 1.0];
@@ -418,7 +417,6 @@ message ContrastiveLossParameter {
optional bool legacy_version = 2 [default = false];
}

-// Message that stores parameters used by ConvolutionLayer
message ConvolutionParameter {
optional uint32 num_output = 1; // The number of outputs for the layer
optional bool bias_term = 2 [default = true]; // whether to have bias terms
@@ -444,7 +442,6 @@ message ConvolutionParameter {
optional Engine engine = 15 [default = DEFAULT];
}

-// Message that stores parameters used by DataLayer
message DataParameter {
enum DB {
LEVELDB = 0;
@@ -475,12 +472,10 @@ message DataParameter {
optional bool force_encoded_color = 9 [default = false];
}

-// Message that stores parameters used by DropoutLayer
message DropoutParameter {
optional float dropout_ratio = 1 [default = 0.5]; // dropout ratio
}

-// Message that stores parameters used by DummyDataLayer.
// DummyDataLayer fills any number of arbitrarily shaped blobs with random
// (or constant) data generated by "Fillers" (see "message FillerParameter").
message DummyDataParameter {
@@ -500,7 +495,6 @@ message DummyDataParameter {
repeated uint32 width = 5;
}
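
A minimal hypothetical prototxt sketch using the legacy 4-D shape fields visible in this hunk (all names and values invented):

    layer {
      name: "dummy"         # hypothetical layer name
      type: "DummyData"
      top: "data"
      dummy_data_param {
        data_filler { type: "gaussian" std: 0.01 }   # random Gaussian filler
        num: 8  channels: 3  height: 32  width: 32   # one 8x3x32x32 blob
      }
    }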

-// Message that stores parameters used by EltwiseLayer
message EltwiseParameter {
enum EltwiseOp {
PROD = 0;
@@ -515,7 +509,6 @@ message EltwiseParameter {
optional bool stable_prod_grad = 3 [default = true];
}

-// Message that stores parameters used by ExpLayer
message ExpParameter {
// ExpLayer computes outputs y = base ^ (shift + scale * x), for base > 0.
// Or if base is set to the default (-1), base is set to e,
@@ -525,7 +518,6 @@ message ExpParameter {
optional float shift = 3 [default = 0.0];
}
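
As a worked example with hypothetical values, base = 2, scale = 1, shift = 0.5 gives y = 2 ^ (0.5 + x):

    layer {
      name: "exp"       # hypothetical layer and blob names
      type: "Exp"
      bottom: "in"
      top: "out"
      exp_param {
        base: 2.0    # y = 2 ^ (0.5 + 1.0 * x); the default base of -1 would give y = exp(0.5 + x)
        scale: 1.0
        shift: 0.5
      }
    }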

-// Message that stores parameters used by HDF5DataLayer
message HDF5DataParameter {
// Specify the data source.
optional string source = 1;
@@ -540,7 +532,6 @@ message HDF5DataParameter {
optional bool shuffle = 3 [default = false];
}

-// Message that stores parameters used by HDF5OutputLayer
message HDF5OutputParameter {
optional string file_name = 1;
}
@@ -554,7 +545,6 @@ message HingeLossParameter {
optional Norm norm = 1 [default = L1];
}

-// Message that stores parameters used by ImageDataLayer
message ImageDataParameter {
// Specify the data source.
optional string source = 1;
@@ -586,13 +576,11 @@ message ImageDataParameter {
optional string root_folder = 12 [default = ""];
}

-// Message that stores parameters InfogainLossLayer
message InfogainLossParameter {
// Specify the infogain matrix source.
optional string source = 1;
}

-// Message that stores parameters used by InnerProductLayer
message InnerProductParameter {
optional uint32 num_output = 1; // The number of outputs for the layer
optional bool bias_term = 2 [default = true]; // whether to have bias terms
@@ -605,7 +593,6 @@ message InnerProductParameter {
optional int32 axis = 5 [default = 1];
}

-// Message that stores parameters used by LRNLayer
message LRNParameter {
optional uint32 local_size = 1 [default = 5];
optional float alpha = 2 [default = 1.];
@@ -618,15 +605,13 @@ message LRNParameter {
optional float k = 5 [default = 1.];
}

-// Message that stores parameters used by MemoryDataLayer
message MemoryDataParameter {
optional uint32 batch_size = 1;
optional uint32 channels = 2;
optional uint32 height = 3;
optional uint32 width = 4;
}

-// Message that stores parameters used by MVNLayer
message MVNParameter {
// This parameter can be set to false to normalize mean only
optional bool normalize_variance = 1 [default = true];
@@ -638,7 +623,6 @@ message MVNParameter {
optional float eps = 3 [default = 1e-9];
}

-// Message that stores parameters used by PoolingLayer
message PoolingParameter {
enum PoolMethod {
MAX = 0;
@@ -668,21 +652,18 @@ message PoolingParameter {
optional bool global_pooling = 12 [default = false];
}

-// Message that stores parameters used by PowerLayer
message PowerParameter {
// PowerLayer computes outputs y = (shift + scale * x) ^ power.
optional float power = 1 [default = 1.0];
optional float scale = 2 [default = 1.0];
optional float shift = 3 [default = 0.0];
}
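
As a worked example with hypothetical values, power = 2 with the default scale and shift squares the input, y = x ^ 2:

    layer {
      name: "square"    # hypothetical layer and blob names
      type: "Power"
      bottom: "in"
      top: "out"
      power_param {
        power: 2.0   # y = (0.0 + 1.0 * x) ^ 2
      }
    }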

-// Message that stores parameters used by PythonLayer
message PythonParameter {
optional string module = 1;
optional string layer = 2;
}

-// Message that stores parameters used by ReLULayer
message ReLUParameter {
// Allow non-zero slope for negative inputs to speed up optimization
// Described in:
@@ -698,7 +679,6 @@ message ReLUParameter {
optional Engine engine = 2 [default = DEFAULT];
}
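
A hypothetical prototxt sketch of a leaky ReLU configured through this message (names invented):

    layer {
      name: "relu1"     # hypothetical layer name
      type: "ReLU"
      bottom: "conv1"   # invented blob name; top == bottom computes in place
      top: "conv1"
      relu_param {
        negative_slope: 0.1   # y = x for x > 0, y = 0.1 * x otherwise
      }
    }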

-// Message that stores parameters used by ReshapeLayer
message ReshapeParameter {
// Specify the output dimensions. If some of the dimensions are set to 0,
// the corresponding dimension from the bottom layer is used (unchanged).
@@ -763,7 +743,6 @@ message ReshapeParameter {
optional int32 num_axes = 3 [default = -1];
}
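
A hypothetical prototxt sketch of the 0 and -1 conventions described above (names invented): keep the batch axis and flatten the rest into one inferred dimension:

    layer {
      name: "flatten"     # hypothetical layer and blob names
      type: "Reshape"
      bottom: "data"
      top: "flat"
      reshape_param {
        shape {
          dim: 0    # 0: copy this dimension from the bottom blob
          dim: -1   # -1: infer from the remaining element count
        }
      }
    }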

-// Message that stores parameters used by SigmoidLayer
message SigmoidParameter {
enum Engine {
DEFAULT = 0;
@@ -773,7 +752,6 @@ message SigmoidParameter {
optional Engine engine = 1 [default = DEFAULT];
}

-// Message that stores parameters used by SliceLayer
message SliceParameter {
// The axis along which to slice -- may be negative to index from the end
// (e.g., -1 for the last axis).
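
A hypothetical prototxt sketch slicing along the channel axis (names and split point invented):

    layer {
      name: "slice"       # hypothetical layer name
      type: "Slice"
      bottom: "feats"
      top: "part_a"       # channels [0, 16)
      top: "part_b"       # channels [16, end)
      slice_param {
        axis: 1
        slice_point: 16   # assumed split point
      }
    }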
@@ -800,7 +778,6 @@ message SoftmaxParameter {
optional int32 axis = 2 [default = 1];
}

-// Message that stores parameters used by TanHLayer
message TanHParameter {
enum Engine {
DEFAULT = 0;
@@ -810,12 +787,10 @@ message TanHParameter {
optional Engine engine = 1 [default = DEFAULT];
}

-// Message that stores parameters used by ThresholdLayer
message ThresholdParameter {
optional float threshold = 1 [default = 0]; // Strictly positive values
}

-// Message that stores parameters used by WindowDataLayer
message WindowDataParameter {
// Specify the data source.
optional string source = 1;
@@ -849,7 +824,6 @@ message WindowDataParameter {
optional string root_folder = 13 [default = ""];
}

-// Message that stores parameters used by SPPLayer
message SPPParameter {
enum PoolMethod {
MAX = 0;
@@ -1053,7 +1027,6 @@ message V0LayerParameter {
optional HDF5OutputParameter hdf5_output_param = 1001;
}

-// Message that stores parameters used by PReLULayer
message PReLUParameter {
// Parametric ReLU described in K. He et al, Delving Deep into Rectifiers:
// Surpassing Human-Level Performance on ImageNet Classification, 2015.