Unverified commit f4061000 authored by Riccardo, committed by GitHub

Add support for ReLU6 op in CAFFE (#621)

Add support for Clip op in CAFFE
Co-authored-by: riccardo <rgall@sertiscorp.com>
Parent 9a06864a
@@ -30,6 +30,8 @@ WORKDIR $CAFFE_ROOT
# FIXME: use ARG instead of ENV once DockerHub supports this
# https://github.com/docker/hub-feedback/issues/460
ENV CLONE_TAG=1.0
# To enable Clip operation in CAFFE during validation, it's needed to set CLONE_TAG=master
# ENV CLONE_TAG=master
# ImportError in system pip wrappers after an upgrade.
# https://github.com/pypa/pip/issues/5599
...
@@ -437,7 +437,7 @@ message ParamSpec {
// NOTE
// Update the next available ID when you add a new LayerParameter field.
//
// LayerParameter next available layer-specific ID: 149 (last added: clip_param)
message LayerParameter {
optional string name = 1; // the layer name
optional string type = 2; // the layer type
@@ -494,6 +494,7 @@ message LayerParameter {
optional ArgMaxParameter argmax_param = 103;
optional BatchNormParameter batch_norm_param = 139;
optional BiasParameter bias_param = 141;
optional ClipParameter clip_param = 148;
optional ConcatParameter concat_param = 104;
optional ContrastiveLossParameter contrastive_loss_param = 105;
optional ConvolutionParameter convolution_param = 106;
@@ -778,6 +779,12 @@ message ArgMaxParameter {
optional int32 axis = 3;
}
// Message that stores parameters used by ClipLayer
message ClipParameter {
required float min = 1;
required float max = 2;
}
message ConcatParameter {
// The axis along which to concatenate -- may be negative to index from the
// end (e.g., -1 for the last axis). Other axes must have the
...
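For reference, a Clip layer with min = 0 is how ReLU6 would be expressed in a Caffe prototxt under this scheme; a minimal sketch (the layer and blob names here are illustrative, not taken from this commit):

layer {
  name: "relu6"
  type: "Clip"
  bottom: "conv1"
  top: "conv1"
  clip_param {
    min: 0
    max: 6
  }
}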
@@ -164,6 +164,7 @@ class CaffeConverter(base_converter.ConverterInterface):
'PReLU': ActivationType.PRELU,
'TanH': ActivationType.TANH,
'Sigmoid': ActivationType.SIGMOID,
'Clip': ActivationType.RELUX,
}
def __init__(self, option, src_model_file, src_weight_file):
@@ -177,6 +178,7 @@ class CaffeConverter(base_converter.ConverterInterface):
'TanH': self.convert_activation,
'Sigmoid': self.convert_activation,
'PReLU': self.convert_activation,
'Clip': self.convert_activation,
'Pooling': self.convert_pooling,
'Concat': self.convert_concat,
'Slice': self.convert_slice,
@@ -506,6 +508,13 @@ class CaffeConverter(base_converter.ConverterInterface):
type_arg.s = six.b(ActivationType.LEAKYRELU.name)
if caffe_op.type == 'Clip':
    mace_check(caffe_op.layer.clip_param.min == 0,
               "Mace only supports min == 0 Clip op")
    limit_arg = op.arg.add()
    limit_arg.name = MaceKeyword.mace_activation_max_limit_str
    limit_arg.f = caffe_op.layer.clip_param.max
def convert_folded_batchnorm(self, caffe_op):
    op = self.convert_general_op(caffe_op)
    op.type = MaceOp.BatchNorm.name
...
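Conceptually, the converter change above maps a Clip layer onto MACE's RELUX activation, taking max_limit from clip_param.max. A minimal standalone sketch of that mapping in Python (the helper below is hypothetical and not part of the converter):

# Hypothetical helper illustrating the Clip -> RELUX mapping shown above.
def clip_to_relux(clip_min, clip_max):
    # The converter only accepts Clip layers whose lower bound is 0,
    # because RELUX always clamps at 0.
    if clip_min != 0:
        raise ValueError("Mace only supports min == 0 Clip op")
    # The upper bound becomes the activation's max_limit argument.
    return {"type": "RELUX", "max_limit": clip_max}

# A Clip layer with min=0 and max=6 is equivalent to ReLU6.
print(clip_to_relux(0, 6))  # {'type': 'RELUX', 'max_limit': 6}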