From e98f6d909438c2876fd285a65d706c188b3d8486 Mon Sep 17 00:00:00 2001
From: SunAhong1993
Date: Thu, 4 Mar 2021 11:45:26 +0800
Subject: [PATCH] fix the caffe leaklyrelu and add aten_prelu

---
 .../dygraph/caffe2paddle/caffe_op_mapper.py  |  2 +-
 .../op_mapper/dygraph/pytorch2paddle/aten.py | 36 +++++++++++++++++++
 .../layer_code_generator.py                  |  3 +-
 3 files changed, 39 insertions(+), 2 deletions(-)

diff --git a/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py b/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py
index 42c7098..dcecf0e 100644
--- a/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py
+++ b/x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py
@@ -571,7 +571,7 @@ class CaffeOpMapper(OpMapper):
         if params.HasField('negative_slope') and params.negative_slope != 0:
             negative_slope = float(params.negative_slope)
 
-            layer_attrs = {'alpha': negative_slope}
+            layer_attrs = {'negative_slope': negative_slope}
             self.paddle_graph.add_layer(
                 "paddle.nn.LeakyReLU",
                 inputs={"input": input.name},
diff --git a/x2paddle/op_mapper/dygraph/pytorch2paddle/aten.py b/x2paddle/op_mapper/dygraph/pytorch2paddle/aten.py
index d6c6493..636fb41 100644
--- a/x2paddle/op_mapper/dygraph/pytorch2paddle/aten.py
+++ b/x2paddle/op_mapper/dygraph/pytorch2paddle/aten.py
@@ -3353,6 +3353,42 @@ def aten_pow(mapper, graph, node):
     return current_inputs, current_outputs
 
 
+def aten_prelu(mapper, graph, node):
+    """ 构造prelu激活的PaddleLayer。
+
+    TorchScript示例:
+        %result.3 : aten::prelu(%input.150, %999)
+        参数含义:
+        %result.3 (Tensor): 输出,prelu后的结果。
+        %input.150 (Tensor): 需要prelu的Tensor。
+        %999 (Tensor): 权重。
+    """
+    scope_name = mapper.normalize_scope_name(node)
+    op_name = name_generator("relu", mapper.nn_name2id)
+    output_name = mapper._get_outputs_name(node)[0]
+    layer_outputs = [op_name, output_name]
+    layer_inputs = {}
+    inputs_name, inputs_node = mapper._get_inputs_name(node)
+    # 获取当前节点输出的list
+    current_outputs = [output_name]
+    # 处理输入0,即%input.150
+    mapper._check_input(graph, inputs_node[0], inputs_name[0], current_outputs, scope_name)
+    layer_inputs["x"] = inputs_name[0]
+    # 处理输入1,即%999
+    weight = mapper.pytorch_params[inputs_name[1]]
+    mapper.paddle_params[op_name + "._weight"] = weight
+    # 获取当前节点输入的list
+    current_inputs = list(layer_inputs.values())
+
+    graph.add_layer(
+        "paddle.nn.PReLU",
+        inputs=layer_inputs,
+        outputs=layer_outputs,
+        scope_name=scope_name,
+        num_parameters=weight.shape[0])
+    return current_inputs, current_outputs
+
+
 def aten_relu(mapper, graph, node):
     """ 构造ReLU激活的PaddleLayer。
 
diff --git a/x2paddle/optimizer/pytorch_code_optimizer/layer_code_generator.py b/x2paddle/optimizer/pytorch_code_optimizer/layer_code_generator.py
index a4c368c..ab06035 100644
--- a/x2paddle/optimizer/pytorch_code_optimizer/layer_code_generator.py
+++ b/x2paddle/optimizer/pytorch_code_optimizer/layer_code_generator.py
@@ -27,6 +27,7 @@ NN_KERNEL_NAME = {"paddle.nn.BatchNorm": "bn",
                   "paddle.nn.Linear": "linear",
                   "paddle.nn.Conv2DTranspose": "conv",
                   "paddle.nn.LSTM": "lstm",
+                  "paddle.nn.PReLU": "prelu",
                   "paddle.nn.ReLU": "relu",
                   "paddle.nn.ReLU6": "relu",
                   "paddle.nn.Softmax": "softmax",
@@ -41,7 +42,7 @@ NN_KERNEL_NAME = {"paddle.nn.BatchNorm": "bn",
                   "paddle.nn.GELU": "gelu",
                   "paddle.nn.Hardtanh": "tanh",
                   "paddle.nn.LeakyReLU": "leakly_relu"}
-NN_KERNEL_WITH_PARAMS = list(NN_KERNEL_NAME.keys())[:7]
+NN_KERNEL_WITH_PARAMS = list(NN_KERNEL_NAME.keys())[:8]

 def rename_layers(layers, param_tree=None, is_rename_module=False):
     """ 对子模块的输入输出等进行重命名。
--
GitLab