From 5e645693996c8f63cd61c970bfd7d5e848b35f9f Mon Sep 17 00:00:00 2001
From: WJJ1995
Date: Fri, 5 Aug 2022 10:32:35 +0800
Subject: [PATCH] Fixed torch pad (#863)

* add logical ops
* add run_dynamic switch
* add Or and Xor
* add Compare ops
* fixed compare bug
* fixed pad bug
* fixed pad bug
---
 x2paddle/op_mapper/pytorch2paddle/aten.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/x2paddle/op_mapper/pytorch2paddle/aten.py b/x2paddle/op_mapper/pytorch2paddle/aten.py
index a5dcae4..0212f3d 100755
--- a/x2paddle/op_mapper/pytorch2paddle/aten.py
+++ b/x2paddle/op_mapper/pytorch2paddle/aten.py
@@ -1221,18 +1221,18 @@ def aten_constant_pad_nd(mapper, graph, node):
         layer_attrs["value"] = mapper.attrs[inputs_name[2]]
     if padding_attr is not None:
+        ## convert torch pad attr to paddle pad attr, eg:(x1,x2,x3,x4)->(x3,x4,x1,x2)
+        padding_attr = np.array(padding_attr).reshape((-1, 2))
+        padding_attr = np.flip(padding_attr, axis=0).flatten().tolist()
         layer_inputs["x"] = inputs_name[0]
         kernel_name = "paddle.nn.functional.pad"
         if len(padding_attr) == 2:
             layer_attrs["pad"] = [0, 0, 0, 0, 0, 0] + padding_attr
         elif len(padding_attr) == 4:
-            layer_inputs["x"] = inputs_name[0]
             layer_attrs["pad"] = [0, 0, 0, 0] + padding_attr
         elif len(padding_attr) == 6:
-            layer_inputs["x"] = inputs_name[0]
             layer_attrs["pad"] = [0, 0] + padding_attr
         else:
-            layer_inputs["x"] = inputs_name[0]
             layer_attrs["pad"] = padding_attr
         graph.add_layer(
             kernel_name,
--
GitLab
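
For reference, the conversion this patch adds can be exercised on its own. Torch's F.pad lists (before, after) pad pairs starting from the last tensor dimension, while paddle.nn.functional.pad, once the pad list covers every dimension, applies pairs from the first dimension onward; hence the pairs must be reversed and zeros prepended for the leading (batch/channel) dimensions. A minimal standalone sketch of that reversal, where torch_pad_to_paddle_pad is an illustrative name and not a function in x2paddle:

    import numpy as np

    def torch_pad_to_paddle_pad(padding):
        # Group the flat torch pad tuple into (before, after) pairs, then
        # reverse the pair order to match paddle's first-to-last convention,
        # e.g. (x1, x2, x3, x4) -> (x3, x4, x1, x2).
        pairs = np.array(padding).reshape((-1, 2))
        return np.flip(pairs, axis=0).flatten().tolist()

    # torch pad for the last two dims of an NCHW tensor: (left, right, top, bottom)
    print(torch_pad_to_paddle_pad([1, 2, 3, 4]))  # -> [3, 4, 1, 2]

The fixed mapper then prepends [0, 0, ...] for the unpadded leading dimensions, which is why the len-2, len-4, and len-6 branches above only differ in how many zeros they prepend.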