From 071ecd5a7fc66e759b6def586765e6c9a657a807 Mon Sep 17 00:00:00 2001
From: wjj19950828
Date: Thu, 4 Aug 2022 17:16:45 +0800
Subject: [PATCH] fixed pad bug

---
 x2paddle/op_mapper/pytorch2paddle/aten.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/x2paddle/op_mapper/pytorch2paddle/aten.py b/x2paddle/op_mapper/pytorch2paddle/aten.py
index a5dcae4..8153790 100755
--- a/x2paddle/op_mapper/pytorch2paddle/aten.py
+++ b/x2paddle/op_mapper/pytorch2paddle/aten.py
@@ -1221,18 +1221,16 @@ def aten_constant_pad_nd(mapper, graph, node):
     layer_attrs["value"] = mapper.attrs[inputs_name[2]]
 
     if padding_attr is not None:
+        padding_attr.reverse()
         layer_inputs["x"] = inputs_name[0]
         kernel_name = "paddle.nn.functional.pad"
         if len(padding_attr) == 2:
             layer_attrs["pad"] = [0, 0, 0, 0, 0, 0] + padding_attr
         elif len(padding_attr) == 4:
-            layer_inputs["x"] = inputs_name[0]
             layer_attrs["pad"] = [0, 0, 0, 0] + padding_attr
         elif len(padding_attr) == 6:
-            layer_inputs["x"] = inputs_name[0]
             layer_attrs["pad"] = [0, 0] + padding_attr
         else:
-            layer_inputs["x"] = inputs_name[0]
             layer_attrs["pad"] = padding_attr
         graph.add_layer(
             kernel_name,
--
GitLab
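
Note on the change: torch.nn.functional.pad describes padding starting from
the last tensor dimension, while paddle.nn.functional.pad with a full-length
pad list pads from the first dimension to the last, so the mapper now reverses
the PyTorch list before prefixing it with zeros; the redundant
layer_inputs["x"] assignments inside the branches are removed. Below is a
minimal, self-contained sketch of the pad-list conversion the patched mapper
performs. convert_pad_attr is a hypothetical helper used only for
illustration; x2paddle builds the list inline inside aten_constant_pad_nd:

    # Sketch of the conversion done by the patched aten_constant_pad_nd.
    # `convert_pad_attr` is hypothetical; it only mirrors the mapper logic.
    def convert_pad_attr(padding_attr):
        # Reverse the PyTorch-ordered pad list, as the patch now does.
        padding_attr = list(padding_attr)
        padding_attr.reverse()
        # Prefix with zeros so the list covers all dimensions of a 4-D
        # input, matching the zero prefixes the mapper prepends.
        if len(padding_attr) == 2:
            return [0, 0, 0, 0, 0, 0] + padding_attr
        elif len(padding_attr) == 4:
            return [0, 0, 0, 0] + padding_attr
        elif len(padding_attr) == 6:
            return [0, 0] + padding_attr
        return padding_attr

    # Example: a PyTorch pad of (1, 1) on the last dimension of a 4-D
    # tensor maps to an 8-element Paddle pad list.
    print(convert_pad_attr([1, 1]))        # [0, 0, 0, 0, 0, 0, 1, 1]
    print(convert_pad_attr([1, 2, 3, 4]))  # [0, 0, 0, 0, 4, 3, 2, 1]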