From 92b5f12bdcd1ad31a9de553b2146c28844a9ab3e Mon Sep 17 00:00:00 2001
From: SunAhong1993
Date: Tue, 25 Aug 2020 18:18:16 +0800
Subject: [PATCH] add setattr

---
 x2paddle/core/program.py                      |  5 +++
 x2paddle/op_mapper/pytorch2paddle/prim.py     | 33 ++++++++-------
 .../op_mapper/pytorch2paddle/prim2code.py     |  5 +++
 .../pytorch2paddle/pytorch_op_mapper.py       | 40 ++++++++++++++++---
 x2paddle/optimizer/fusion/__init__.py         |  2 +
 x2paddle/optimizer/optimizer.py               |  3 +-
 6 files changed, 68 insertions(+), 20 deletions(-)

diff --git a/x2paddle/core/program.py b/x2paddle/core/program.py
index 75c4be5..ab76c91 100644
--- a/x2paddle/core/program.py
+++ b/x2paddle/core/program.py
@@ -101,6 +101,11 @@ class PaddleGraph(object):
         self.clear_edges()
         outputs_from_nodes = dict()
         for layer_id, layer in self.layers.items():
+            # if "x5109" in layer.outputs or "x5110" in layer.outputs:
+            #     print(layer.kernel)
+            #     print(layer.inputs)
+            #     print(layer.outputs)
+            #     print(layer.attrs)
             for input_key, input_var in layer.inputs.items():
                 vs = input_var
                 if not isinstance(vs, list):
diff --git a/x2paddle/op_mapper/pytorch2paddle/prim.py b/x2paddle/op_mapper/pytorch2paddle/prim.py
index 7661714..60dba68 100644
--- a/x2paddle/op_mapper/pytorch2paddle/prim.py
+++ b/x2paddle/op_mapper/pytorch2paddle/prim.py
@@ -44,7 +44,7 @@ def prim_GetAttr(mapper, graph, node):
         %7 (Tensor): Input Tensor.
         %27 (Tensor): Input Tensor.
     """
-    output_name = mapper._get_outputs_name(node)[0]
+    current_node = node
     field_name_list = [node.s('name')]
     while True:
         input_node = list(node.inputs())[0].node()
@@ -53,18 +53,16 @@ def prim_GetAttr(mapper, graph, node):
             node = input_node
         except Exception:
             break
-    if ".".join(field_name_list) in mapper.pytorch_params:
-        mapper.pytorch_params[output_name] = mapper.pytorch_params[".".join(
-            field_name_list)]
-    else:
-        part_script = mapper.script
-        for field_name in field_name_list:
-            if hasattr(part_script, field_name):
-                param = getattr(part_script, field_name)
-                if isinstance(param, torch.Tensor):
-                    param = param.detach().numpy()
-                mapper.pytorch_params[output_name] = param
-                part_script = param
+    attr_name = ".".join(field_name_list)
+    output_name = mapper._get_outputs_name(current_node, attr_name)[0]
+    part_script = mapper.script
+    for field_name in field_name_list:
+        if hasattr(part_script, field_name):
+            param = getattr(part_script, field_name)
+            if isinstance(param, torch.Tensor):
+                param = param.detach().numpy()
+            mapper.pytorch_params[output_name] = param
+            part_script = param
     return [], [output_name]


@@ -295,8 +293,15 @@ def prim_SetAttr(mapper, graph, node):
     field_name_list.append(node.s('name'))

     inputs_name, inputs_node = mapper._get_inputs_name(node)
-    param = {"Tensor": inputs_name[1]}
+    param = {
+        "Tensor": "self." + ".".join(field_name_list).replace(".", "_"),
+        "parent_layer_id": graph.parent_layer.id
+    }
     mapper.pytorch_params[".".join(field_name_list)] = param
+    graph.add_layer(
+        "prim.set_attr",
+        inputs={"input": inputs_name[1]},
+        outputs=["self." + ".".join(field_name_list).replace(".", "_")])
     return [], [output_name]

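Note: the reworked prim_GetAttr/prim_SetAttr flatten an attribute path into a
single stable identifier instead of a fresh x<N> temporary, so repeated reads
and writes of the same attribute resolve to the same name in the generated
code. A minimal sketch of the naming rule taken from the hunks above (the
attribute path itself is hypothetical):

    # e.g. a TorchScript access chain self.decoder.block0.cache
    field_name_list = ["decoder", "block0", "cache"]
    attr_name = ".".join(field_name_list)             # "decoder.block0.cache"

    # prim_GetAttr names its output via _get_outputs_name(node, attr_name):
    get_name = attr_name.replace(".", "_")            # "decoder_block0_cache"

    # prim_SetAttr writes to a module attribute in the generated forward():
    set_name = "self." + attr_name.replace(".", "_")  # "self.decoder_block0_cache"
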
+ ".".join(field_name_list).replace(".", "_")]) return [], [output_name] diff --git a/x2paddle/op_mapper/pytorch2paddle/prim2code.py b/x2paddle/op_mapper/pytorch2paddle/prim2code.py index 5293d18..d30c3ee 100644 --- a/x2paddle/op_mapper/pytorch2paddle/prim2code.py +++ b/x2paddle/op_mapper/pytorch2paddle/prim2code.py @@ -208,6 +208,11 @@ def prim_select(layer, indent=1, init_func=[], forward_func=[]): forward_func.extend(gen_codes([line], indent=indent)) +def prim_set_attr(layer, indent=1, init_func=[], forward_func=[]): + line = "{} = {}".format(layer.outputs[0], get_value(layer, "input")) + forward_func.extend(gen_codes([line], indent=indent)) + + def prim_shape(layer, indent=1, init_func=[], forward_func=[]): line = "{} = {}.shape".format(layer.outputs[0], get_value(layer, "input")) forward_func.extend(gen_codes([line], indent=indent)) diff --git a/x2paddle/op_mapper/pytorch2paddle/pytorch_op_mapper.py b/x2paddle/op_mapper/pytorch2paddle/pytorch_op_mapper.py index ab51e73..95396eb 100644 --- a/x2paddle/op_mapper/pytorch2paddle/pytorch_op_mapper.py +++ b/x2paddle/op_mapper/pytorch2paddle/pytorch_op_mapper.py @@ -113,15 +113,19 @@ class PyTorchOpMapper(OpMapper): graph.set_parameters(self.paddle_params) return graph, graph_inputs - def _get_outputs_name(self, node): + def _get_outputs_name(self, node, attr_name=None): outputs_name = [] for output_ivalue in node.outputs(): - output_name = 'x' + str(self.output_index) script_unique_id = output_ivalue.unique() - if script_unique_id in self.outputs_info: - output_name = self.outputs_info[script_unique_id] + if attr_name is None: + output_name = 'x' + str(self.output_index) + if script_unique_id in self.outputs_info: + output_name = self.outputs_info[script_unique_id] + else: + output_name = attr_name.replace(".", "_") self.outputs_info[script_unique_id] = output_name self.output_index += 1 + outputs_name.append(output_name) # if或loop节点没有输出的情况 if len(list(node.outputs())) == 0: @@ -148,7 +152,33 @@ class PyTorchOpMapper(OpMapper): outputs=[output_name], value="params[{}]".format(string(output_name))) else: - if isinstance(param, dict) and "Tensor" in param: + if isinstance(param, dict) and "Tensor" in param and \ + "parent_layer_id" in param: + if graph.parent_layer is not None: + # 当某个param被2个控制流(if-else)赋值时,else不可以引用if中的赋值结果 + id1 = param["parent_layer_id"] + id2 = graph.parent_layer.id + id1_part = id1.split(".") + id2_part = id2.split(".") + if len(id1_part) >= len(id2_part): + for i in range(len(id1_part)): + if id1_part[i] == id2_part[i]: + continue + else: + if id1_part[i] == "0" and id2_part[ + i] == "1": + if add_dim: + param = param[np.newaxis, :] + self.paddle_params[output_name] = param + graph.add_layer( + "fluid.dygraph.base.to_variable", + inputs={}, + outputs=[output_name], + value="params[{}]".format( + string(output_name))) + node_outputs.append(output_name) + return + # 若if-else外,则可直接引用if-else中的赋值结果 graph.add_layer( "prim.constant", inputs={}, diff --git a/x2paddle/optimizer/fusion/__init__.py b/x2paddle/optimizer/fusion/__init__.py index d9b6967..543afeb 100644 --- a/x2paddle/optimizer/fusion/__init__.py +++ b/x2paddle/optimizer/fusion/__init__.py @@ -20,3 +20,5 @@ from .functional_adaptive_pool2d_fuser import FunctionalAdaptivePool2dFuser from .functional_adaptive_pool2d_fuse_pass import FunctionalAdaptivePool2dFusePass from .constant_fuser import ConstantFuser from .constant_fuse_pass import ConstantFusePass +from .batchnorm2d_fuser import BatchNorm2dFuser +from .batchnorm2d_fuse_pass import BatchNorm2dFusePass diff --git 
diff --git a/x2paddle/optimizer/optimizer.py b/x2paddle/optimizer/optimizer.py
index 7f239ab..4e6b42a 100644
--- a/x2paddle/optimizer/optimizer.py
+++ b/x2paddle/optimizer/optimizer.py
@@ -20,7 +20,8 @@ class GraphOptimizer(object):
     def __init__(self):
         self.passes = [
             "fc_fuse_pass", "nn_adaptive_pool2d_fuse_pass",
-            "functional_adaptive_pool2d_fuse_pass", "constant_fuse_pass"
+            "functional_adaptive_pool2d_fuse_pass", "batchnorm2d_fuse_pass",
+            "constant_fuse_pass"
         ]

     def optimize(self, graph):
--
GitLab
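Note: with this change batchnorm2d_fuse_pass is registered to run before
constant_fuse_pass. A minimal usage sketch of the optimizer as declared above
(it assumes optimize works on the graph in place, which the hunk does not
show; graph is the PaddleGraph built by PyTorchOpMapper):

    from x2paddle.optimizer.optimizer import GraphOptimizer

    optimizer = GraphOptimizer()
    # Runs the fc, adaptive-pool2d, batchnorm2d, and constant fuse passes
    # in the order listed in self.passes.
    optimizer.optimize(graph)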