From e8df9aec95a9f36b8843c560d9bec5c772508cff Mon Sep 17 00:00:00 2001
From: SunAhong1993
Date: Wed, 2 Dec 2020 11:38:49 +0800
Subject: [PATCH] remove

---
 x2paddle/decoder/pytorch_decoder.py            | 16 ++---
 .../op_mapper/dygraph/pytorch2paddle/aten.py   | 60 -------------------
 2 files changed, 8 insertions(+), 68 deletions(-)

diff --git a/x2paddle/decoder/pytorch_decoder.py b/x2paddle/decoder/pytorch_decoder.py
index b4ff69d..43281d1 100644
--- a/x2paddle/decoder/pytorch_decoder.py
+++ b/x2paddle/decoder/pytorch_decoder.py
@@ -21,14 +21,14 @@ import numpy as np
 class Decoder(object):
     def _optimize_graph(self, graph):
         torch._C._jit_pass_constant_propagation(graph)
-#         torch._C._jit_pass_dce(graph)
-#         torch._C._jit_pass_lint(graph)
-#         torch._C._jit_pass_peephole(graph)
-#         torch._C._jit_pass_lint(graph)
-#         torch._C._jit_pass_dce(graph)
-#         torch._C._jit_pass_lint(graph)
-#         torch._C._jit_pass_canonicalize(graph)
-#         torch._C._jit_pass_lint(graph)
+        torch._C._jit_pass_dce(graph)
+        torch._C._jit_pass_lint(graph)
+        torch._C._jit_pass_peephole(graph)
+        torch._C._jit_pass_lint(graph)
+        torch._C._jit_pass_dce(graph)
+        torch._C._jit_pass_lint(graph)
+        torch._C._jit_pass_canonicalize(graph)
+        torch._C._jit_pass_lint(graph)
         torch._C._jit_pass_constant_propagation(graph)
         return graph
 
diff --git a/x2paddle/op_mapper/dygraph/pytorch2paddle/aten.py b/x2paddle/op_mapper/dygraph/pytorch2paddle/aten.py
index 35bdc32..bb09402 100644
--- a/x2paddle/op_mapper/dygraph/pytorch2paddle/aten.py
+++ b/x2paddle/op_mapper/dygraph/pytorch2paddle/aten.py
@@ -1589,66 +1589,6 @@ def aten_expand(mapper, graph, node):
         outputs=layer_outputs,
         scope_name=scope_name,
         **layer_attrs)
-
-#     graph.add_layer(
-#         "prim.type",
-#         inputs={"input": inputs_name[0]},
-#         outputs=[inputs_name[0] + "_type"],
-#         scope_name=scope_name)
-#     graph.add_layer(
-#         "prim.str",
-#         inputs={"input": inputs_name[0] + "_type"},
-#         outputs=[inputs_name[0] + "_type"],
-#         scope_name=scope_name)
-#     graph.add_layer(
-#         "prim.eq",
-#         inputs={"x": inputs_name[0] + "_type"},
-#         outputs=[inputs_name[0] + "_cond"],
-#         scope_name=scope_name,
-#         y=string("VarType.BOOL"))
-#     graph.add_layer(
-#         "prim.if", {'input': inputs_name[0] + "_cond"},
-#         outputs=[inputs_name[0] + "_if1", inputs_name[1] + "_var"],
-#         scope_name=scope_name)
-#     if_layer = graph.layers[list(graph.layers.keys())[-1]]
-#     block = PaddleGraph(parent_layer=if_layer, graph_type="dygraph")
-#     block.add_layer(
-#         "paddle.cast",
-#         inputs={"x": inputs_name[0]},
-#         outputs=[inputs_name[0]],
-#         scope_name=scope_name,
-#         dtype=string("int64"))
-#     block.add_layer(
-#         "paddle.zeros",
-#         inputs={"shape": inputs_name[1]},
-#         outputs=[inputs_name[1] + "_var"],
-#         scope_name=scope_name,
-#         dtype=string("int64"))
-#     if_layer.add_block(block)
-#     block = PaddleGraph(parent_layer=if_layer, graph_type="dygraph")
-#     block.add_layer(
-#         "prim.type",
-#         inputs={"input": inputs_name[0]},
-#         outputs=[inputs_name[0] + "_type"],
-#         scope_name=scope_name)
-#     block.add_layer(
-#         "paddle.zeros",
-#         inputs={"shape": inputs_name[1]},
-#         outputs=[inputs_name[1] + "_var"],
-#         scope_name=scope_name,
-#         dtype=inputs_name[0] + "_type")
-#     if_layer.add_block(block)
-#     if_layer.inputs["input-0"] = inputs_name[0]
-#     if_layer.inputs["input-1"] = inputs_name[1]
-
-#     layer_inputs["y"] = inputs_name[1] + "_var"
-#     current_outputs.append(inputs_name[1] + "_var")
-#     # 获取当前节点输入的list
-#     current_inputs = list(layer_inputs.values())
-#     current_inputs.append(inputs_name[1])
-
-#     graph.add_layer(
-#         "paddle.expand_as", inputs=layer_inputs, outputs=layer_outputs, scope_name=scope_name)
     return current_inputs, current_outputs
 
 
-- 
GitLab