diff --git a/x2paddle/core/program.py b/x2paddle/core/program.py
index e65c0825b520909bbd6f0b2134ea94554a8d6ec1..3d59ae95693ddb3361ee67747bd790fea204efc8 100644
--- a/x2paddle/core/program.py
+++ b/x2paddle/core/program.py
@@ -275,7 +275,7 @@ class PaddleGraph(object):
 
     def gen_dygraph_model(self, save_dir, jit_type=None):
         if jit_type == "trace":
-            from x2paddle.optimizer.code_optimizer import HierarchicalTree
+            from x2paddle.optimizer.pytorch_code_optimizer import HierarchicalTree
             hierarchical_tree = HierarchicalTree(self)
             for layer_id, layer in self.layers.items():
                 hierarchical_tree.insert(layer)
@@ -283,7 +283,7 @@ class PaddleGraph(object):
             self.dump_dygraph_parameter(save_dir)
         else:
             if self.source_type == "pytorch":
-                from x2paddle.optimizer.code_optimizer import ModuleGraph
+                from x2paddle.optimizer.pytorch_code_optimizer import ModuleGraph
                 module_graph = ModuleGraph(self)
                 module_graph.save_source_files(save_dir)
                 self.dump_dygraph_parameter(save_dir)
@@ -347,7 +347,8 @@ class PaddleGraph(object):
         ], indent=1)
         for layer_id, layer in self.layers.items():
-            remove_default_attrs(layer)
+            if layer.kernel.startswith("paddle"):
+                remove_default_attrs(layer.kernel, layer.attrs)
             edges_in = self.edges_in.get(layer_id, [])
             edges_out = self.edges_out.get(layer_id, [])
             if len(edges_in) == 0 and len(edges_out) == 0:
@@ -546,7 +547,8 @@ class PaddleGraph(object):
         gen_head()
 
         for layer_id, layer in self.layers.items():
-            remove_default_attrs(layer)
+            if layer.kernel.startswith("paddle"):
+                remove_default_attrs(layer.kernel, layer.attrs)
             if ("paddle.nn" in layer.kernel and "functional" not in layer.kernel
                 ) or layer.kernel == "paddle.to_tensor" or \
                layer.kernel.startswith("custom_layer") or \
diff --git a/x2paddle/core/util.py b/x2paddle/core/util.py
index 38cf6ab2030437da21a1f61184792019f47a3e88..224abb2f1a31655a42d01c74fd316c172235316d 100644
--- a/x2paddle/core/util.py
+++ b/x2paddle/core/util.py
@@ -28,7 +28,7 @@ def name_generator(nn_name, nn_name2id):
         real_nn_name = nn_name + str(nn_name2id[nn_name])
     return real_nn_name
 
-def remove_default_attrs(layer, diff_attrs=None):
+def remove_default_attrs(kernel, attrs):
     def get_default_args(func):
         signature = inspect.signature(func)
         return {
@@ -36,10 +36,6 @@
             for k, v in signature.parameters.items()
             if v.default is not inspect.Parameter.empty
         }
-    kernel = layer.kernel
-    attrs = layer.attrs
-    if ":" in kernel or "prim" in kernel or "module" in kernel:
-        return
     is_func = True
     if "paddle.nn" in kernel and "functional" not in kernel:
         is_func = False
@@ -61,9 +57,4 @@
             if len(set(attrs[default_k])) == 1:
                 attrs[default_k] = attrs[default_k][0]
         if default_v == attrs[default_k]:
-            if diff_attrs is None:
-                attrs.pop(default_k)
-            else:
-                key_name = "{}_{}".format(layer.outputs[0], default_k)
-                if key_name not in diff_attrs:
-                    attrs.pop(default_k)
+            attrs.pop(default_k)
\ No newline at end of file
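
Note on the util.py change: the refactored helper now takes the kernel string and the attrs dict directly, and no longer skips non-Paddle kernels itself; the `layer.kernel.startswith("paddle")` guard added in program.py above takes over that job. A minimal sketch of the new calling convention follows — the kernel name and attribute values are illustrative only, and running it assumes paddlepaddle is installed:

    from x2paddle.core.util import remove_default_attrs

    # Hypothetical layer data; in x2paddle these live on a PaddleLayer.
    kernel = "paddle.nn.Conv2D"
    attrs = {"in_channels": 3, "out_channels": 16, "kernel_size": 3,
             "stride": 1, "padding": 0}

    if kernel.startswith("paddle"):          # caller-side guard, as in program.py
        remove_default_attrs(kernel, attrs)  # mutates attrs in place

    # stride=1 and padding=0 match paddle.nn.Conv2D's declared defaults,
    # so they are dropped; explicit non-default values are kept.
    print(attrs)
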
diff --git a/x2paddle/optimizer/code_optimizer/__init__.py b/x2paddle/optimizer/pytorch_code_optimizer/__init__.py
similarity index 78%
rename from x2paddle/optimizer/code_optimizer/__init__.py
rename to x2paddle/optimizer/pytorch_code_optimizer/__init__.py
index 6aba8a196de57797d27af44e916c349a38002b4a..bcef73ab268de515374480743351ca0f6d96b0ff 100644
--- a/x2paddle/optimizer/code_optimizer/__init__.py
+++ b/x2paddle/optimizer/pytorch_code_optimizer/__init__.py
@@ -13,5 +13,5 @@
 # limitations under the License.
 
-from x2paddle.optimizer.code_optimizer.hierachical_tree import HierarchicalTree
-from x2paddle.optimizer.code_optimizer.module_graph import ModuleGraph
\ No newline at end of file
+from x2paddle.optimizer.pytorch_code_optimizer.hierachical_tree import HierarchicalTree
+from x2paddle.optimizer.pytorch_code_optimizer.module_graph import ModuleGraph
\ No newline at end of file
diff --git a/x2paddle/optimizer/code_optimizer/hierachical_tree.py b/x2paddle/optimizer/pytorch_code_optimizer/hierachical_tree.py
similarity index 98%
rename from x2paddle/optimizer/code_optimizer/hierachical_tree.py
rename to x2paddle/optimizer/pytorch_code_optimizer/hierachical_tree.py
index 6a69e52da477f62c83f843876e0b9c3e7bb65254..ee36d2d3748d3915c09b0e3683f38675ad2119c4 100644
--- a/x2paddle/optimizer/code_optimizer/hierachical_tree.py
+++ b/x2paddle/optimizer/pytorch_code_optimizer/hierachical_tree.py
@@ -18,10 +18,10 @@ import copy
 import os.path as osp
 from treelib import Tree
 from queue import Queue
-from x2paddle.optimizer.code_optimizer.layer_code_generator import gen_layer_code, rename_layers, NN_KERNEL_WITH_PARAMS, NN_KERNEL_NAME
-from x2paddle.optimizer.code_optimizer.subgraphs_union import distinguish_sequential, get_inputs_outputs
+from x2paddle.optimizer.pytorch_code_optimizer.layer_code_generator import gen_layer_code, rename_layers, NN_KERNEL_WITH_PARAMS, NN_KERNEL_NAME
+from x2paddle.optimizer.pytorch_code_optimizer.subgraphs_union import distinguish_sequential, get_inputs_outputs
 from x2paddle.core.program import PaddleLayer
-from x2paddle.optimizer.code_optimizer.parameter_tree import PamareterNode, PamareterTree
+from x2paddle.optimizer.pytorch_code_optimizer.parameter_tree import PamareterNode, PamareterTree
 
 
 SEPARATOR_IN_SCOPE = "/"
diff --git a/x2paddle/optimizer/code_optimizer/layer_code_generator.py b/x2paddle/optimizer/pytorch_code_optimizer/layer_code_generator.py
similarity index 95%
rename from x2paddle/optimizer/code_optimizer/layer_code_generator.py
rename to x2paddle/optimizer/pytorch_code_optimizer/layer_code_generator.py
index bf3d6050fb9987bba25a65c2bcf3e47245e2d41c..dc89e7ff26e0538d9ca81e06a43a949fe377f418 100644
--- a/x2paddle/optimizer/code_optimizer/layer_code_generator.py
+++ b/x2paddle/optimizer/pytorch_code_optimizer/layer_code_generator.py
@@ -16,7 +16,7 @@
 import copy
 import os.path as osp
 import x2paddle
-from x2paddle.optimizer.code_optimizer.parameter_tree import PamareterNode
+from x2paddle.optimizer.pytorch_code_optimizer.parameter_tree import PamareterNode
 from x2paddle.core.util import *
 
 
@@ -128,7 +128,23 @@ def rename_layers(layers, param_tree=None, is_rename_module=False):
                 return count
     rename_sub_layers(layers_cp, count)
     return layers_cp, nn_param_nodes, new_names
-    
+
+
+def _update_attrs(layer, different_attrs):
+    if "module" in layer.kernel or "prim" in layer.kernel:
+        return
+    common_attrs = copy.deepcopy(layer.attrs)
+    special_attrs = dict()
+    for k, v in layer.attrs.items():
+        if len(layer.outputs) < 1:
+            break
+        key_name = "{}_{}".format(layer.outputs[0], k)
+        if key_name in different_attrs:
+            common_attrs.pop(k)
+            special_attrs[k] = v
+    remove_default_attrs(layer.kernel, common_attrs)
+    common_attrs.update(special_attrs)
+    layer.attrs = common_attrs
 
 def gen_layer_code(graph, sub_layers, sub_layers_name, different_attrs=dict()):
     """ Generate the Module code corresponding to sub_layers.
@@ -224,7 +240,7 @@ def gen_layer_code(graph, sub_layers, sub_layers_name, different_attrs=dict()):
             outputs.append(layer.outputs[0])
     no_output_count = 0
     for i, (layer_id, layer) in enumerate(sub_layers.items()):
-        remove_default_attrs(layer, different_attrs)
+        _update_attrs(layer, different_attrs)
         if ("paddle.nn" in layer.kernel and "functional" not in layer.kernel) or \
                 layer.kernel.startswith("custom_layer"):
             line = "self.{}".format(layer.outputs[0])
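
Note on `_update_attrs`: it replaces the old `diff_attrs` parameter of `remove_default_attrs`. Attributes whose `"<first output>_<name>"` key appears in `different_attrs` vary between instances of a merged module, so they are set aside before default-stripping and merged back afterwards. A self-contained sketch of that partitioning, with `remove_default_attrs` stubbed out so it runs without paddle and a stand-in class for x2paddle's PaddleLayer:

    import copy

    class FakeLayer:  # stand-in for x2paddle.core.program.PaddleLayer
        def __init__(self, kernel, outputs, attrs):
            self.kernel, self.outputs, self.attrs = kernel, outputs, attrs

    def update_attrs_sketch(layer, different_attrs):
        if "module" in layer.kernel or "prim" in layer.kernel:
            return
        common_attrs = copy.deepcopy(layer.attrs)
        special_attrs = dict()
        for k, v in layer.attrs.items():
            key_name = "{}_{}".format(layer.outputs[0], k)
            if key_name in different_attrs:
                common_attrs.pop(k)   # per-instance attr: protect it from stripping
                special_attrs[k] = v
        # remove_default_attrs(layer.kernel, common_attrs) would run here
        common_attrs.update(special_attrs)
        layer.attrs = common_attrs

    layer = FakeLayer("paddle.nn.Conv2D", ["conv0"], {"stride": 2, "padding": 0})
    update_attrs_sketch(layer, {"conv0_stride"})  # "stride" survives untouched
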
diff --git a/x2paddle/optimizer/code_optimizer/module_graph.py b/x2paddle/optimizer/pytorch_code_optimizer/module_graph.py
similarity index 98%
rename from x2paddle/optimizer/code_optimizer/module_graph.py
rename to x2paddle/optimizer/pytorch_code_optimizer/module_graph.py
index c9086bb79485157a9d5ae53dc784ec30d1c47a4c..54e2cbd34dd0111d3cd4b91aeffb7a255201a829 100644
--- a/x2paddle/optimizer/code_optimizer/module_graph.py
+++ b/x2paddle/optimizer/pytorch_code_optimizer/module_graph.py
@@ -17,9 +17,9 @@ import copy
 import os
 import os.path as osp
 from x2paddle.core.program import PaddleLayer
-from x2paddle.optimizer.code_optimizer.subgraphs_union import construct_attrs_table, get_inputs_outputs
-from x2paddle.optimizer.code_optimizer.layer_code_generator import gen_layer_code, rename_layers
-from x2paddle.optimizer.code_optimizer.parameter_tree import PamareterNode, PamareterTree
+from x2paddle.optimizer.pytorch_code_optimizer.subgraphs_union import construct_attrs_table, get_inputs_outputs
+from x2paddle.optimizer.pytorch_code_optimizer.layer_code_generator import gen_layer_code, rename_layers
+from x2paddle.optimizer.pytorch_code_optimizer.parameter_tree import PamareterNode, PamareterTree
 
 
 NoModuleStart = ["paddle.nn.ReLU"]
diff --git a/x2paddle/optimizer/code_optimizer/parameter_tree.py b/x2paddle/optimizer/pytorch_code_optimizer/parameter_tree.py
similarity index 100%
rename from x2paddle/optimizer/code_optimizer/parameter_tree.py
rename to x2paddle/optimizer/pytorch_code_optimizer/parameter_tree.py
diff --git a/x2paddle/optimizer/code_optimizer/subgraphs_union.py b/x2paddle/optimizer/pytorch_code_optimizer/subgraphs_union.py
similarity index 98%
rename from x2paddle/optimizer/code_optimizer/subgraphs_union.py
rename to x2paddle/optimizer/pytorch_code_optimizer/subgraphs_union.py
index ee804eb3093caaaf99ef880acd2c65b85585714b..24779c38d7ae66ffe9af7faafd5076fef8341abb 100644
--- a/x2paddle/optimizer/code_optimizer/subgraphs_union.py
+++ b/x2paddle/optimizer/pytorch_code_optimizer/subgraphs_union.py
@@ -16,7 +16,7 @@
 import copy
 import pandas as pd
-from x2paddle.optimizer.code_optimizer.layer_code_generator import rename_layers
+from x2paddle.optimizer.pytorch_code_optimizer.layer_code_generator import rename_layers
 
 
 def construct_attrs_table(sub_layers_list, node_name2sub_layers=None, module_name=None):
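
One consequence of the package rename for anyone importing the optimizer directly: the old module path disappears, so imports must be updated (assuming a build that includes this patch):

    # Old path, removed by this patch:
    #   from x2paddle.optimizer.code_optimizer import HierarchicalTree, ModuleGraph
    # New path:
    from x2paddle.optimizer.pytorch_code_optimizer import HierarchicalTree, ModuleGraph
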