From a227ae2bbc3b80399e5d296d40899cbd40cfd4fd Mon Sep 17 00:00:00 2001 From: Jiabin Yang <360788950@qq.com> Date: Tue, 10 Jan 2023 10:23:31 +0800 Subject: [PATCH] Fix prim paddle c make (#49643) * prototype of composite grad in paddle * prototype of composite grad in paddle * refactor composite api with phi * fix compile error * support static graph code-gen for squeeze op * generate static graph code of unsqueeze * refine op name * fix compile error * add extra output in op_compat * remove debug log * fix clang compile error * support prim switch flag * support prim switch flag * fix dygraph error * merge develop * add code_gen * add necessary files without codegen * fix code_gen bug * add deps * modify ignore * add ignore * delete std cout * add composite logic for backward.py * add tanh first order grad composite * support enable_prim flag for static graph * throw exception when both GradOpMaker and GradCompOpMaker have not been registered * reorganize the directory of prim api tests * fix windows error * add eager_utils * add eager_utils * modify code gen * add composite parse * add unittest for get_grad_op_desc * code optimize * fix static test on windows * support generate static graph code for imag and real op * fix windows compile error in test_static_prim * merge develop * disable test eager in inference * prim code gen * disable eager compile in inference * rm other file * rm gitignore file * code_style * add eager test * code_style * merge develop * remove useless files * modify static test * support bool flag from singleton * merge develop * recover git ignore * fix conflict * recover git ignore for generated op * fix test compile error * remove some tests * add python test * fix some name issue * add composite code gen * modify backward yaml * fix static composite grad maker code gen * remove additional files * add some static funcs unit test * fix some bugs * fix composite grad maker register code gen * optimize some functions * remove duplicated cmake * 
fix cmake and codegen problem Co-authored-by: zyfncg Co-authored-by: wangruting Co-authored-by: cxxly Co-authored-by: charles-hit Co-authored-by: xiaoguoguo626807 <100397923+xiaoguoguo626807@users.noreply.github.com> --- .../auto_code_generator/generator/codegen_utils.py | 13 +++++++++---- .../auto_code_generator/generator/eager_gen.py | 10 +++++----- paddle/fluid/operators/generator/CMakeLists.txt | 7 ------- 3 files changed, 14 insertions(+), 16 deletions(-) diff --git a/paddle/fluid/eager/auto_code_generator/generator/codegen_utils.py b/paddle/fluid/eager/auto_code_generator/generator/codegen_utils.py index bf10763e2f..9caa759a25 100644 --- a/paddle/fluid/eager/auto_code_generator/generator/codegen_utils.py +++ b/paddle/fluid/eager/auto_code_generator/generator/codegen_utils.py @@ -411,11 +411,14 @@ def ParseYamlCompositeInfo(string): pattern = fr'{fname}{wspace}\({wspace}{fargs}{wspace}\)' m = re.search(pattern, string) - composite_fun_info = [] - composite_fun_info.append(m.group(1)) + composite_fun_info = {} + composite_fun_info.update({"name": m.group(1)}) func_args = m.group(2).split(",") for fun_arg in func_args: - composite_fun_info.append(fun_arg.strip()) + if "args" in composite_fun_info: + composite_fun_info["args"].append(fun_arg.strip()) + else: + composite_fun_info.update({"args": [fun_arg.strip()]}) return composite_fun_info @@ -455,7 +458,9 @@ class FunctionGeneratorBase: # Special Op Attributes self.optional_inputs = [] # [name, ...] self.no_need_buffers = [] # [name, ...] - self.composite_func_info = [] # [func_name, input_name, ...] + self.composite_func_info = ( + {} + ) # {name: func_name, args: [input_name, ...]} self.intermediate_outputs = [] # [name, ...] 
self.forward_inplace_map = {} # {name : name, ...} diff --git a/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py b/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py index e03529bbe0..414270a544 100644 --- a/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py +++ b/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py @@ -876,7 +876,7 @@ class DygraphFunctionGeneratorBase(FunctionGeneratorBase): backward_attrs_list = self.backward_attrs_list optional_inputs = self.optional_inputs is_composite_grad_api = ( - False if self.composite_func_info == [] else True + False if self.composite_func_info == {} else True ) # Pass Stop Gradient Args @@ -1836,7 +1836,7 @@ class DygraphNodeGenerator(DygraphFunctionGeneratorBase): self.grad_api_contents, self.forward_apis_dict ) is_composite_grad_api = ( - False if self.composite_func_info == [] else True + False if self.composite_func_info == {} else True ) if next_node_generator is not None: @@ -1970,7 +1970,7 @@ class DygraphNodeGenerator(DygraphFunctionGeneratorBase): forward_api_name = self.forward_api_name backward_api_name = self.backward_api_name composite_grad_api_name = ( - self.composite_func_info[0] if is_composite_grad_api else None + self.composite_func_info["name"] if is_composite_grad_api else None ) backward_forward_inputs_map = self.backward_forward_inputs_map backward_grad_inputs_map = self.backward_grad_inputs_map @@ -2257,10 +2257,10 @@ class DygraphNodeGenerator(DygraphFunctionGeneratorBase): grad_function_call_str = f""" if (paddle::prim::PrimCommonUtils::IsPrimEnabled()) {{ {indent}{composite_grad_api_namespace}{composite_grad_api_name}{composite_template_name}({composite_grad_api_args_str}); - VLOG(4) << paddle::string::Sprintf("composite api %s is called" , "{composite_grad_api_name}"); + VLOG(4) << "Composite api {composite_grad_api_name} is called "; }}else{{ {indent}{grad_api_namespace}{backward_api_name}({grad_api_args_str}); - VLOG(4) << 
paddle::string::Sprintf("origin api %s is called" , "{backward_api_name}"); + VLOG(4) << "Fused api {backward_api_name} is called "; }} """ else: diff --git a/paddle/fluid/operators/generator/CMakeLists.txt b/paddle/fluid/operators/generator/CMakeLists.txt index 759a0fbaae..62c11faada 100644 --- a/paddle/fluid/operators/generator/CMakeLists.txt +++ b/paddle/fluid/operators/generator/CMakeLists.txt @@ -156,13 +156,6 @@ set(generated_static_files "${generated_static_argument_mapping_path}" "${generated_sparse_argument_mapping_path}") -set(generated_static_files - "${generated_op_path}" - "${generated_static_op_path}" - "${generated_sparse_ops_path}" - "${generated_argument_mapping_path}" - "${generated_static_argument_mapping_path}" - "${generated_sparse_argument_mapping_path}") foreach(generated_static_file ${generated_static_files}) if(EXISTS "${generated_static_file}.tmp" AND EXISTS "${generated_static_file}") -- GitLab