Unverified commit e339d3c1, authored by Feiyu Chan, committed by GitHub

Add Code Generation for operators, op makers and argument mapping functions (#41772)

Parent: f6ee202f
-set(api_yaml_path "${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/api.yaml,${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/sparse_api.yaml")
+set(api_yaml_path "${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/api.yaml,${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/new_api.yaml,${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/sparse_api.yaml")
-set(backward_yaml_path "${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/backward.yaml,${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/sparse_bw_api.yaml")
+set(backward_yaml_path "${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/backward.yaml,${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/new_backward.yaml,${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/sparse_bw_api.yaml")
set(tmp_forwards_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/forwards/tmp_dygraph_functions.cc")
set(tmp_forwards_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/forwards/tmp_dygraph_functions.h")
set(tmp_nodes_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/tmp_nodes.cc")
......
@@ -63,22 +63,24 @@ def AssertMessage(lhs_str, rhs_str):
def ReadFwdFile(filepath):
    f = open(filepath, 'r')
+    # empty file loaded by yaml is None
    contents = yaml.load(f, Loader=yaml.FullLoader)
    f.close()
-    return contents
+    return contents if contents is not None else []


def ReadBwdFile(filepath):
    f = open(filepath, 'r')
    contents = yaml.load(f, Loader=yaml.FullLoader)
    ret = {}
-    for content in contents:
-        assert 'backward_api' in content.keys(), AssertMessage('backward_api',
-                                                               content.keys())
-        if 'backward_api' in content.keys():
-            api_name = content['backward_api']
-        ret[api_name] = content
+    if contents is not None:
+        for content in contents:
+            assert 'backward_api' in content.keys(), AssertMessage(
+                'backward_api', content.keys())
+            if 'backward_api' in content.keys():
+                api_name = content['backward_api']
+            ret[api_name] = content
    f.close()
    return ret
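The normalization above reflects PyYAML's behavior on empty documents: yaml.load returns None rather than an empty list or dict. A minimal illustration of that behavior (plain PyYAML, independent of the code in this commit):

    import yaml

    # An empty document parses to None, not [] or {}.
    assert yaml.load("", Loader=yaml.FullLoader) is None
    # A non-empty list document parses to a Python list as usual.
    assert yaml.load("- api: gather", Loader=yaml.FullLoader) == [{"api": "gather"}]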
@@ -207,6 +209,8 @@ def ParseYamlArgs(string):
        assert arg_type in yaml_types_mapping.keys(
        ), f"The argument type {arg_type} in yaml config is not supported in yaml_types_mapping."
+        if arg_type in ["DataType", "DataLayout"] and default_value is not None:
+            default_value = f"paddle::experimental::{default_value}"
        arg_type = yaml_types_mapping[arg_type]

        arg_name = RemoveSpecialSymbolsInName(arg_name)
......
@@ -13,6 +13,7 @@ set(api_gen_base ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen/api_base.py)
# forward api file
set(api_gen_file ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen/api_gen.py)
set(api_yaml_file ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen/api.yaml)
+set(new_api_yaml_file ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen/new_api.yaml)
set(api_header_file ${CMAKE_SOURCE_DIR}/paddle/phi/api/include/api.h)
set(api_source_file ${CMAKE_SOURCE_DIR}/paddle/phi/api/lib/api.cc)
set(api_header_file_tmp ${api_header_file}.tmp)
@@ -21,6 +22,7 @@ set(api_source_file_tmp ${api_source_file}.tmp)
# backward api file
set(bw_api_gen_file ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen/backward_api_gen.py)
set(bw_api_yaml_file ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen/backward.yaml)
+set(new_bw_api_yaml_file ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen/new_backward.yaml)
set(bw_api_header_file ${CMAKE_SOURCE_DIR}/paddle/phi/api/backward/backward_api.h)
set(bw_api_source_file ${CMAKE_SOURCE_DIR}/paddle/phi/api/lib/backward_api.cc)
set(bw_api_header_file_tmp ${bw_api_header_file}.tmp)
@@ -59,7 +61,6 @@ set(strings_api_source_file_tmp ${strings_api_source_file}.tmp)
# wrapped infermeta file
set(wrapped_infermeta_gen_file ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen/wrapped_infermeta_gen.py)
-set(api_yaml_file ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen/api.yaml)
set(wrapped_infermeta_header_file ${CMAKE_SOURCE_DIR}/paddle/phi/infermeta/generated.h)
set(wrapped_infermeta_source_file ${CMAKE_SOURCE_DIR}/paddle/phi/infermeta/generated.cc)
@@ -67,12 +68,106 @@ if (NOT PYTHON_EXECUTABLE)
  find_package(PythonInterp REQUIRED)
endif()
+# install extra dependencies
+execute_process(
+  COMMAND ${PYTHON_EXECUTABLE} -m pip install -U pyyaml jinja2
+)
+
+# parse apis
+set(parsed_api_dir ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen/parsed_apis)
+set(generated_op_path ${CMAKE_SOURCE_DIR}/paddle/fluid/operators/generated_op.cc)
+set(generated_argument_mapping_path ${CMAKE_SOURCE_DIR}/paddle/phi/ops/compat/generated_sig.cc)
+message("parse api yamls:
+- ${api_yaml_file}
+- ${new_api_yaml_file}
+- ${bw_api_yaml_file}
+- ${new_bw_api_yaml_file}")
+execute_process(
+  WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen
+  COMMAND ${CMAKE_COMMAND} -E make_directory ${parsed_api_dir}
+  COMMAND ${PYTHON_EXECUTABLE} parse_api.py
+          --api_yaml_path ./api.yaml
+          --output_path ./parsed_apis/api.parsed.yaml
+  COMMAND ${PYTHON_EXECUTABLE} parse_api.py
+          --api_yaml_path ./new_api.yaml
+          --output_path ./parsed_apis/new_api.parsed.yaml
+  COMMAND ${PYTHON_EXECUTABLE} parse_api.py
+          --api_yaml_path ./backward.yaml
+          --output_path ./parsed_apis/backward_api.parsed.yaml
+          --backward
+  COMMAND ${PYTHON_EXECUTABLE} parse_api.py
+          --api_yaml_path ./new_backward.yaml
+          --output_path ./parsed_apis/new_backward_api.parsed.yaml
+          --backward
+  RESULTS_VARIABLE _results
+)
+foreach(_result ${_results})
+  if (${_result})
+    message(FATAL_ERROR "api yaml parsing failed, exiting.")
+  endif()
+endforeach()
+
+# validation of api yamls
+message("validate api yaml:
+- ${parsed_api_dir}/new_api.parsed.yaml
+- ${parsed_api_dir}/new_backward_api.parsed.yaml")
+execute_process(
+  WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen
+  COMMAND ${PYTHON_EXECUTABLE} cross_validate.py
+          --forward_yaml_paths ./parsed_apis/api.parsed.yaml ./parsed_apis/new_api.parsed.yaml
+          --backward_yaml_paths ./parsed_apis/backward_api.parsed.yaml ./parsed_apis/new_backward_api.parsed.yaml
+  RESULT_VARIABLE _result
+)
+if (${_result})
+  message(FATAL_ERROR "api validation failed, exiting.")
+endif()
+
+# code generation for op, op makers, and argument mapping functions
+message("create or remove auto-generated operators: ${generated_op_path}.tmp
+create or remove auto-generated argument mappings: ${generated_argument_mapping_path}.tmp")
+execute_process(
+  WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/python/paddle/utils/code_gen
+  COMMAND ${PYTHON_EXECUTABLE} generate_op.py
+          --api_yaml_path ./parsed_apis/new_api.parsed.yaml
+          --backward_api_yaml_path ./parsed_apis/new_backward_api.parsed.yaml
+          --output_op_path "${generated_op_path}.tmp"
+          --output_arg_map_path "${generated_argument_mapping_path}.tmp"
+  RESULT_VARIABLE _result
+)
+if (${_result})
+  message(FATAL_ERROR "operator codegen failed, exiting.")
+endif()
+
+if(EXISTS "${generated_op_path}.tmp" AND EXISTS "${generated_op_path}")
+  execute_process(COMMAND ${CMAKE_COMMAND} -E copy_if_different "${generated_op_path}.tmp" "${generated_op_path}")
+  message("copy if different ${generated_op_path}.tmp ${generated_op_path}")
+elseif(EXISTS "${generated_op_path}.tmp")
+  execute_process(COMMAND ${CMAKE_COMMAND} -E copy "${generated_op_path}.tmp" "${generated_op_path}")
+  message("copy ${generated_op_path}.tmp ${generated_op_path}")
+else()
+  execute_process(COMMAND ${CMAKE_COMMAND} -E rm -f "${generated_op_path}")
+  message("remove ${generated_op_path}")
+endif()
+
+if(EXISTS "${generated_argument_mapping_path}.tmp" AND EXISTS "${generated_argument_mapping_path}")
+  execute_process(COMMAND ${CMAKE_COMMAND} -E copy_if_different "${generated_argument_mapping_path}.tmp" "${generated_argument_mapping_path}")
+  message("copy if different ${generated_argument_mapping_path}.tmp ${generated_argument_mapping_path}")
+elseif(EXISTS "${generated_argument_mapping_path}.tmp")
+  execute_process(COMMAND ${CMAKE_COMMAND} -E copy "${generated_argument_mapping_path}.tmp" "${generated_argument_mapping_path}")
+  message("copy ${generated_argument_mapping_path}.tmp ${generated_argument_mapping_path}")
+else()
+  execute_process(COMMAND ${CMAKE_COMMAND} -E rm -f "${generated_argument_mapping_path}")
+  message("remove ${generated_argument_mapping_path}")
+endif()
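The tmp-file dance above keeps incremental builds quiet: codegen always writes to <file>.tmp, and the real file is only overwritten when its contents actually changed, or removed when codegen produced nothing. A rough Python sketch of the same protocol, assuming the usual semantics of cmake -E copy_if_different:

    import filecmp
    import os
    import shutil

    def install_if_different(tmp_path, target_path):
        if os.path.exists(tmp_path):
            # copy_if_different: overwrite only on a real change so that
            # build rules depending on the target do not re-trigger.
            if not (os.path.exists(target_path)
                    and filecmp.cmp(tmp_path, target_path, shallow=False)):
                shutil.copyfile(tmp_path, target_path)
        elif os.path.exists(target_path):
            # no generated output this time: drop the stale file
            os.remove(target_path)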
# generate forward api
add_custom_command(
  OUTPUT ${api_header_file} ${api_source_file}
  COMMAND ${PYTHON_EXECUTABLE} -m pip install pyyaml
  COMMAND ${PYTHON_EXECUTABLE} ${api_gen_file}
-          --api_yaml_path ${api_yaml_file}
+          --api_yaml_path ${api_yaml_file} ${new_api_yaml_file}
           --api_header_path ${api_header_file_tmp}
           --api_source_path ${api_source_file_tmp}
@@ -86,7 +181,7 @@ add_custom_command(
add_custom_command(
  OUTPUT ${bw_api_header_file} ${bw_api_source_file} ${bw_api_header_file_tmp} ${bw_api_source_file_tmp}
  COMMAND ${PYTHON_EXECUTABLE} ${bw_api_gen_file}
-          --backward_yaml_path ${bw_api_yaml_file}
+          --backward_yaml_path ${bw_api_yaml_file} ${new_bw_api_yaml_file}
           --backward_header_path ${bw_api_header_file_tmp}
           --backward_source_path ${bw_api_source_file_tmp}
  COMMAND ${CMAKE_COMMAND} -E copy_if_different ${bw_api_header_file_tmp} ${bw_api_header_file}
@@ -138,7 +233,7 @@ add_custom_command(
add_custom_command(
  OUTPUT ${dygraph_api_header_file} ${dygraph_api_source_file}
  COMMAND ${PYTHON_EXECUTABLE} ${im_api_gen_file}
-          --api_yaml_path ${api_yaml_file}
+          --api_yaml_path ${api_yaml_file} ${new_api_yaml_file}
           --sparse_api_yaml_path ${sparse_api_yaml_file}
           --dygraph_api_header_path ${dygraph_api_header_file_tmp}
           --dygraph_api_source_path ${dygraph_api_source_file_tmp}
@@ -151,7 +246,7 @@ add_custom_command(
add_custom_command(
  OUTPUT ${wrapped_infermeta_header_file} ${wrapped_infermeta_source_file}
  COMMAND ${PYTHON_EXECUTABLE} ${wrapped_infermeta_gen_file}
-          --api_yaml_path ${api_yaml_file}
+          --api_yaml_path ${api_yaml_file} ${new_api_yaml_file}
           --wrapped_infermeta_header_path ${wrapped_infermeta_header_file}
           --wrapped_infermeta_source_path ${wrapped_infermeta_source_file}
  DEPENDS ${api_yaml_file} ${wrapped_infermeta_gen_file} ${api_gen_base}
......
@@ -27,6 +27,7 @@ template <typename T>
class ScalarBase {
 public:
  // Constructor support implicit
+  ScalarBase() : ScalarBase(0) {}
  ScalarBase(double val) : dtype_(DataType::FLOAT64) {  // NOLINT
    data_.f64 = val;
  }
......
@@ -812,7 +812,7 @@
    skip_transform : x

- api : gather
-  args : (Tensor x, Tensor index, Scalar axis=0)
+  args : (Tensor x, Tensor index, Scalar(int) axis=0)
  output : Tensor(out)
  infer_meta :
    func : GatherInferMeta
@@ -2021,7 +2021,7 @@
  backward : subtract_grad

- api : sum
-  args : (Tensor x, int64_t[] dims={}, DataType out_dtype=paddle::experimental::DataType::UNDEFINED, bool keep_dim=false)
+  args : (Tensor x, int64_t[] dims={}, DataType out_dtype=DataType::UNDEFINED, bool keep_dim=false)
  output : Tensor(out)
  infer_meta :
    func : SumInferMeta
......
@@ -222,9 +222,14 @@ namespace experimental {


def generate_api(api_yaml_path, header_file_path, source_file_path):
+    apis = []
+    for each_api_yaml in api_yaml_path:
+        with open(each_api_yaml, 'r') as f:
+            api_list = yaml.load(f, Loader=yaml.FullLoader)
+            if api_list:
+                apis.extend(api_list)
-    with open(api_yaml_path, 'r') as f:
-        apis = yaml.load(f, Loader=yaml.FullLoader)

    header_file = open(header_file_path, 'w')
    source_file = open(source_file_path, 'w')
@@ -259,6 +264,7 @@ def main():
    parser.add_argument(
        '--api_yaml_path',
        help='path to api yaml file',
+        nargs='+',
        default='python/paddle/utils/code_gen/api.yaml')
    parser.add_argument(
......
@@ -281,7 +281,6 @@
    param : [grad_x_grad, axis]
  kernel :
    func : concat
-  no_need_buffer : x

- backward_api : concat_grad
  forward : concat (Tensor[] x, Scalar axis) -> Tensor(out)
@@ -507,7 +506,6 @@
    param : [out_grad]
  kernel :
    func : dropout_grad
-  optional : seed_tensor

- backward_api : eigh_grad
  forward : eigh (Tensor x, str uplo) -> Tensor(out_w), Tensor(out_v)
@@ -648,7 +646,6 @@
    data_type: out_grad
    backend: out_grad
    layout: out_grad
-  no_need_buffer : x

- backward_api : flip_grad
  forward : flip (Tensor x, int[] axis) -> Tensor(out)
@@ -866,7 +863,6 @@
    param : [out_grad]
  kernel :
    func : label_smooth_grad
-  optional : prior_dist

- backward_api : layer_norm_grad
  forward : layer_norm (Tensor x, Tensor scale, Tensor bias, float epsilon, int begin_norm_axis, bool is_test) -> Tensor(out), Tensor(mean), Tensor(variance)
@@ -1483,7 +1479,7 @@
  no_need_buffer : grad_out

- backward_api : reshape_grad
-  forward : reshape_with_xshape (Tensor x, IntArray shape) -> Tensor(out), Tensor(xshape)
+  forward : reshape (Tensor x, IntArray shape) -> Tensor(out), Tensor(xshape)
  args : (Tensor xshape, Tensor out_grad)
  output : Tensor(x_grad)
  infer_meta :
@@ -1814,7 +1810,7 @@
  backward : sum_triple_grad

- backward_api : sum_grad
-  forward : sum (Tensor x, int64_t[] dims={}, DataType out_dtype=paddle::experimental::DataType::UNDEFINED, bool keep_dim=false) -> Tensor(out)
+  forward : sum (Tensor x, int64_t[] dims={}, DataType out_dtype=DataType::UNDEFINED, bool keep_dim=false) -> Tensor(out)
  args : (Tensor x, Tensor out_grad, int64_t[] dims, bool keep_dim, bool reduce_all=false)
  output : Tensor(x_grad)
  infer_meta :
@@ -1830,7 +1826,6 @@
  args : (Tensor grad_grad_x, Tensor grad_grad_out_grad, int64_t[] dims={}, bool keep_dim=false, bool reduce_all=false)
  output : Tensor(grad_grad_x_grad)
  invoke : sum_grad(grad_grad_x, grad_grad_out_grad, dims, keep_dim, reduce_all, grad_grad_x_grad)
-  no_need_buffer : x

- backward_api : swish_grad
  forward : swish (Tensor x, float beta=1.0) -> Tensor(out)
......
@@ -237,8 +237,13 @@ namespace experimental {


def generate_backward_api(backward_yaml_path, header_file_path,
                          source_file_path):
-    with open(backward_yaml_path, 'r') as f:
-        bw_apis = yaml.load(f, Loader=yaml.FullLoader)
+    bw_apis = []
+    for each_api_yaml in backward_yaml_path:
+        with open(each_api_yaml, 'r') as f:
+            api_list = yaml.load(f, Loader=yaml.FullLoader)
+            if api_list:
+                bw_apis.extend(api_list)

    header_file = open(header_file_path, 'w')
    source_file = open(source_file_path, 'w')
@@ -270,6 +275,7 @@ def main():
    parser.add_argument(
        '--backward_yaml_path',
        help='path to backward yaml file',
+        nargs='+',
        default='python/paddle/utils/code_gen/backward.yaml')
    parser.add_argument(
        '--backward_header_path',
......
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from itertools import chain
from pathlib import Path

import yaml
from parse_utils import cross_validate, to_named_dict


def main(forward_api_yaml_paths, backward_api_yaml_paths):
    apis = {}
    for api_yaml_path in chain(forward_api_yaml_paths, backward_api_yaml_paths):
        with open(api_yaml_path, "rt", encoding="utf-8") as f:
            api_list = yaml.safe_load(f)
            if api_list is not None:
                apis.update(to_named_dict(api_list))
    cross_validate(apis)


if __name__ == "__main__":
    current_dir = Path(__file__).parent / "temp"
    parser = argparse.ArgumentParser(
        description="Cross validate the parsed forward and backward api yaml files.")
    parser.add_argument(
        '--forward_yaml_paths',
        type=str,
        nargs='+',
        default=str(current_dir / "api.parsed.yaml"),
        help="forward api yaml file.")
    parser.add_argument(
        '--backward_yaml_paths',
        type=str,
        nargs='+',
        default=str(current_dir / "backward.parsed.yaml"),
        help="backward api yaml file.")
    args = parser.parse_args()
    main(args.forward_yaml_paths, args.backward_yaml_paths)
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List, Dict
import re

from jinja2.filters import do_xmlattr
from type_mapping import (input_types_map, optional_input_types_map,
                          attr_types_map, opmaker_attr_types_map,
                          output_type_map)
from type_mapping import (dense_input_types_map, dense_optional_input_types_map,
                          dense_output_types_map, sr_input_types_map,
                          sr_optional_input_types_map, sr_output_types_map,
                          phi_attr_types_map)


# ------------------------------ attr -------------------------------------
def to_phi_attr_type(s):
    return phi_attr_types_map[s]


def to_op_attr_type(s):
    return opmaker_attr_types_map[s]


def to_paddle_attr_type(s):
    "Convert type tag for attributes in yaml to c++ types"
    return attr_types_map[s]


# ------------------------------ input ----------------------------------
def to_paddle_input_type(s, optional=False):
    "Convert type tag for inputs in yaml to c++ types"
    if optional:
        return optional_input_types_map[s]
    else:
        return input_types_map[s]


def to_dense_input_type(s, optional=False):
    "Convert types in yaml to dense tensor type in phi"
    if optional:
        return dense_optional_input_types_map[s]
    else:
        return dense_input_types_map[s]


# ------------------------------ output ----------------------------------
def to_paddle_output_type(s):
    return output_type_map[s]


def to_dense_output_type(s):
    "Convert types in yaml to dense tensor type in phi"
    return dense_output_types_map[s]


def to_sr_output_type(s):
    "Convert types in yaml to selected rows type in phi"
    return sr_output_types_map[s]


# -------------- transform argument names from yaml to opmaker ------------
def to_opmaker_name(s):
    if s.endswith("_grad"):
        return 'GradVarName("{}")'.format(to_pascal_case(s.removesuffix("_grad")))
    else:
        return '"{}"'.format(to_pascal_case(s))


def to_opmaker_name_cstr(s):
    if s.endswith("_grad"):
        return '"{}@GRAD"'.format(to_pascal_case(s.removesuffix("_grad")))
    else:
        return '"{}"'.format(to_pascal_case(s))


def to_pascal_case(s):
    words = s.split("_")
    return "".join([word.capitalize() for word in words])


def to_input_name(s):
    """find input variable name in api yaml for higher order backward api.
    x -> dx
    x -> d2x
    x -> d3x

    NOTE: for first order backward api
    x -> x_grad
    is more common.
    """
    match = re.match(r"(d\d*)(\w+)", s)
    assert match is not None, "it should be a grad style name."
    return match.group(2)
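The name-mapping helpers are easiest to read from examples; the following assertions are hand-checked against the definitions above (removesuffix requires Python 3.9+):

    assert to_pascal_case("layer_norm") == "LayerNorm"
    assert to_opmaker_name("x_grad") == 'GradVarName("X")'
    assert to_opmaker_name_cstr("x_grad") == '"X@GRAD"'
    assert to_input_name("d2x") == "x"  # strips the d/d2/d3 grad prefix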
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from itertools import chain
from pathlib import Path

import yaml
from jinja2 import Environment, FileSystemLoader, StrictUndefined

from filters import to_op_attr_type, to_opmaker_name, to_opmaker_name_cstr, to_pascal_case
from tests import is_base_api, is_vec, is_scalar, is_initializer_list, supports_inplace, supports_no_need_buffer
from filters import to_input_name
from parse_utils import to_named_dict

file_loader = FileSystemLoader(Path(__file__).parent / "templates")
env = Environment(
    loader=file_loader,
    keep_trailing_newline=True,
    trim_blocks=True,
    lstrip_blocks=True,
    undefined=StrictUndefined,
    extensions=['jinja2.ext.do'])
env.filters["to_op_attr_type"] = to_op_attr_type
env.filters["to_opmaker_name"] = to_opmaker_name
env.filters["to_pascal_case"] = to_pascal_case
env.filters["to_input_name"] = to_input_name
env.filters["to_opmaker_name_cstr"] = to_opmaker_name_cstr
env.tests["base_api"] = is_base_api
env.tests["vec"] = is_vec
env.tests["scalar"] = is_scalar
env.tests["initializer_list"] = is_initializer_list
env.tests["supports_inplace"] = supports_inplace
env.tests["supports_no_need_buffer"] = supports_no_need_buffer


def main(api_yaml_path, backward_yaml_path, output_op_path,
         output_arg_map_path):
    with open(api_yaml_path, "rt") as f:
        apis = yaml.safe_load(f)
        forward_api_dict = to_named_dict(apis)

    with open(backward_yaml_path, "rt") as f:
        backward_apis = yaml.safe_load(f)
        backward_api_dict = to_named_dict(backward_apis)

    # fill backward field for an api if another api claims it as forward
    for name, backward_api in backward_api_dict.items():
        forward_name = backward_api["forward"]["name"]
        if forward_name in backward_api_dict:
            forward_api = backward_api_dict[forward_name]
            if forward_api["backward"] is None:
                forward_api["backward"] = name

    api_dict = {}
    api_dict.update(forward_api_dict)
    api_dict.update(backward_api_dict)

    if len(apis) == 0 and len(backward_apis) == 0:
        if os.path.isfile(output_op_path):
            os.remove(output_op_path)
        if os.path.isfile(output_arg_map_path):
            os.remove(output_arg_map_path)
        return

    op_template = env.get_template('op.c.j2')
    with open(output_op_path, "wt") as f:
        msg = op_template.render(
            apis=apis, backward_apis=backward_apis, api_dict=api_dict)
        f.write(msg)

    ks_template = env.get_template('ks.c.j2')
    with open(output_arg_map_path, 'wt') as f:
        msg = ks_template.render(apis=apis, backward_apis=backward_apis)
        f.write(msg)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Generate operator file from api yaml.")
    parser.add_argument(
        '--api_yaml_path', type=str, help="parsed api yaml file.")
    parser.add_argument(
        '--backward_api_yaml_path',
        type=str,
        help="parsed backward api yaml file.")
    parser.add_argument(
        "--output_op_path", type=str, help="path to save generated operators.")
    parser.add_argument(
        "--output_arg_map_path",
        type=str,
        help="path to save generated argument mapping functions.")

    args = parser.parse_args()
    main(args.api_yaml_path, args.backward_api_yaml_path, args.output_op_path,
         args.output_arg_map_path)
@@ -94,8 +94,12 @@ def generate_intermediate_api(api_yaml_path, sparse_api_yaml_path,
    dygraph_source_file.write(source_include(dygraph_include_header_file))
    dygraph_source_file.write(namespace[0])

-    with open(api_yaml_path, 'r') as f:
-        apis = yaml.load(f, Loader=yaml.FullLoader)
+    apis = []
+    for each_api_yaml in api_yaml_path:
+        with open(each_api_yaml, 'r') as f:
+            api_list = yaml.load(f, Loader=yaml.FullLoader)
+            if api_list:
+                apis.extend(api_list)

    for api in apis:
        foward_api = ForwardAPI(api)
@@ -131,6 +135,7 @@ def main():
        description='Generate PaddlePaddle C++ Sparse API files')
    parser.add_argument(
        '--api_yaml_path',
+        nargs='+',
        help='path to api yaml file',
        default='python/paddle/utils/code_gen/api.yaml')
......
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from pathlib import Path

import yaml
from parse_utils import parse_api_entry


def main(api_yaml_path, output_path, backward):
    with open(api_yaml_path, "rt") as f:
        apis = yaml.safe_load(f)
        if apis is None:
            apis = []
        else:
            apis = [
                parse_api_entry(api, "backward_api" if backward else "api")
                for api in apis
            ]

    with open(output_path, "wt") as f:
        yaml.safe_dump(apis, f, default_flow_style=None, sort_keys=False)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Parse api yaml into canonical format.")
    parser.add_argument('--api_yaml_path', type=str, help="api yaml file.")
    parser.add_argument(
        "--output_path", type=str, help="path to save parsed yaml file.")
    parser.add_argument("--backward", action="store_true", default=False)

    args = parser.parse_args()
    main(args.api_yaml_path, args.output_path, args.backward)
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re

import yaml
from copy import copy
from typing import Dict, Any, List, Tuple
from tests import is_attr, is_input, is_output, is_vec


def to_named_dict(items: List[Dict]) -> Dict[str, Dict]:
    named_dict = {}
    for item in items:
        if "name" not in item:
            raise KeyError(f"name not in {item}")
        name = item["name"]
        named_dict[name] = item
    return named_dict
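For example, to_named_dict keys a parsed api list by name:

    to_named_dict([{"name": "gather"}, {"name": "gather_grad"}])
    # -> {"gather": {"name": "gather"}, "gather_grad": {"name": "gather_grad"}}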
def parse_arg(api_name: str, s: str) -> Dict[str, str]:
    """parse an argument in following formats:
    1. typename name
    2. typename name = default_value
    """
    typename, rest = [item.strip() for item in s.split(" ", 1)]
    assert len(
        typename
    ) > 0, f"The arg typename should not be empty. Please check the args of {api_name} in yaml."

    assert rest.count(
        "=") <= 1, f"There is more than 1 = in an arg in {api_name}"
    if rest.count("=") == 1:
        name, default_value = [item.strip() for item in rest.split("=", 1)]
        assert len(
            name
        ) > 0, f"The arg name should not be empty. Please check the args of {api_name} in yaml."
        assert len(
            default_value
        ) > 0, f"The default value should not be empty. Please check the args of {api_name} in yaml."
        return {
            "typename": typename,
            "name": name,
            "default_value": default_value
        }
    else:
        name = rest.strip()
        assert len(
            name
        ) > 0, f"The arg name should not be empty. Please check the args of {api_name} in yaml."
        return {"typename": typename, "name": name}
def parse_input_and_attr(api_name: str,
                         arguments: str) -> Tuple[List, List, Dict, Dict]:
    args_str = arguments.strip()
    assert args_str.startswith('(') and args_str.endswith(')'), \
        (f"Args declaration should start with '(' and end with ')', "
         f"please check the args of {api_name} in yaml.")
    args_str = args_str[1:-1]
    args = parse_plain_list(args_str)

    inputs = []
    attrs = []

    met_attr_with_default_value = False

    for arg in args:
        item = parse_arg(api_name, arg)
        typename = item["typename"]
        name = item["name"]
        if is_input(typename):
            assert len(attrs) == 0, \
                (f"The input Tensor should appear before attributes. "
                 f"please check the position of {api_name}:input({name}) "
                 f"in yaml.")
            inputs.append(item)
        elif is_attr(typename):
            if met_attr_with_default_value:
                assert "default_value" in item, f"{api_name}: Arguments with default value should not precede those without default value"
            elif "default_value" in item:
                met_attr_with_default_value = True
            attrs.append(item)
        else:
            raise KeyError(f"{api_name}: Invalid argument type {typename}.")
    return inputs, attrs
def parse_output(api_name: str, s: str) -> Dict[str, str]:
    """parse an output, typename or typename(name)."""
    match = re.search(
        r"(?P<out_type>[a-zA-Z0-9_[\]]+)\s*(?P<name>\([a-zA-Z0-9_@]+\))?\s*(?P<expr>\{[^\}]+\})?",
        s)
    typename = match.group("out_type")
    name = match.group("name")
    size_expr = match.group("expr")

    name = name[1:-1] if name is not None else 'out'
    size_expr = size_expr[1:-1] if size_expr is not None else None

    assert is_output(typename), \
        (f"Invalid output type: {typename} in api: {api_name}."
         f"Supported types are Tensor and Tensor[]")
    if size_expr is not None:
        assert is_vec(typename), \
            (f"Invalid output size: output {name} in api: {api_name} is "
             f"not a vector but has size expr")
        return {"typename": typename, "name": name, "size": size_expr}
    else:
        return {"typename": typename, "name": name}


def parse_outputs(api_name: str, outputs: str) -> List[Dict]:
    outputs = parse_plain_list(outputs, sep=",")
    output_items = []
    for output in outputs:
        output_items.append(parse_output(api_name, output))
    return output_items
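An output spec is a typename, an optional name in parentheses, and, for vector outputs only, an optional size expression in braces; the second example below is hypothetical:

    parse_output("gather", "Tensor(out)")
    # -> {"typename": "Tensor", "name": "out"}
    parse_output("unbind", "Tensor[](out){axis}")
    # -> {"typename": "Tensor[]", "name": "out", "size": "axis"}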
def parse_infer_meta(infer_meta: Dict[str, Any]) -> Dict[str, Any]:
    infer_meta = copy(infer_meta)  # to prevent mutating the input
    if "param" not in infer_meta:
        infer_meta["param"] = None
    return infer_meta


def parse_candidates(s: str) -> Dict[str, Any]:
    "parse candidates joined by either '>'(ordered) or ','(unordered)"
    delimiter = ">" if ">" in s else ","
    ordered = delimiter == ">"
    candidates = parse_plain_list(s, delimiter)
    return {"ordered": ordered, "candidates": candidates}


def parse_plain_list(s: str, sep=",") -> List[str]:
    items = [item.strip() for item in s.strip().split(sep)]
    return items
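The kernel's backend/layout/data_type entries use this candidate syntax, where '>' expresses an ordered preference and ',' an unordered set:

    parse_candidates("out_grad > x")
    # -> {"ordered": True, "candidates": ["out_grad", "x"]}
    parse_candidates("x, y")
    # -> {"ordered": False, "candidates": ["x", "y"]}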
def parse_kernel(api_name: str,
                 kernel_config: Dict[str, Any]) -> Dict[str, Any]:
    # kernel :
    #     func : [], Kernel functions (example: scale, scale_sr)
    #     param : [], Input params of kernel
    #     backend : str, the names of param to choose the kernel backend, default is None
    #     layout : str, the names of param to choose the kernel layout, default is None
    #     data_type : str, the names of param to choose the kernel data_type, default is None
    kernel = {
        'func': None,  # up to 2 function names
        'param': None,
        'backend': None,
        'layout': None,
        'data_type': None
    }
    kernel['func'] = parse_plain_list(kernel_config['func'])
    if 'param' in kernel_config:
        kernel['param'] = kernel_config['param']

    if 'backend' in kernel_config:
        kernel['backend'] = parse_candidates(kernel_config["backend"])

    if 'layout' in kernel_config:
        kernel['layout'] = parse_candidates(kernel_config["layout"])

    if 'data_type' in kernel_config:
        kernel['data_type'] = parse_candidates(kernel_config["data_type"])
    return kernel


def parse_inplace(api_name: str, inplace_cfg: str) -> Dict[str, str]:
    inplace_map = {}
    inplace_cfg = inplace_cfg.lstrip("(").rstrip(")")
    pairs = parse_plain_list(inplace_cfg)
    for pair in pairs:
        in_name, out_name = parse_plain_list(pair, sep="->")
        inplace_map[out_name] = in_name
    return inplace_map


def parse_invoke(api_name: str, invoke_config: str) -> Dict[str, Any]:
    invoke_config = invoke_config.strip()
    func, rest = invoke_config.split("(", 1)
    func = func.strip()
    args = rest.rstrip(")").strip()
    invocation = {"func": func, "args": args}
    return invocation
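Illustrative inputs and outputs for the two helpers above; the inplace pair is a typical example (not taken from this diff), while the invoke string is the sum_double_grad entry from backward.yaml above:

    parse_inplace("relu", "(x -> out)")
    # -> {"out": "x"}
    parse_invoke("sum_double_grad",
                 "sum_grad(grad_grad_x, grad_grad_out_grad, dims, keep_dim, reduce_all, grad_grad_x_grad)")
    # -> {"func": "sum_grad",
    #     "args": "grad_grad_x, grad_grad_out_grad, dims, keep_dim, reduce_all, grad_grad_x_grad"}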
def extract_type_and_name(records: List[Dict]) -> List[Dict]:
    """extract type and name from forward call, it is simpler than forward api."""
    extracted = [{
        "name": item["name"],
        "typename": item["typename"]
    } for item in records]
    return extracted


def parse_forward(api_name: str, forward_config: str) -> Dict[str, Any]:
    # api_name (const Tensor& input, ... , int attr, ...) -> Tensor(out)
    result = re.search(
        r"(?P<api>[a-z][a-z0-9_]+)\s*(?P<args>\([^\)]+\))\s*->\s*(?P<outputs>.+)",
        forward_config)
    api = result.group("api")
    outputs = parse_outputs(api_name, result.group("outputs"))
    outputs = extract_type_and_name(outputs)

    inputs, attrs = parse_input_and_attr(api_name, result.group("args"))
    inputs = extract_type_and_name(inputs)
    attrs = extract_type_and_name(attrs)
    forward_cfg = {
        "name": api,
        "inputs": inputs,
        "attrs": attrs,
        "outputs": outputs
    }
    return forward_cfg
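For instance, the swish_grad entry's forward string from backward.yaml parses to (hand-derived from the code above):

    parse_forward("swish_grad", "swish (Tensor x, float beta=1.0) -> Tensor(out)")
    # -> {"name": "swish",
    #     "inputs": [{"name": "x", "typename": "Tensor"}],
    #     "attrs": [{"name": "beta", "typename": "float"}],
    #     "outputs": [{"name": "out", "typename": "Tensor"}]}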
def parse_api_entry(api_entry: Dict[str, Any], name_field="api"):
    api_name = api_entry[name_field]
    inputs, attrs = parse_input_and_attr(api_name, api_entry["args"])
    outputs = parse_outputs(api_name, api_entry["output"])

    # validate default value of DataType and DataLayout
    for attr in attrs:
        if "default_value" in attr:
            typename = attr["typename"]
            default_value = attr["default_value"]
            if typename == "DataType":
                assert "DataType" in default_value, f"invalid DataType default value in {api_name}"
                # remove namespace
                default_value = default_value[default_value.find("DataType"):]
                attr["default_value"] = default_value
            elif typename == "DataLayout":
                assert "DataLayout" in default_value, f"invalid DataLayout default value in {api_name}"
                default_value = default_value[default_value.find("DataLayout"):]
                attr["default_value"] = default_value

    input_names = [item["name"] for item in inputs]
    attr_names = [item["name"] for item in attrs]
    output_names = [item["name"] for item in outputs]

    # add optional tag for every input
    for input in inputs:
        input["optional"] = False
    if "optional" in api_entry:
        optional_args = parse_plain_list(api_entry["optional"])
        for name in optional_args:
            assert name in input_names, f"{api_name} has an optional input: '{name}' which is not an input."
        for input in inputs:
            if input["name"] in optional_args:
                input["optional"] = True

    # add intermediate tag for every output
    for output in outputs:
        output["intermediate"] = False
    if "intermediate" in api_entry:
        intermediate_outs = parse_plain_list(api_entry["intermediate"])
        for name in intermediate_outs:
            assert name in output_names, f"{api_name} has an intermediate output: '{name}' which is not an output."
        for output in outputs:
            if output["name"] in intermediate_outs:
                output["intermediate"] = True

    # add no_need_buffer for every input
    for input in inputs:
        input["no_need_buffer"] = False
    if "no_need_buffer" in api_entry:
        no_buffer_args = parse_plain_list(api_entry["no_need_buffer"])
        for name in no_buffer_args:
            assert name in input_names, f"{api_name} has a no_need_buffer input: '{name}' which is not an input."
        for input in inputs:
            if input["name"] in no_buffer_args:
                input["no_need_buffer"] = True
    else:
        no_buffer_args = None

    # TODO(chenfeiyu): data_transform

    api = {
        "name": api_name,
        "inputs": inputs,
        "attrs": attrs,
        "outputs": outputs,
        "no_need_buffer": no_buffer_args
    }

    # invokes another api?
    is_base_api = "invoke" not in api_entry

    if is_base_api:
        # kernel
        kernel = parse_kernel(api_name, api_entry["kernel"])
        if kernel["param"] is None:
            kernel["param"] = input_names + attr_names

        # infer meta
        infer_meta = parse_infer_meta(api_entry["infer_meta"])
        if infer_meta["param"] is None:
            infer_meta["param"] = copy(kernel["param"])

        # inplace
        if "inplace" in api_entry:
            inplace_pairs = parse_inplace(api_name, api_entry["inplace"])
        else:
            inplace_pairs = None
        api.update({
            "infer_meta": infer_meta,
            "kernel": kernel,
            "inplace": inplace_pairs
        })
    else:
        # invoke
        invoke = parse_invoke(api_name, api_entry["invoke"])
        api["invoke"] = invoke

    # backward
    if "backward" in api_entry:
        backward = api_entry["backward"]
    else:
        backward = None
    api["backward"] = backward

    # forward for backward_apis
    is_backward_api = name_field == "backward_api"
    if is_backward_api:
        if "forward" in api_entry:
            forward = parse_forward(api_name, api_entry["forward"])
            # validate the backward api against its recorded forward signature
            validate_backward_inputs(api_name, forward["inputs"],
                                     forward["outputs"], inputs)
            validate_backward_attrs(api_name, forward["attrs"], attrs)
            validate_backward_outputs(api_name, forward["inputs"], outputs)
        else:
            forward = None
        api["forward"] = forward
    return api
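Putting the pieces together, a hand-worked sketch of what parse_api_entry yields for the gather entry shown earlier (the entry dict is a minimal stand-in for the yaml record, and only a few fields are checked):

    entry = {
        "api": "gather",
        "args": "(Tensor x, Tensor index, Scalar(int) axis=0)",
        "output": "Tensor(out)",
        "infer_meta": {"func": "GatherInferMeta"},
        "kernel": {"func": "gather"},
        "backward": "gather_grad",
    }
    api = parse_api_entry(entry)
    assert api["name"] == "gather"
    assert [i["name"] for i in api["inputs"]] == ["x", "index"]
    assert api["attrs"][0]["default_value"] == "0"
    # kernel param defaults to all inputs plus attrs, in declaration order
    assert api["kernel"]["param"] == ["x", "index", "axis"]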
def validate_backward_attrs(api, forward_attrs, backward_attrs):
    if len(forward_attrs) >= len(backward_attrs):
        return
    num_exceptional_attrs = len(backward_attrs) - len(forward_attrs)
    # this is a not-that-clean trick to allow a backward api to have more
    # attrs than the forward api, as long as they all have default values
    for i in range(-num_exceptional_attrs, 0):
        assert "default_value" in backward_attrs[
            i], f"{api} has exceptional attr without default value"


def validate_backward_inputs(api, forward_inputs, forward_outputs,
                             backward_inputs):
    forward_input_names = [item["name"] for item in forward_inputs]
    forward_output_names = [item["name"] for item in forward_outputs]
    backward_input_names = [item["name"] for item in backward_inputs]

    assert len(backward_input_names) <= len(forward_input_names) + 2 * len(
        forward_output_names), f"{api} has too many inputs."


def validate_backward_outputs(api, forward_inputs, backward_outputs):
    assert len(backward_outputs) <= len(
        forward_inputs), f"{api} has too many outputs"


def cross_validate(apis):
    for name, api in apis.items():
        if "forward" in api:
            fw_call = api["forward"]
            fw_name = fw_call["name"]
            if fw_name not in apis:
                print(
                    f"Something Wrong here, this backward api({name})'s forward api({fw_name}) does not exist."
                )
            else:
                fw_api = apis[fw_name]
                if "backward" not in fw_api or fw_api["backward"] is None:
                    print(
                        f"Something Wrong here, {name}'s forward api({fw_name}) does not claim {name} as its backward."
                    )
                else:
                    assert fw_api[
                        "backward"] == name, f"{name}: backward and forward name mismatch"

                assert len(fw_call["inputs"]) <= len(
                    fw_api["inputs"]
                ), f"{name}: forward call has more inputs than the api"
                for (input, input_) in zip(fw_call["inputs"], fw_api["inputs"]):
                    assert input["typename"] == input_[
                        "typename"], f"type mismatch in {name} and {fw_name}"

                assert len(fw_call["attrs"]) <= len(
                    fw_api["attrs"]
                ), f"{name}: forward call has more attrs than the api"
                for (attr, attr_) in zip(fw_call["attrs"], fw_api["attrs"]):
                    if attr["typename"] == "Scalar":
                        # special case for Scalar, fw_call can omit the type
                        assert re.match(
                            r"Scalar(\(\w+\))*", attr_["typename"]
                        ), f"type mismatch in {name} and {fw_name}"
                    else:
                        assert attr["typename"] == attr_[
                            "typename"], f"type mismatch in {name} and {fw_name}"

                assert len(fw_call["outputs"]) == len(
                    fw_api["outputs"]
                ), f"{name}: forward call has a different number of outputs than the api"
                for (output, output_) in zip(fw_call["outputs"],
                                             fw_api["outputs"]):
                    assert output["typename"] == output_[
                        "typename"], f"type mismatch in {name} and {fw_name}"
{% from "operator_utils.c.j2" import name_map, register_name_map %}
// this file is generated by python/paddle/utils/code_gen/generate_op.py, do not edit.
#include "paddle/phi/core/compat/op_utils.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/utils/small_vector.h"
namespace phi {
using paddle::framework::GradVarName;
{% for api in apis %}
{% if api is base_api %}
{{name_map(api)}}
{% endif %}
{% endfor %}
{% for api in backward_apis %}
{% if api is base_api %}
{{name_map(api)}}
{% endif %}
{% endfor %}
} // namespace phi
{% for api in apis + backward_apis %}
{% if api is base_api %}
{{register_name_map(api)}}
{% endif %}
{% endfor %}
{% from "operator_utils.c.j2" import op_maker, backward_op_maker, operator, register_op_with_components %}
// this file is generated by python/paddle/utils/code_gen/generate_op.py, do not edit.
#include <string>
#include "paddle/fluid/framework/infershape_utils.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/convert_utils.h"
#include "paddle/phi/core/infermeta_utils.h"
#include "paddle/phi/infermeta/nullary.h"
#include "paddle/phi/infermeta/unary.h"
#include "paddle/phi/infermeta/binary.h"
#include "paddle/phi/infermeta/ternary.h"
#include "paddle/phi/infermeta/multiary.h"
#include "paddle/phi/infermeta/backward.cc"
namespace paddle {
namespace operators {
using paddle::framework::GradVarName;
{% for api in apis %}
{% if api is base_api %}
{{op_maker(api)}}
{{operator(api)}}
{% endif %}
{% endfor %}
{% for api in backward_apis %}
{% if api is base_api %}
{{backward_op_maker(api, api_dict[api["forward"]["name"]])}}
{{operator(api)}}
{% endif %}
{% endfor %}
} // namespace operators
} // namespace paddle
namespace ops = paddle::operators;
{% for api in apis + backward_apis %}
{% if api is base_api %}
{{register_op_with_components(api)}}
{% endif %}
{% endfor %}
{# ----------------------------- op maker ----------------------------------- #}
{% macro op_maker(api) %}
{% set api_name = api["name"] %}
class {{api_name | to_pascal_case}}OpMaker : public framework::OpProtoAndCheckerMaker {
public:
void Make() override {
{% filter indent(4, True) %}
{% for input in api["inputs"] %}
{% if input["name"] in api["kernel"]["param"] %}
{{add_input(loop.index0, input, api_name)}};
{% endif %}
{% endfor %}
{% for output in api["outputs"] %}
{{add_output(loop.index0, output, api_name)}};
{% endfor %}
{% for attr in api["attrs"] %}
{% if attr["name"] in api["kernel"]["param"] %}
{{add_attr(loop.index0, attr, api_name)}};
{% endif %}
{% endfor %}
{% endfilter %}
AddComment(R"DOC(
TODO: Documentation of {{api_name}} op.
)DOC");
}
};
{% endmacro %}
{# add input, it could be duplicable or dispensable #}
{% macro add_input(i, input, op_name) %}{# inline #}
{% set name = input["name"] %}
{% set typename = input["typename"] %}
AddInput({{name| to_opmaker_name}}, "({{typename}}), input {{i}} of {{op_name}} op.")
{%- if typename is vec +%}
.AsDuplicable()
{%- endif %}
{%- if input["optional"] +%}
.AsDispensable()
{%- endif %}
{%- endmacro %}
{# add output, it could be duplicable or intermediate, however, optional output is not supported #}
{% macro add_output(i, output, op_name) %}{# inline #}
{% set name = output["name"] %}
{% set typename = output["typename"] %}
{% set is_intermediate = output["intermediate"] %}
AddOutput({{name | to_opmaker_name}}, "({{typename}}), output {{i}} of {{op_name}} op.")
{%- if typename is vec +%}
.AsDuplicable()
{%- endif %}
{%- if is_intermediate +%}
.AsIntermediate()
{%- endif %}
{%- endmacro %}
{# add attribute, and process default value if needed #}
{% macro add_attr(i, attr, op_name) %}{# inline #}
{% set name = attr["name"] %}
{% set typename = attr["typename"] %}
{% if typename is scalar %}
AddInput("{{name | to_pascal_case}}Tensor", "attribute {{i}} for {{op_name}} op from 0D Tensor.")
.AsDispensable();
{% elif typename == "IntArray" %}{# the type has been renamed #}
AddInput("{{name | to_pascal_case}}Tensor", "attribute {{i}} for {{op_name}} op from 1D integer Tensor.")
.AsDispensable();
AddInput("{{name | to_pascal_case}}TensorList", "attribute {{i}} for {{op_name}} op from list fo 0D integer Tensors.")
.AsDuplicable()
.AsDispensable();
{% endif %}
AddAttr<{{typename | to_op_attr_type}}>("{{name}}", "({{typename | to_op_attr_type}}), attribute {{i}} for {{op_name}} op.")
{%- if "default_value" in attr +%}
.SetDefault({{process_default_value(attr)}})
{%- endif %}
{%- endmacro %}
{# process default value for attributes, some attribute has different types and different default values in api & opmaker #}
{% macro process_default_value(attr) %}{# inline #}
{% set default_value = attr["default_value"] %}
{% set typename = attr["typename"] %}
{% if typename == "DataType" %}{# convert back to VarType #}
static_cast<int>(framework::TransToProtoVarType(experimental::{{default_value}}))
{%- elif typename == "DataLayout" %} {# does DataLayout need any processing?#}
static_cast<int>(experimental::{{default_value}})
{%- elif typename == "Place" %}{# construct a Place to get the type #}
static_cast<int>(phi::Place({{"phi::" if not default_value is initializer_list}}{{default_value}}).GetType())
{%- else %}{# pass through as-is #}
{{default_value}}
{%- endif %}
{%- endmacro %}
{# --------------------------------------- name mapping ---------------------------------------------- #}
{% macro name_map(api) %}
KernelSignature {{api["name"] | to_pascal_case }}OpArgumentMapping(const ArgumentMappingContext& ctx) {
{% set kernel_args = api["kernel"]["param"] %}
{{get_input_list(api["inputs"], kernel_args)}};
paddle::small_vector<const char*> attrs;
{% for attr in api["attrs"]%}
{% filter indent(2)%}
{{get_an_attr(attr)}};
{% endfilter %}
{% endfor %}
{{get_output_list(api["outputs"], kernel_args)}};
return KernelSignature("{{api["name"]}}", std::move(inputs), std::move(attrs), std::move(outputs));
}
{% endmacro %}
{% macro register_name_map(api) %}
PD_REGISTER_ARG_MAPPING_FN({{api["name"]}}, phi::{{api["name"] | to_pascal_case}}OpArgumentMapping);
{%- endmacro %}
{% macro get_input_list(inputs, kernel_args) %}{# inline #}
paddle::small_vector<const char*> inputs {
{%- for input in inputs %}
{%- if input["name"] in kernel_args %}
{{input["name"] | to_opmaker_name_cstr}}{{", " if not loop.last}}
{%- endif %}
{%- endfor %}
}
{%- endmacro %}
{% macro get_an_attr(attr) %}{# inline #}
{% set typename = attr["typename"] %}
{% set name = attr["name"] %}
{% if typename is scalar %}{# scalar correspond to a dispensable input and an attr in opmaker #}
attrs.emplace_back(
ctx.HasInput("{{name | to_pascal_case}}")
? "{{name | to_pascal_case}}Tensor"
: "{{name}}"
)
{%- elif typename == "IntArray" %}
attrs.emplace_back(
ctx.HasInput("{{name | to_pascal_case}}Tensor")
? "{{name | to_pascal_case}}Tensor"
: ctx.InputSize("{{name | to_pascal_case}}TensorList") > 0
? "{{name | to_pascal_case}}TensorList"
: "{{name}}"
)
{%- else %}
attrs.emplace_back("{{name}}")
{%- endif %}
{%- endmacro %}
{% macro get_output_list(outputs, kernel_args) %}{# inline #}
paddle::small_vector<const char*> outputs {
{%- for output in outputs %}
{{output["name"] | to_opmaker_name_cstr}}{{", " if not loop.last}}
{%- endfor %}
}
{%- endmacro %}
{# --------------------------------------- operator ---------------------------------------------- #}
{% macro operator(api) %}
class {{api["name"] | to_pascal_case}}Op : public framework::OperatorWithKernel {
public:
using framework::OperatorWithKernel::OperatorWithKernel;
};
{# infershape functor #}
DECLARE_INFER_SHAPE_FUNCTOR({{api["name"]}}, {{api["name"] | to_pascal_case}}InferShapeFunctor,
PD_INFER_META(phi::{{api["infer_meta"]["func"]}}));
{# inplace inferer #}
{% if api["inplace"] is not none %}
{% set inplace_map %}
{% for source, target in api["inplace"].items() %}
{{"{"}}{{source | to_opmaker_name}}, {{target | to_opmaker_name}}{{"}"}}{{", " if not loop.last}}
{%- endfor %}
{%- endset %}
DECLARE_INPLACE_OP_INFERER({{api["name"] | to_pascal_case}}InplaceInferer,
{{inplace_map}});
{% endif %}
{# no_need_buffer inferer #}
{% if api["no_need_buffer"] is not none %}
DECLARE_NO_NEED_BUFFER_VARS_INFERER({{api["name"] | to_pascal_case}}NoNeedBufferVarInferer,
{{api["no_need_buffer"] | map("to_opmaker_name") | join(", ")}});
{% endif %}
{% endmacro%}
{% macro register_op_with_components(api) %}
{% set name = api["name"] %}
REGISTER_OPERATOR({{name}}, ops::{{name | to_pascal_case}}Op,
{% if not "forward" in api %}{# it is a forward api #}
ops::{{name | to_pascal_case}}OpMaker,
{% endif %}
{% if "backward" in api and api["backward"] is not none %}{# backward #}
{% set backward_name = api["backward"] %}
ops::{{backward_name | to_pascal_case}}OpMaker<paddle::framework::OpDesc>,
ops::{{backward_name | to_pascal_case}}OpMaker<paddle::imperative::OpBase>,
{% endif %}
{% if api is supports_inplace %}{# inplace#}
ops::{{name | to_pascal_case}}InplaceInferer,
{% endif %}
{% if api is supports_no_need_buffer %}{# no_need_buffer #}
ops::{{name | to_pascal_case}}NoNeedBufferVarInferer,
{% endif %}
ops::{{name | to_pascal_case}}InferShapeFunctor);
{% endmacro %}
{# --------------------------------------- backward op maker ---------------------------------------------- #}
{% macro backward_op_maker(api, forward_api) %}
{% set name = api["name"] %}
{% set forward_input_names = api["forward"]["inputs"] | map(attribute="name") | list %}
{% set forward_output_names = api["forward"]["outputs"] | map(attribute="name") | list %}
{% set forward_attr_names = api["forward"]["attrs"] | map(attribute="name") | list %}
{% set forward_input_orig_names = forward_api["inputs"] | map(attribute="name") | list %}
{% set forward_output_orig_names = forward_api["outputs"] | map(attribute="name") | list %}
{% set forward_attr_orig_names = forward_api["attrs"] | map(attribute="name") | list %}
template <typename T>
class {{name | to_pascal_case}}OpMaker : public framework::SingleGradOpMaker<T> {
public:
using framework::SingleGradOpMaker<T>::SingleGradOpMaker;
protected:
void Apply(GradOpPtr<T> grad_op) const override {
grad_op->SetType("{{name}}");
{% for input in api["inputs"] %}
grad_op->SetInput("{{input["name"] | to_pascal_case}}", this->{{extract_input_from_forward(
input["name"],
forward_input_names,
forward_output_names,
forward_input_orig_names,
forward_output_orig_names)}});
{% endfor %}
{% for output in api["outputs"] %}
grad_op->SetOutput("{{output["name"] | to_pascal_case}}", this->{{extract_output_from_forward(
output["name"],
forward_input_names,
forward_output_names,
forward_input_orig_names,
forward_output_orig_names)}});
{% endfor %}
{% for attr in api["attrs"] %}
{% set attr_name = attr["name"] %}
{% if attr_name in forward_attr_names %}
{% if attr["typename"] == "IntArray" %}
grad_op->SetInput("{{attr_name | to_pascal_case}}Tensor", this->Input("{{attr_name | to_pascal_case}}Tensor"));
grad_op->SetInput("{{attr_name | to_pascal_case}}TensorList", this->Input("{{attr_name | to_pascal_case}}TensorList"));
{% elif attr["typename"] == "Scalar" %}
grad_op->SetInput("{{attr_name | to_pascal_case}}Tensor", this->Input("{{attr_name | to_pascal_case}}Tensor"));
{% endif %}
grad_op->SetAttr("{{attr_name}}", this->GetAttr("{{forward_attr_orig_names[forward_attr_names.index(attr_name)]}}"));
{% else %}{# maybe something wrong: backward op has more attrs than the forward one#}
grad_op->AddAttr<{{attr["typename"] | to_op_attr_type}}>("{{attr_name}}", "({{attr["typename"] | to_op_attr_type}}), exceptional attr {{attr_name}}");
grad_op->SetAttr("{{attr_name}}", {{process_default_value(attr)}});
{% endif %}
{% endfor %}
}
};
{% endmacro %}
{% macro extract_input_from_forward(name,
input_names, output_names,
input_orig_names, output_orig_names) %}{# inline #}
{% if name in input_names %}
{% set name_in_forward_orig = input_orig_names[input_names.index(name)]%}
Input("{{name_in_forward_orig | to_pascal_case}}")
{%- elif name in output_names %}
{% set name_in_forward_orig = output_orig_names[output_names.index(name)]%}
Output("{{name | to_pascal_case}}")
{%- elif name.endswith("_grad") %}{# output grad#}
{% set name_in_forward = name.removesuffix("_grad") %}
{% if name_in_forward in output_names %}
{% set name_in_forward_orig = output_orig_names[output_names.index(name_in_forward)] %}
OutputGrad("{{name_in_forward_orig | to_pascal_case}}")
{%- endif %}
{%- endif %}
{%- endmacro %}
{% macro extract_output_from_forward(name, input_names, output_names,
input_orig_names, output_orig_names) %}{# inline #}
{% if name.removesuffix("_grad") in input_names %}
{% set name_in_forward = name.removesuffix("_grad") %}
{% set name_in_forward_orig = input_orig_names[input_names.index(name_in_forward)]%}
InputGrad("{{name.removesuffix("_grad") | to_pascal_case}}")
{%- elif (name | to_input_name) in input_names %}
{% set name_in_forward = name | to_input_name %}
{% set name_in_forward_orig = input_orig_names[input_names.index(name_in_forward)]%}
InputGrad("{{name | to_input_name | to_pascal_case}}")
{%- endif %}
{%- endmacro %}
{% macro extract_attr_from_forward(name, attr_names, attr_origin_names) %}
this->GetAttr("{{name}}")
{%- endmacro %}
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re

from type_mapping import input_types_map, attr_types_map, output_type_map


# tests for typename
def is_input(s):
    return s in input_types_map


def is_attr(s):
    return s in attr_types_map


def is_output(s):
    return s in output_type_map


def is_vec(s):
    return s.endswith("[]")


def is_scalar(s):
    return re.match(r"Scalar(\(\w+\))*", s) is not None


def is_initializer_list(s):
    return s == "{}"


def is_base_api(api):
    return "kernel" in api and "infer_meta" in api


def supports_selected_rows_kernel(api):
    return is_base_api(api) and len(api["kernel"]["func"]) == 2
def supports_inplace(api):
    # parse_api_entry stores the inplace pair map under the "inplace" key
    return api.get("inplace") is not None
def supports_no_need_buffer(api):
    for input in api["inputs"]:
        if input["no_need_buffer"]:
            return True
    return False
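Quick sanity checks for the predicates above, derived from their definitions and the type maps below:

    assert is_input("Tensor") and is_attr("IntArray") and is_output("Tensor[]")
    assert is_vec("Tensor[]") and not is_vec("Tensor")
    assert is_scalar("Scalar") and is_scalar("Scalar(int64_t)")
    assert is_initializer_list("{}")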
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# type mapping: types in yaml -> types in c++ API
input_types_map = {
    'Tensor': 'const Tensor&',
    'Tensor[]': 'const std::vector<Tensor>&'
}

optional_input_types_map = {
    'Tensor': 'const paddle::optional<Tensor>&',
    'Tensor[]': 'const paddle::optional<std::vector<Tensor>>&',
}

attr_types_map = {
    # special types
    'IntArray': 'const IntArray&',
    'Scalar': 'const Scalar&',
    'Scalar(bool)': 'const Scalar&',
    'Scalar(int)': 'const Scalar&',
    'Scalar(int64_t)': 'const Scalar&',
    'Scalar(float)': 'const Scalar&',
    'Place': 'Place',
    'DataLayout': 'DataLayout',
    'DataType': 'DataType',
    # scalar types
    'bool': 'bool',
    'int': 'int',
    'int64_t': 'int64_t',
    'float': 'float',
    'double': 'double',
    'str': 'const std::string&',
    # vector types
    'bool[]': 'const std::vector<bool>&',
    'int[]': 'const std::vector<int>&',
    'int64_t[]': 'const std::vector<int64_t>&',
    'float[]': 'const std::vector<float>&',
    'double[]': 'const std::vector<double>&',
    'str[]': 'const std::vector<std::string>&',
}

opmaker_attr_types_map = {
    # special types
    'IntArray': 'std::vector<int64_t>',
    'Scalar': 'float',
    'Scalar(bool)': 'bool',
    'Scalar(int)': 'int',
    'Scalar(int64_t)': 'int64_t',
    'Scalar(float)': 'float',
    'Place': 'int',
    'DataLayout': 'int',
    'DataType': 'int',
    # scalar types
    'bool': 'bool',
    'int': 'int',
    'int64_t': 'int64_t',
    'float': 'float',
    'double': 'double',
    'str': 'std::string',
    # vector types
    'bool[]': 'std::vector<bool>',
    'int[]': 'std::vector<int>',
    'int64_t[]': 'std::vector<int64_t>',
    'float[]': 'std::vector<float>',
    'double[]': 'std::vector<double>',
    'str[]': 'std::vector<std::string>',
}

output_type_map = {'Tensor': 'Tensor', 'Tensor[]': 'std::vector<Tensor>'}

# ------------------------------ phi attr ------------------------------
phi_attr_types_map = attr_types_map.copy()
phi_attr_types_map.update({
    'IntArray': 'const phi::IntArray&',
    'Scalar': 'const phi::Scalar&'
})

# --------------------------- phi dense tensor ---------------------------
# type mapping to phi, used in implementation
dense_input_types_map = {
    'Tensor': 'const phi::DenseTensor&',
    'Tensor[]': 'const std::vector<const phi::DenseTensor*>&',
}

dense_optional_input_types_map = {
    'Tensor': 'paddle::optional<const phi::DenseTensor&>',
    'Tensor[]': 'paddle::optional<const std::vector<phi::DenseTensor>&>'
}

dense_output_types_map = {
    'Tensor': 'phi::DenseTensor*',
    'Tensor[]': 'std::vector<phi::DenseTensor*>'
}

# ---------------------- phi selected rows ------------------------------
# type mapping to phi, used in implementation
sr_input_types_map = {'Tensor': 'const phi::SelectedRows&', }

sr_optional_input_types_map = {
    'Tensor': 'paddle::optional<const phi::SelectedRows&>',
}

sr_output_types_map = {'Tensor': 'phi::SelectedRows*', }
@@ -117,9 +117,13 @@ namespace phi {


def generate_wrapped_infermeta_and_register(api_yaml_path, header_file_path,
                                            source_file_path):
-    with open(api_yaml_path, 'r') as f:
-        apis = yaml.load(f, Loader=yaml.FullLoader)
+    apis = []
+    for each_api_yaml in api_yaml_path:
+        with open(each_api_yaml, 'r') as f:
+            api_list = yaml.load(f, Loader=yaml.FullLoader)
+            if api_list:
+                apis.extend(api_list)

    header_file = open(header_file_path, 'w')
    source_file = open(source_file_path, 'w')
@@ -159,6 +163,7 @@ def main():
    parser.add_argument(
        '--api_yaml_path',
        help='path to api yaml file',
+        nargs='+',
        default='python/paddle/utils/code_gen/api.yaml')
    parser.add_argument(
        '--wrapped_infermeta_header_path',
......