# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import yaml
import argparse
import re

from api_base import BaseAPI, PREFIX_TENSOR_NAME

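# C++ return-type spellings used when an output reuses an input in inplace
# mode; the second map covers inputs declared as paddle::optional.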
inplace_out_type_map = {
    "Tensor": "Tensor&",
    "std::vector<Tensor>": "std::vector<Tensor>&"
}

inplace_optional_out_type_map = {
    "Tensor": "paddle::optional<Tensor>&",
    "std::vector<Tensor>": "paddle::optional<std::vector<Tensor>>&"
}


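# ForwardAPI wraps one entry of the api yaml and, via BaseAPI, generates the
# C++ declaration and definition of the corresponding forward API, including
# handling of intermediate outputs, inplace outputs and view outputs.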
class ForwardAPI(BaseAPI):

    def __init__(self, api_item_yaml):
        super(ForwardAPI, self).__init__(api_item_yaml)
        self.is_dygraph_api, self.intermediate_outs = self.parse_intermediate(
            api_item_yaml)
        self.inplace_map, self.view_map = self.parse_inplace_and_view(
            api_item_yaml)

    def get_api_func_name(self):
        if self.is_dygraph_api:
            return self.api + '_intermediate'
        else:
            return self.api

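    # 'intermediate' is a comma-separated list of output names that are
    # excluded from the public return value; only the '<api>_intermediate'
    # dygraph variant returns them (see gene_return_code below).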
    def parse_intermediate(self, api_item_yaml):
        if 'intermediate' in api_item_yaml:
            intermediate_outs = [
                item.strip()
                for item in api_item_yaml['intermediate'].split(',')
            ]
            return True, intermediate_outs
        else:
            return False, []

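    # Parse the optional 'inplace' and 'view' yaml entries. Each entry is a
    # comma-separated list of "input -> output" pairs (per the regex below),
    # e.g. "x -> out" records that output 'out' aliases input 'x'. The exact
    # yaml spelling of the value is illustrative here, not authoritative.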
    def parse_inplace_and_view(self, api_item_yaml):
        inplace_map, view_map = {}, {}
        for mode in ['inplace', 'view']:
            if mode in api_item_yaml:
                if mode == 'inplace':
                    inplace_map = {}
                else:
                    view_map = {}
                in_out_mapping_list = api_item_yaml[mode].split(',')
                for item in in_out_mapping_list:
                    result = re.search(r"(?P<in>\w+)\s*->\s*(?P<out>\w+)", item)
                    in_val = result.group('in')
                    out_val = result.group('out')
                    assert in_val in self.inputs['names'], \
                        f"{self.api} : {mode} input error: the input var name('{in_val}') is not found in the input args of {self.api}."
                    assert out_val in self.outputs['names'], \
                        f"{self.api} : {mode} output error: the output var name('{out_val}') is not found in the output args of {self.api}."

                    if mode == 'inplace':
                        inplace_map[out_val] = in_val
                    else:
                        view_map[out_val] = in_val

        return inplace_map, view_map

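    # C++ type of api_output with intermediate outputs included: a single
    # output keeps its own type, multiple outputs are packed into a std::tuple.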
    def get_return_type_with_intermediate(self, inplace_flag=False):
        out_type_list = []
        for i, out_type in enumerate(self.outputs['types']):
            out_name = self.outputs['names'][i].split('@')[0]
            if inplace_flag and out_name in self.inplace_map:
                if self.inplace_map[out_name] in self.optional_vars:
                    out_type_list.append(
                        inplace_optional_out_type_map[out_type])
                else:
                    out_type_list.append(inplace_out_type_map[out_type])
            else:
                out_type_list.append(out_type)

        if len(out_type_list) == 1:
            return out_type_list[0]
        else:
            return "std::tuple<" + ", ".join(out_type_list) + ">"

    def get_return_type(self, inplace_flag=False):
        out_type_list = []
        for i, out_type in enumerate(self.outputs['types']):
            out_name = self.outputs['names'][i].split('@')[0]
            if inplace_flag and out_name in self.inplace_map:
                if self.inplace_map[out_name] in self.optional_vars:
                    out_type_list.append(
                        inplace_optional_out_type_map[out_type])
                else:
                    out_type_list.append(inplace_out_type_map[out_type])
            elif self.is_dygraph_api or out_name not in self.intermediate_outs:
                out_type_list.append(out_type)

        if len(out_type_list) == 1:
            return out_type_list[0]
        else:
            return "std::tuple<" + ", ".join(out_type_list) + ">"

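    # Generate the C++ return statement; intermediate outputs are dropped from
    # the returned value unless this is the dygraph '_intermediate' API.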
    def gene_return_code(self):
        if self.is_dygraph_api or len(self.intermediate_outs) == 0:
            return "return api_output;"
        else:
            return_out_list = []
            for i, name in enumerate(self.outputs['names']):
                if name.split('@')[0] not in self.intermediate_outs:
                    return_out_list.append(i)
            if len(return_out_list) == 1:
                return f"return std::get<{return_out_list[0]}>(api_output);"
            else:
                selected_code = [
                    f"std::get<{i}>(api_output)" for i in return_out_list
                ]
            return 'return std::make_tuple(' + ", ".join(selected_code) + ');'

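    # Generate the C++ code that declares api_output and binds kernel_out /
    # kernel_out_{i} to it via SetKernelOutput (or SetSelectedRowsKernelOutput),
    # handling inplace outputs, Tensor[] outputs and view outputs.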
    def gene_output(self,
                    out_dtype_list,
                    out_tensor_type_list=None,
                    code_indent='',
                    inplace_flag=False):
        kernel_output = []
        output_names = []
        output_create = ""
        return_type = self.get_return_type_with_intermediate(inplace_flag)

        if len(out_dtype_list) == 1:
            kernel_output.append('kernel_out')
            output_names.append('kernel_out')
            inplace_assign = " = " + self.inplace_map[
                self.outputs['names'][0]] if inplace_flag and self.outputs[
                    'names'][0] in self.inplace_map else ""
            output_create = f"""
{code_indent}  {return_type} api_output{inplace_assign};"""
            set_out_func = 'SetKernelOutput' if out_tensor_type_list is None or out_tensor_type_list[
                0] == 'dense' else 'SetSelectedRowsKernelOutput'
            if return_type == 'std::vector<Tensor>':
                assert self.outputs['out_size_expr'][0] is not None, \
                    f"{self.api}: The out size expr : '{{expr}}' should be set when output has Tensor[]. You can refer 'split' api."
                output_create = output_create + f"""
{code_indent}  auto kernel_out = {set_out_func}({self.outputs['out_size_expr'][0]}, kernel_backend, &api_output);"""

            else:
                output_create = output_create + f"""
{code_indent}  auto kernel_out = {set_out_func}(kernel_backend, &api_output);"""

            if not inplace_flag and self.view_map is not None and self.outputs[
                    'names'][0] in self.view_map:
                output_create = output_create + f"""
{code_indent}  kernel_out->ShareBufferWith(*{PREFIX_TENSOR_NAME}{self.view_map[self.outputs['names'][0]]});
{code_indent}  kernel_out->ShareInplaceVersionCounterWith(*{PREFIX_TENSOR_NAME}{self.view_map[self.outputs['names'][0]]});
{code_indent}  VLOG(3) << "Perform View between Output and Input Tensor, share allocation and inplace version.";"""

        elif len(out_dtype_list) > 1:
            output_create = f"""
{code_indent}  {return_type} api_output;"""

            if inplace_flag:
                output_create = f"""
{code_indent}  {return_type} api_output{{"""

                for out_name in self.outputs['names']:
                    if out_name in self.inplace_map:
                        output_create = output_create + self.inplace_map[
                            out_name] + ', '
                    else:
                        output_create += 'Tensor(), '
                output_create = output_create[:-2] + '};'

            for i in range(len(out_dtype_list)):
                kernel_output.append(f'kernel_out_{i}')
                output_names.append(f'kernel_out_{i}')
                set_out_func = 'SetKernelOutput' if out_tensor_type_list is None or out_tensor_type_list[
                    i] == 'dense' else 'SetSelectedRowsKernelOutput'

                get_out_code = f"&std::get<{i}>(api_output)"
                if self.outputs['names'][
                        i] in self.inplace_map and self.inplace_map[
                            self.outputs['names'][i]] in self.optional_vars:
                    get_out_code = f"std::get<{i}>(api_output).get_ptr()"

                if out_dtype_list[i] == 'std::vector<Tensor>':
                    assert self.outputs['out_size_expr'][i] is not None, \
                        f"{self.api}: The out size expr : '{{expr}}' should be set when output has Tensor[]. You can refer 'split' api."
                    output_create = output_create + f"""
{code_indent}  auto kernel_out_{i} = {set_out_func}({self.outputs['out_size_expr'][i]}, kernel_backend, {get_out_code});"""

                else:
                    output_create = output_create + f"""
{code_indent}  auto kernel_out_{i} = {set_out_func}(kernel_backend, {get_out_code});"""

                if not inplace_flag and self.view_map is not None and self.outputs[
                        'names'][i] in self.view_map:
                    output_create = output_create + f"""
{code_indent}  kernel_out_{i}->ShareBufferWith(*{PREFIX_TENSOR_NAME}{self.view_map[self.outputs['names'][i]]});
{code_indent}  kernel_out_{i}->ShareInplaceVersionCounterWith(*{PREFIX_TENSOR_NAME}{self.view_map[self.outputs['names'][i]]});
{code_indent}  VLOG(3) << "Perform View between Output and Input Tensor, share allocation and inplace version.";"""

        else:
            raise ValueError(
                "{} : Output error: the output should not be empty.".format(
                    self.api))

        return kernel_output, output_names, output_create


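# Fixed boilerplate emitted at the top of the generated header and source files.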
def header_include():
    return """
#include <tuple>

#include "paddle/phi/api/include/tensor.h"
#include "paddle/phi/common/scalar.h"
#include "paddle/phi/common/int_array.h"
#include "paddle/utils/optional.h"
"""


def source_include(header_file_path):
    return f"""
#include "{header_file_path}"
#include <memory>

#include "glog/logging.h"

#include "paddle/phi/api/lib/api_custom_impl.h"
#include "paddle/phi/api/lib/api_gen_utils.h"
#include "paddle/phi/api/lib/data_transform.h"
#include "paddle/phi/api/lib/kernel_dispatch.h"
#include "paddle/phi/core/kernel_registry.h"
#include "paddle/phi/infermeta/binary.h"
#include "paddle/phi/infermeta/multiary.h"
#include "paddle/phi/infermeta/nullary.h"
#include "paddle/phi/infermeta/unary.h"
#include "paddle/phi/infermeta/ternary.h"

#include "paddle/fluid/platform/profiler/event_tracing.h"

DECLARE_bool(conv2d_disable_cudnn);
"""


def api_namespace():
    return ("""
namespace paddle {
namespace experimental {

""", """

}  // namespace experimental
}  // namespace paddle
""")


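# Load the api yaml file(s) and write the generated C++ API header and source.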
def generate_api(api_yaml_path, header_file_path, source_file_path):
    apis = []

    for each_api_yaml in api_yaml_path:
        with open(each_api_yaml, 'r') as f:
            api_list = yaml.load(f, Loader=yaml.FullLoader)
            if api_list:
                apis.extend(api_list)

    header_file = open(header_file_path, 'w')
    source_file = open(source_file_path, 'w')

    namespace = api_namespace()

    header_file.write("#pragma once\n")
    header_file.write(header_include())
    header_file.write(namespace[0])

    include_header_file = "paddle/phi/api/include/api.h"
    source_file.write(source_include(include_header_file))
    source_file.write(namespace[0])

    for api in apis:
        forward_api = ForwardAPI(api)
        if forward_api.is_dygraph_api:
            forward_api.is_dygraph_api = False

        header_file.write(forward_api.gene_api_declaration())
        source_file.write(forward_api.gene_api_code())

    header_file.write(namespace[1])
    source_file.write(namespace[1])

    header_file.close()
    source_file.close()


def main():
    parser = argparse.ArgumentParser(
        description='Generate PaddlePaddle C++ API files')
    parser.add_argument('--api_yaml_path',
                        help='path to api yaml file',
                        nargs='+',
                        default='paddle/phi/api/yaml/api.yaml')

    parser.add_argument('--api_header_path',
                        help='output of generated api header code file',
                        default='paddle/phi/api/include/api.h')

    parser.add_argument('--api_source_path',
                        help='output of generated api source code file',
                        default='paddle/phi/api/lib/api.cc')

    options = parser.parse_args()

    api_yaml_path = options.api_yaml_path
    header_file_path = options.api_header_path
    source_file_path = options.api_source_path

    generate_api(api_yaml_path, header_file_path, source_file_path)


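# Typical invocation (paths shown are the argparse defaults; adjust as needed):
#   python api_gen.py --api_yaml_path paddle/phi/api/yaml/api.yaml \
#       --api_header_path paddle/phi/api/include/api.h \
#       --api_source_path paddle/phi/api/lib/api.cc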
if __name__ == '__main__':
    main()