# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import ast
import astor
import atexit
import copy
from paddle.utils import gast
import inspect
import importlib.util
import os
import sys
import shutil
import tempfile
import textwrap
import numpy as np

import paddle
from paddle.fluid import unique_name
from paddle.fluid.data_feeder import convert_dtype
from paddle.fluid import core
from paddle.fluid.layer_helper import LayerHelper
from paddle.fluid.layers import assign
from functools import reduce
from importlib.machinery import SourceFileLoader
import warnings


__all__ = []

# Note(Aurelius): Do not forget the dot `.`, to distinguish paddle from other
# modules such as paddlenlp.
PADDLE_MODULE_PREFIX = 'paddle.'
DYGRAPH_MODULE_PREFIX = 'paddle.fluid.dygraph'
DYGRAPH_TO_STATIC_MODULE_PREFIX = 'paddle.jit.dy2static'
GET_ARGS_FUNC_PREFIX = 'get_args'
SET_ARGS_FUNC_PREFIX = 'set_args'
ALREADY_D2S = '__already_d2s'
ARGS_NAME = '__args'
# NOTE(liym27): Please use `getattr(ast_node, ORIGI_INFO)` instead of the `.` operator to get the original information of an ast node.
ORIGI_INFO = "Original information of source code for ast node."


class BaseNodeVisitor(gast.NodeVisitor):
    """
    Implement customized NodeVisitor inherited from gast.NodeVisitor.
    Ancestor nodes are traced to easily support more operations on the
    currently visited node.
    """

    def __init__(self):
        self.ancestor_nodes = []

    def visit(self, node):
        """Visit a node."""
        self.ancestor_nodes.append(node)

        method = 'visit_' + node.__class__.__name__
        visitor = getattr(self, method, self.generic_visit)
        ret = visitor(node)
        self.ancestor_nodes.pop()
        return ret


dygraph_class_to_static_api = {
    "CosineDecay": "cosine_decay",
    "ExponentialDecay": "exponential_decay",
    "InverseTimeDecay": "inverse_time_decay",
    "NaturalExpDecay": "natural_exp_decay",
    "NoamDecay": "noam_decay",
    "PiecewiseDecay": "piecewise_decay",
    "PolynomialDecay": "polynomial_decay",
}

DEL_TEMP_DIR = True  # A flag to avoid atexit.register more than once
FOR_ITER_INDEX_PREFIX = '__for_loop_var_index'
FOR_ITER_TUPLE_PREFIX = '__for_loop_iter_tuple'
FOR_ITER_TARGET_PREFIX = '__for_loop_iter_target'
FOR_ITER_ITERATOR_PREFIX = '__for_loop_iter_iterator'
FOR_ITER_TUPLE_INDEX_PREFIX = '__for_loop_iter_tuple_index'
FOR_ITER_VAR_LEN_PREFIX = '__for_loop_var_len'
FOR_ITER_VAR_NAME_PREFIX = '__for_loop_iter_var'
FOR_ITER_ZIP_TO_LIST_PREFIX = '__for_loop_iter_zip'

RE_PYNAME = '[a-zA-Z0-9_]+'
RE_PYMODULE = r'[a-zA-Z0-9_]+\.'


def data_layer_not_check(name, shape, dtype='float32', lod_level=0):
    """
    This function creates a Tensor on the global block. The created Tensor
    doesn't check the dtype and the shape of feed data because dygraph input
    data can be various-length. This API is used in translating dygraph into
    static graph.

107
     Note:
108 109 110 111 112 113 114 115 116 117
        The default :code:`stop_gradient` attribute of the Tensor created by
        this API is true, which means the gradient won't be passed backward
        through the data Tensor. Set :code:`var.stop_gradient = False` If
        user would like to pass backward gradient.

    Args:
       name (str): The name/alias of the Tensor, see :ref:`api_guide_Name`
           for more details.
       shape (list|tuple): List|Tuple of integers declaring the shape. You can
           set "None" at a dimension to indicate the dimension can be of any
118
           size. For example, it is useful to set changeable batch size as "None"
119 120 121 122 123 124 125 126 127 128 129 130
       dtype (np.dtype|VarType|str, optional): The type of the data. Supported
           dtype: bool, float16, float32, float64, int8, int16, int32, int64,
           uint8. Default: float32
       lod_level (int, optional): The LoD level of the LoDTensor. Usually users
           don't have to set this value. For more details about when and how to
           use LoD level, see :ref:`user_guide_lod_tensor` . Default: 0

    Returns:
        Tensor: The global Tensor that gives access to the data.
    """
    helper = LayerHelper('data', **locals())
    shape = list(shape)
    for i in range(len(shape)):
        if shape[i] is None:
            shape[i] = -1

    return helper.create_global_variable(
        name=name,
        shape=shape,
        dtype=dtype,
        type=core.VarDesc.VarType.LOD_TENSOR,
        stop_gradient=True,
        lod_level=lod_level,
        is_data=True,
        need_check_feed=False,
    )


def create_undefined_variable():
    from paddle.jit.dy2static.return_transformer import (
        RETURN_NO_VALUE_MAGIC_NUM,
    )

    var = data_layer_not_check(
        unique_name.generate("undefined_var"), [1], "float64"
    )
    var.stop_gradient = False
    # The variable is created in block(0), so we append the assign op in
    # block(0) as well.
    helper = LayerHelper('create_undefined_variable', **locals())
    saved_block_ids = helper.main_program.current_block_idx
    helper.main_program.current_block_idx = 0
    assign(RETURN_NO_VALUE_MAGIC_NUM, var)
    helper.main_program.current_block_idx = saved_block_ids
    return var


class UndefinedVar:
    def __init__(self, name):
        self.name = name

    def check(self):
        raise UnboundLocalError(
            "local variable '{}' should be created before using it.".format(
                self.name
            )
        )


class Dygraph2StaticException(Exception):
    def __init__(self, message):
        super().__init__(message)


def saw(x):
    """
    Return `x` as-is, or raise UnboundLocalError if `x` is an UndefinedVar.
    """
    if isinstance(x, UndefinedVar):
        return x.check()
    else:
        return x


def parse_arg_and_kwargs(function):
    """
    Returns full argument names as list. e.g ['x', 'y', 'z']
    """
    fullargspec = inspect.getfullargspec(function)
    arg_names = fullargspec.args
    if arg_names and 'self' == arg_names[0]:
        arg_names = fullargspec.args[1:]

    # parse default kwargs
    default_kwargs = {}
    default_values = fullargspec.defaults
    if default_values:
        assert len(default_values) <= len(arg_names)
        default_kwarg_names = arg_names[-len(default_values) :]
        default_kwargs = dict(zip(default_kwarg_names, default_values))

    return arg_names, default_kwargs


def parse_varargs_name(function):
    """
    Returns varargs name string of function. e.g: 'input' from `foo(x, *input)`
    """
211
    fullargspec = inspect.getfullargspec(function)
    varargs = fullargspec.varargs
    return varargs


def type_name(v):
    return type(v).__name__


def make_hashable(x, error_msg=None):
    """
    Makes input `x` hashable.

224
    For some unhashable objects, such as `dict/list/set/np.ndarray`,applying hash function by using their values.
225
    """
226
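    # Illustrative behavior: make_hashable([1, {'a': 2}]) -> (1, (2,)), which
    # can then be used as a dict key; an np.ndarray hashes via its raw bytes.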
    if isinstance(x, (tuple, list, set)):
        return tuple(map(make_hashable, x))

    try:
        hash(x)
    except TypeError:
        if isinstance(x, np.ndarray):
            # Note: `tostring()` will return the binary data from np.ndarray that
            # means different value will lead to different hash code.
            return hash(x.tostring())
        elif isinstance(x, dict):
            return tuple(map(make_hashable, x.values()))

        error_msg = error_msg or "Requires a hashable object."
        raise ValueError(error_msg + " But received type: %s" % type_name(x))

    return x


def _is_api_in_module_helper(obj, module_prefix):
    m = inspect.getmodule(obj)
    return m is not None and m.__name__.startswith(module_prefix)


def is_api_in_module(node, module_prefix):
    assert isinstance(node, gast.Call), "Input non-Call node for is_api_in_module"

    # Python can have gast.Call as function, for example: convert_call(func)(x)
    # We only check the outermost function
    func_node = node.func
    while isinstance(func_node, gast.Call):
        func_node = func_node.func

    func_str = astor.to_source(gast.gast_to_ast(func_node)).strip()
    try:
        import paddle  # noqa: F401
        import paddle.fluid as fluid  # noqa: F401
        import paddle.fluid.dygraph as dygraph  # noqa: F401
        import paddle.fluid.layers as layers  # noqa: F401
        import paddle.jit.dy2static as _jst  # noqa: F401
        from paddle.fluid.dygraph import to_variable  # noqa: F401
        from paddle import to_tensor  # noqa: F401

        return eval(
            "_is_api_in_module_helper({}, '{}')".format(func_str, module_prefix)
        )
    except Exception:
        return False


def is_dygraph_api(node):
    # Note: An API in the module dygraph_to_static is not a real dygraph API.
    if is_api_in_module(node, DYGRAPH_TO_STATIC_MODULE_PREFIX):
        return False

    # TODO(liym27): A better way to determine whether it is a dygraph api.
    #  Consider the decorator @dygraph_only
    return is_api_in_module(node, DYGRAPH_MODULE_PREFIX)


def is_paddle_api(node):
    return is_api_in_module(node, PADDLE_MODULE_PREFIX)
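
# Illustrative check (sketch):
#   node = gast.parse("paddle.concat(xs)").body[0].value
#   is_paddle_api(node)  # -> True, since `paddle.concat` resolves into paddle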


def is_paddle_func(func):
    m = inspect.getmodule(func)
    return m is not None and m.__name__.startswith(PADDLE_MODULE_PREFIX)


# is_numpy_api cannot reuse is_api_in_module because of the numpy module problem
def is_numpy_api(node):
    assert isinstance(node, gast.Call), "Input non-Call node for is_numpy_api"
    func_str = astor.to_source(gast.gast_to_ast(node.func))
    try:
        import numpy as np  # noqa: F401

        module_result = eval(
            "_is_api_in_module_helper({}, '{}')".format(func_str, "numpy")
        )
        # BUG: np.random.uniform doesn't have module and cannot be analyzed
        # TODO: find a better way
        return module_result or (
            func_str.startswith("numpy.") or func_str.startswith("np.")
        )
    except Exception:
        return False


def _delete_keywords_from(node):
    assert isinstance(node, gast.Call)
    func_src = astor.to_source(gast.gast_to_ast(node.func))
    import paddle.fluid as fluid  # noqa: F401

    full_args = eval(f"inspect.getfullargspec({func_src})")
    full_args_name = full_args[0]

    node.keywords = [k for k in node.keywords if k.arg in full_args_name]
    return


def to_static_api(dygraph_class):
    if dygraph_class in dygraph_class_to_static_api:
        return dygraph_class_to_static_api[dygraph_class]
    else:
        raise NotImplementedError(
            "Paddle dygraph API {} cannot be converted "
            "to static graph at present.".format(dygraph_class)
        )


def _add_keywords_to(node, dygraph_api_name):
    assert isinstance(node, gast.Call)
    if dygraph_api_name == "Linear":
        for ast_keyword in node.keywords:
            if ast_keyword.arg == "output_dim":
                ast_keyword.arg = "size"

        node.keywords.append(
            gast.keyword(
                arg="num_flatten_dims", value=gast.Constant(value=-1, kind=None)
            )
        )

    if dygraph_api_name == "BilinearTensorProduct":
        for ast_keyword in node.keywords:
            if ast_keyword.arg == "output_dim":
                ast_keyword.arg = "size"

    if dygraph_api_name == "PRelu":
        for ast_keyword in node.keywords:
            if ast_keyword.arg == "input":
                ast_keyword.arg = "x"
    return


def to_static_ast(node, class_node):
    assert isinstance(node, gast.Call)
    assert isinstance(class_node, gast.Call)
    static_api = to_static_api(class_node.func.attr)

    node.func = gast.Attribute(
        attr=static_api,
        ctx=gast.Load(),
        value=gast.Attribute(
            attr='layers',
            ctx=gast.Load(),
            value=gast.Name(
                ctx=gast.Load(), id='fluid', annotation=None, type_comment=None
            ),
        ),
    )

    update_args_of_func(node, class_node, 'forward')

    node.args.extend(class_node.args)
    node.keywords.extend(class_node.keywords)
    _add_keywords_to(node, class_node.func.attr)
    _delete_keywords_from(node)

    gast.fix_missing_locations(node)

    return node


def update_args_of_func(node, dygraph_node, method_name):
    assert isinstance(node, gast.Call)
    if method_name not in ["__init__", "forward"]:
        raise ValueError(
            "The method name of class to update args should be '__init__' or 'forward'"
        )

    class_src = astor.to_source(gast.gast_to_ast(dygraph_node.func))
    import paddle.fluid as fluid  # noqa: F401

    if method_name == "__init__" or eval(
        "issubclass({}, fluid.dygraph.Layer)".format(class_src)
    ):
        full_args = eval(f"inspect.getfullargspec({class_src}.{method_name})")
        full_args_name = [
            arg_name for arg_name in full_args[0] if arg_name != "self"
        ]
    else:
        full_args_name = []
    added_keywords = []
    for idx, arg in enumerate(node.args):
        added_keywords.append(gast.keyword(arg=full_args_name[idx], value=arg))

    node.args = []
    node.keywords = added_keywords + node.keywords


def create_api_shape_node(tensor_shape_node):
    assert isinstance(
        tensor_shape_node, (gast.Name, gast.Attribute, gast.Subscript)
    )

    if isinstance(tensor_shape_node, gast.Name):
        api_shape_node = gast.Call(
            func=gast.parse('paddle.shape').body[0].value,
            args=[tensor_shape_node],
            keywords=[],
        )
        return api_shape_node

    if isinstance(tensor_shape_node, gast.Attribute):
        api_shape_node = gast.Call(
            func=gast.parse('paddle.shape').body[0].value,
            args=[tensor_shape_node.value],
            keywords=[],
        )
        return api_shape_node

    if isinstance(tensor_shape_node, gast.Subscript):
        result_node = copy.deepcopy(tensor_shape_node)
        result_node.value = create_api_shape_node(result_node.value)
        return result_node


def get_constant_variable_node(name, value, shape=[1], dtype='int64'):
    # The dtype must be quoted in the generated source, otherwise it would be
    # emitted as a bare (undefined) name like `int64`.
    return gast.parse(
        '%s = paddle.full(%s, "%s", "%s")'
        % (name, str(shape), str(value), dtype)
    )


def get_attribute_full_name(node):
    assert isinstance(
        node, gast.Attribute
    ), "Input non-Attribute node to get attribute full name"
    return astor.to_source(gast.gast_to_ast(node)).strip()


def generate_name_node(name_ids, ctx=gast.Load(), gen_tuple_if_single=False):
    """
    If name_ids is list or tuple or set with multiple strings, this function
    generates gast.Tuple of gast.Name.
    If the name_ids is single string or contains only 1 string, this function
    returns gast.Name if gen_tuple_if_single==False else returns gast.Tuple
    with only one gast.Name

    This function is used at several gast.Return statements.
    """
    if isinstance(name_ids, str):
        name_ids = [name_ids]
    if not isinstance(name_ids, (list, tuple, set)):
        raise TypeError(
            'name_ids must be list or tuple or set, but received %s'
            % type(name_ids)
        )

    def create_node_for_name(name):
        if '.' not in name:
            return gast.Name(
                id=name, ctx=ctx, annotation=None, type_comment=None
            )
        return gast.parse(name).body[0].value

    gast_names = [create_node_for_name(name_id) for name_id in name_ids]
    if len(gast_names) == 1 and not gen_tuple_if_single:
        name_node = gast_names[0]
    else:
        name_node = gast.Tuple(elts=gast_names, ctx=ctx)
    return name_node


def create_funcDef_node(nodes, name, input_args, return_name_ids):
    """
    Wrapper all statements of nodes into one ast.FunctionDef, which can be
    called by ast.Call.
    """
    nodes = copy.copy(nodes)
    # add return statement
    if return_name_ids:
        nodes.append(gast.Return(value=generate_name_node(return_name_ids)))
    else:
        nodes.append(gast.Return(value=None))
    func_def_node = gast.FunctionDef(
        name=name,
        args=input_args,
        body=nodes,
        decorator_list=[],
        returns=None,
        type_comment=None,
    )
    return func_def_node


def index_in_list(array_list, item):
    try:
        return array_list.index(item)
    except ValueError:
        # Item not in array_list
        return -1


def create_assign_node(name, node):
    """
    Creates a `gast.Assign` node by given name_id as target and node as value.
    """
    targets = generate_name_node(name, ctx=gast.Store())
    assign_node = gast.Assign(targets=[targets], value=node)
    return targets, assign_node


def get_temp_dir():
    """
    Return @to_static temp directory.
    """
    dir_name = "paddle/to_static_tmp/{pid}".format(pid=os.getpid())
    temp_dir = os.path.join(os.path.expanduser('~/.cache'), dir_name)
    is_windows = sys.platform.startswith('win')
    if is_windows:
        temp_dir = os.path.normpath(temp_dir)

    if not os.path.exists(temp_dir):
        os.makedirs(temp_dir)

    return temp_dir


def ast_to_func(ast_root, dyfunc, delete_on_exit=True):
    """
    Transform modified AST of decorated function into python callable object.
    TODO: If only one of the inner functions is decorated instead of the main
    function, the other inner functions are invisible to the decorated function.
    """

    def remove_if_exit(dir_path):
        if os.path.exists(dir_path):
            shutil.rmtree(dir_path)

    def func_prefix(func):
        pre_fix = func.__name__
        if hasattr(func, '__self__'):
            try:
                pre_fix = func.__self__.__class__.__name__ + '_' + func.__name__
            except Exception:
                pass
        return pre_fix

    source = ast_to_source_code(ast_root)
    source = _inject_import_statements() + source
    temp_dir = get_temp_dir()
    f = tempfile.NamedTemporaryFile(
        mode='w',
        prefix=func_prefix(dyfunc),
        suffix='.py',
        delete=False,
        dir=temp_dir,
        encoding='utf-8',
    )
577 578 579 580
    with f:
        module_name = os.path.basename(f.name[:-3])
        f.write(source)

    global DEL_TEMP_DIR
    if delete_on_exit and DEL_TEMP_DIR:
        # Clear temporary files in TEMP_DIR when exiting the Python process
        atexit.register(remove_if_exit, dir_path=temp_dir)
        DEL_TEMP_DIR = False

    func_name = dyfunc.__name__
    loader = SourceFileLoader(module_name, f.name)
    spec = importlib.util.spec_from_loader(loader.name, loader)
    module = importlib.util.module_from_spec(spec)
    loader.exec_module(module)
    # The 'forward' or 'another_forward' of 'TranslatedLayer' cannot be obtained
    # through 'func_name'. So set the special function name '__i_m_p_l__'.
    if hasattr(module, '__i_m_p_l__'):
        callable_func = getattr(module, '__i_m_p_l__')
        callable_func.__name__ = func_name
    elif hasattr(module, func_name):
        callable_func = getattr(module, func_name)
    else:
        raise ValueError(
            'Function: %s doesn\'t exist in the Module transformed from AST.'
            % func_name
        )
    # After transforming the dygraph function into callable_func saved in a
    # tmp file, it loses the global variables from import statements or those
    # defined in the source file. Recover the necessary variables through
    # `__globals__`.
    recover_globals_attribute(dyfunc, callable_func)

    return callable_func, f.name


def _inject_import_statements():
    import_statements = [
        "import paddle",
        "from paddle import Tensor",
        "import paddle.fluid as fluid",
        "import paddle.jit.dy2static as _jst",
        "from typing import *",
        "import numpy as np",
        "import warnings",
        "warnings.filterwarnings('ignore', category=DeprecationWarning)",
    ]
    return '\n'.join(import_statements) + '\n'


def recover_globals_attribute(src_obj, dst_obj):
    attr_name = '__globals__'

    src_globals = getattr(src_obj, attr_name, {})
    dst_globals = getattr(dst_obj, attr_name, {})

    for k, v in src_globals.items():
        # ignore builtin attribute.
        if not (k.startswith('__') and k.endswith('__')):
            dst_globals[k] = v


def func_to_source_code(function, dedent=True):
    """
    Transforms function into raw string of source code.
    """
    if not (inspect.isfunction(function) or inspect.ismethod(function)):
        raise TypeError(
            "The type of 'function' should be a function or method, but received {}.".format(
                type(function).__name__
            )
        )
    source_code_list, _ = inspect.getsourcelines(function)
    # Replace comments with blank lines so that error messages are not misplaced
    source_code_list = [
        line if not line.lstrip().startswith('#') else '\n'
        for line in source_code_list
    ]
    source_code = ''.join(source_code_list)
    if dedent:
        source_code = textwrap.dedent(source_code)

    return source_code


def ast_to_source_code(ast_node):
    """
    Transforms ast node into source code.
    """
    if not isinstance(ast_node, (gast.AST, ast.AST)):
        raise TypeError(
            "Type of ast_root should be gast.AST or ast.AST, but received %s."
            % type(ast_node)
        )
    if isinstance(ast_node, gast.AST):
        ast_node = gast.gast_to_ast(ast_node)

    # Do not wrap lines even if they are too long
    def pretty_source(source):
        return ''.join(source)

    source_code = astor.to_source(ast_node, pretty_source=pretty_source)
    return source_code


def is_candidate_node(node):
    """
    Nodes of the specified types may be dependent on a tensor.
    """
    is_compare_node = isinstance(
        node,
        (
            gast.Compare,
            gast.BoolOp,
            gast.UnaryOp,
            gast.For,
            gast.If,
            gast.While,
        ),
    )
    # TODO(Aurelius84): `.numpy()` may be a customized function,
    # and we should consider a more elegant way to solve this problem.
    has_numpy_attr = ".numpy()" in ast_to_source_code(node)
    return is_compare_node or has_numpy_attr


def compare_with_none(node):
    """
    Whether the comparator of `gast.Compare` node is `None`.
    """
    if isinstance(node, gast.Compare):
        for child in [node.left, node.comparators]:
            # node.comparators is a list.
            if isinstance(child, list):
                child = child[0]
            if (isinstance(child, gast.Constant) and child.value is None) or (
                isinstance(child, gast.Name) and child.id == 'None'
            ):
                return True
    return False
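
# e.g. compare_with_none returns True for a node parsed from "x is None"
# and False for one parsed from "x == 1".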


class IsControlFlowVisitor(gast.NodeVisitor):
    """
    Judge whether a control-flow ast_node from dygraph code depends on a paddle Tensor.
    `ast_node` can be gast.If, gast.For, gast.While, gast.If.test(gast.Compare, gast.BoolOp, gast.UnaryOp).

    If returns True,
    gast.If.test must meet at least one of the following requirements:
        1. involves at least one var whose type is Tensor.
        2. the Tensor var calls `.numpy()[]` interface or Tensor.shape is [1].
        3. involves Tensor.shape[i] and the shape[i] is unknown in compile time.
    gast.While must meet at least one of the requirements 1 to 5:
        4. has `break` statement.
        5. has `continue` statement.
    gast.For must meet at least one of the requirements 4 to 8:
        6. calls `range` function in `for` statement and the argument of range is Tensor.
        7. calls `enumerate` function in `for` statement and the argument of enumerate is Tensor.
        8. the iterable variable in `for` statement is Tensor.
        TODO: Support non-range case

    The following examples should not be considered as control_flow_if:
        1. `if Tensor_var` or `if Tensor_var is None`
        2. if Tensor.shape[i] is determined with fixed value (not -1 or None)

    Note: pred in ConditionalBlock requires a variable, which means all vars should be Tensor
          or transformed into Tensor, like fill_constant(shape=[1], dtype='int32', value=Tensor.shape[i]).

    TODO: 1. need to deal with `tensor.shape[i]`, which needs to eval the data of shape[i],
             because reshape_op may be called before this statement.
    """

    def __init__(
        self, ast_node, static_analysis_visitor=None, node_var_type_map=None
    ):
        assert isinstance(
            ast_node, gast.AST
        ), "Type of input node should be gast.AST, but received %s." % type(
            ast_node
        )
        self.ast_root = ast_node
        if static_analysis_visitor is None:
            from .static_analysis import StaticAnalysisVisitor

            static_analysis_visitor = StaticAnalysisVisitor(ast_node)
        self.static_analysis_visitor = static_analysis_visitor
        self.node_to_wrapper_map = (
            self.static_analysis_visitor.get_node_to_wrapper_map()
        )
        self.node_var_type_map = node_var_type_map

        self.is_control_flow_num = 0
        self._compare_node_tensor_set = set()

    def transform(self):
        node = self.ast_root
        if isinstance(node, gast.If):
            self._visit_If(node)
        elif isinstance(node, gast.For):
            self._visit_For(node)
        elif isinstance(node, gast.While):
            self._visit_While(node)
        else:
            self.visit(node)
        return self.is_control_flow_num > 0

    def _visit_If(self, node):
        assert isinstance(node, gast.If)
        self.visit(node.test)
        return

    def _visit_For(self, node):
        assert isinstance(node, gast.For)
        if isinstance(node.iter, gast.Call):
            # for in range(var[0]|var.numpy()[0]) or for in enumerate(var|var.numpy())
            if isinstance(node.iter.func, gast.Name):
                if (
                    node.iter.func.id == "range"
                    or node.iter.func.id == "enumerate"
                ):
                    for arg in node.iter.args:
                        self.visit(arg)
                else:
                    return
            # for in var.numpy()
            elif isinstance(node.iter.func, gast.Attribute):
                if node.iter.func.attr == 'numpy':
                    self._visit_Call(node.iter)
                else:
                    return
            else:
                return
        elif isinstance(node.iter, gast.Name):
            # for in var
            self.visit(node.iter)
        else:
            return

        for child_node in gast.walk(node):
            if isinstance(child_node, (gast.Continue, gast.Break)):
                self._visit_break_continue(child_node)
        return

    def _visit_While(self, node):
        assert isinstance(node, gast.While)
        test = node.test
        self.generic_visit(test)
        for child_node in gast.walk(node):
            if isinstance(child_node, (gast.Continue, gast.Break)):
                self._visit_break_continue(child_node)
        return

    def _visit_break_continue(self, node):
        assert isinstance(node, (gast.Break, gast.Continue))
        wrapper_node = self.node_to_wrapper_map.get(node)
        if not wrapper_node:
            # Transformed node is not in node_to_wrapper_map
            return

        while wrapper_node.parent:
            parent_node = wrapper_node.parent.node
            if isinstance(parent_node, (gast.For, gast.While)):
                if parent_node is self.ast_root:
                    self.is_control_flow_num += 1
                    return
                else:
                    return

            wrapper_node = wrapper_node.parent

        return

    def visit_BoolOp(self, node):
        for child in node.values:
            self.visit(child)
        return node

    def visit_Compare(self, node):
        pre_control_flow_num = self.is_control_flow_num
        if not compare_with_none(node):
            self.generic_visit(node)
            for child in gast.walk(node):
                if isinstance(child, gast.Subscript):
                    self._visit_Subscript(child)
        if self.is_control_flow_num > pre_control_flow_num:
            self._compare_node_tensor_set.add(node)
        return node

    def _visit_Subscript(self, node):
        self.generic_visit(node)
        if hasattr(node, 'value') and isinstance(node.value, gast.Call):
            self._visit_Call(node.value)
        return node

    def _visit_Call(self, node):
        assert isinstance(node, gast.Call)
        if isinstance(node.func, gast.Attribute):
            attr_node = node.func
            if attr_node.attr == 'numpy':
                self.is_control_flow_num += 1

    def visit_Call(self, node):
        self._visit_Call(node)
        if is_paddle_api(node):
            self.is_control_flow_num += 1
        return node

    def visit_Name(self, node):
        if self._is_node_with_tensor(node, node.id):
            self.is_control_flow_num += 1
        return node

    def visit_Constant(self, node):
        if self._is_node_with_tensor(node, node.value):
            self.is_control_flow_num += 1
        return node

    def _is_node_with_tensor(self, node, name_id):
        from paddle.jit.dy2static.static_analysis import (
            NodeVarType,
        )

        # Look up the node_var_type_map by name_id.
        if self.node_var_type_map:
            if name_id and isinstance(name_id, str):
                var_type = self.node_var_type_map.get(name_id, None)
                if var_type and var_type & NodeVarType.TENSOR_TYPES:
                    return True
        # if not found, look up the node_to_wrapper_map by node.
        wrapper_node = self.node_to_wrapper_map.get(node, None)
        if wrapper_node is not None:
            if wrapper_node.node_var_type & NodeVarType.TENSOR_TYPES:
                return True

        return False

    def get_compare_nodes_with_tensor(self):
        return self._compare_node_tensor_set


# NOTE: inspect.unwrap() exists in PY3 but not in PY2.
def unwrap(func):
    """
    Returns the object wrapped by decorators.
    """

    def _is_wrapped(f):
        return hasattr(f, '__wrapped__')

    unwrapped_f = func
    while _is_wrapped(unwrapped_f):
        unwrapped_f = unwrapped_f.__wrapped__

    return unwrapped_f


def input_specs_compatible(src_input_specs, desired_input_specs):
    """
    Returns True if the two input specs are compatible, otherwise False.

    args:
937 938 939 940
        src_input_spec (list or tuple[InputSpec et.al]): list/tuple of
            paddle.static.InputSpec or int/str et.al
        desired_input_specs (list or tuple[InputSpec et.al]): list/tuple of
            paddle.static.InputSpec or int/str et.al
941 942
    """
    len_specs = len(src_input_specs)
    if len_specs != len(desired_input_specs):
        # NOTE(chenweihang): if the input_spec of jit.save is a subset of
        # input_spec of to_static, also compatible
        for spec in src_input_specs:
            if spec not in desired_input_specs:
                return False
    else:
        for (src_spec, desired_spec) in zip(
            src_input_specs, desired_input_specs
        ):
            if isinstance(src_spec, paddle.static.InputSpec) or isinstance(
                desired_spec, paddle.static.InputSpec
            ):
                if not _compatible_tensor_spec(src_spec, desired_spec):
                    return False
            else:
                if not _compatible_non_tensor_spec(src_spec, desired_spec):
                    return False

    return True


def _compatible_tensor_spec(src_spec, desired_spec):
    """
    Check whether two tensor type spec is compatible.
    """
    for spec in [src_spec, desired_spec]:
        if not isinstance(spec, paddle.static.InputSpec):
            return False
    src_shape = src_spec.shape
    other_shape = desired_spec.shape
    len_shape = len(src_shape)
    if len_shape != len(other_shape):
        return False
    for j in range(len_shape):
        if src_shape[j] is None or src_shape[j] < 0:
            continue
        if other_shape[j] is None or other_shape[j] < 0:
            continue
        if src_shape[j] != other_shape[j]:
            return False

    src_dtype = convert_dtype(src_spec.dtype)
    other_dtype = convert_dtype(desired_spec.dtype)
    if src_dtype != other_dtype:
        return False

    return True


def _compatible_non_tensor_spec(src_spec, desired_spec):
    """
    Check whether two non-tensor type spec is compatible.
    """

    def hash_value(spec):
        try:
            hash_val = make_hashable(spec)
        except Exception:
            hash_val = None
        return hash_val

    src_hash_val = hash_value(src_spec)
    desired_hash_val = hash_value(desired_spec)

    if src_hash_val != desired_hash_val:
        return False
    else:
        return True


class NameScope:
    def __init__(self):
        """
        A NameScope is an object that manages all the variable names.
        Only FunctionDef and control-flow nodes have a namescope property.

        The type can be "function" or "controlflow".

        We don't analyze read-only variables because they don't affect the
        analysis.
        """
        self.globals = set()
        self.nonlocals = set()
        self.args = set()
        self.father = None  # point to the nearest function name scope.
        self.w_vars = set()  # all qualified + normal names that are stored
        self.created = set()  # useful for control flow compatibility
        # only valid in control_flow nodes
        # may be removed later.
        self.push_pop_vars = set()  # vars on which `append` or `pop` is called

    def set_father(self, father):
        self.father = father

    def existed_vars(self):
        """Vars existing in the current scope.
        They must not contain qualified names.
        """
        local_vars = self.w_vars - self.globals - self.nonlocals - self.args
        return set(filter(lambda x: '.' not in x, local_vars))

    def created_vars(self):
        return self.created

    def modified_vars(self):
        # may be globals / non-locals / args / qualified names and created_vars
        return self.w_vars

    def variadic_length_vars(self):
        """
        At present, we do not support global append, such as

        import numpy as np
        a = []
        def func():
            a.append(1)  # global name `a`, we will raise a warning.
            np.append(a, 1)  # global name `np`, we will raise a warning.
        """
        non_global_push_pop_names = []
        for var in self.push_pop_vars:
            if self._is_simple_name(var) and self.is_global_var(var):
                warnings.warn(
                    f"Find variable `{var}` defined in global scope"
                    f" and call `{var}.append() or {var}.pop()`"
                    f", which will be ignored and never be transfered into"
1068 1069
                    f" tensor array."
                )
            else:
                non_global_push_pop_names.append(var)
        return set(non_global_push_pop_names)

    def control_flow_vars(self):
        valid_names = self.w_vars
        tmp = self.father.globals & valid_names
        return {"global": tmp, "nonlocal": self.w_vars - tmp}

    def _is_simple_name(self, name):
        if '.' in name or '[' in name:
            return False
        return True

    def is_global_var(self, name):
        """
        Return whether the name is a var created in global scope.
        Search from bottom to top. If it is not created or modified,
        it means global vars; otherwise, it means local vars.
        Only valid after FunctionNameLivenessAnalysis visitor.
        """
        assert self._is_simple_name(
            name
        ), f"is_global_var accepts a simple name, but got `{name}`."
        ancestor = self
        while ancestor is not None:
            if name in ancestor.globals:
                return True
            if name in (ancestor.nonlocals | ancestor.w_vars):
                return False
            ancestor = ancestor.father
        return True

    def is_local_var(self, name):
        return not self.is_global_var(name)

    def merge_from(self, name_scope):
        self.globals |= name_scope.globals
        self.nonlocals |= name_scope.nonlocals
        self.args |= name_scope.args
        self.w_vars |= name_scope.w_vars
        self.push_pop_vars |= name_scope.push_pop_vars


class FunctionNameLivenessAnalysis(gast.NodeVisitor):
    """analyze the liveness of a function.

    every variables stored in this scope will be collected,
    in addition with global/nonlocal information and
    push_pop information.

    1. global variable is stored in node.var_globals.
    2. nonlocal variable is stored in node.var_nonlocals.
    3. arguments is stored in node.var_args.
    4. if a variable's push and pop attribute is called,
       it will be collected in push_pop_vars. They are
       used for transformation to tensor_array.
       NOTE: push_pop_vars **may not** in w_vars.
       a.push(0) don't modify the variable a, but the content
       of a.

    For example:

    def func(*args, **kargs):
        a = 12
        global i,j
        nonlocal x,y
        print(a)
        i = k
        b = []
        c = [1,2,3]
        for m in range(10):
            q = 12
            b.push(1)
            c.pop()

    After this visitor we have:
    # node is the FunctionDef node with name: "func"
    node.pd_scope = NameScope(
        globals = ['i', 'j'],
        nonlocals = ['x', 'y'],
        args = ['args', 'kargs'],
        wr_vars = ['a', 'i', 'q', 'm', 'c', 'b']
        push_pop_vars = ['b', 'c']
    )
    """

    def __init__(self, root_node):
        self.scope_node_stack = []  # controlflow, functiondef node
        self.visit(root_node)

    def _reset_name_scope(self, node):
        # always reset the node as empty namescope.
        setattr(node, "pd_scope", NameScope())

    def _get_name_scope(self, node):
        if not hasattr(node, "pd_scope"):
            setattr(node, "pd_scope", NameScope())
        return node.pd_scope

    def _current_name_scope(self):
        return self._get_name_scope(self.scope_node_stack[-1])

    def _father_name_scope(self):
        if len(self.scope_node_stack) == 1:
            return None
        return self._get_name_scope(self.scope_node_stack[-2])

    def _nearest_function_scope(self):
        if len(self.scope_node_stack) == 1:
            return None
        for node in self.scope_node_stack[-2::-1]:
            if isinstance(node, gast.FunctionDef):
                return self._get_name_scope(node)

    def visit_ListComp(self, node):
        """[ i for i in range(10) ]
        In this case, `i` will not created in FunctionScope.
        We don't collect `i` by not calling generic_visit.
        """
        pass

    def visit_DictComp(self, node):
        """the same as ListComp."""
        pass

    def visit_Name(self, node):
        self.generic_visit(node)
        write_context = (gast.Store, gast.AugStore, gast.Del)
        if isinstance(node.ctx, write_context):
            self._current_name_scope().w_vars.add(node.id)

    def visit_FunctionDef(self, node):
        def pre_func():
            self._current_name_scope().args |= set(
                self._get_argument_names(node)
            )

        def post_func():
            """NOTE: Why do we need to merge w_vars and push_pop_vars here?
            Because we run ifelse_transformer after loop_transformer. Loops
            are changed into functions, but we know such a function will be
            called inside an `if`, so we add its w_vars to the father
            function scope.
            """
            from paddle.jit.dy2static.loop_transformer import (
                WHILE_BODY_PREFIX,
                FOR_CONDITION_PREFIX,
                FOR_BODY_PREFIX,
            )
            from paddle.jit.dy2static.ifelse_transformer import (
                TRUE_FUNC_PREFIX,
                FALSE_FUNC_PREFIX,
            )

            control_flow_function_def = [
                WHILE_BODY_PREFIX,
                FOR_CONDITION_PREFIX,
                FOR_BODY_PREFIX,
                TRUE_FUNC_PREFIX,
                FALSE_FUNC_PREFIX,
            ]

            def is_control_flow_def_node():
                for prefix in control_flow_function_def:
                    if node.name.startswith(prefix):
                        return True
                return False

            if self._father_name_scope() and is_control_flow_def_node():
                self._father_name_scope().w_vars |= (
                    self._current_name_scope().w_vars
                )
                self._father_name_scope().push_pop_vars |= (
                    self._current_name_scope().push_pop_vars
                )

        self._visit_scope_node(node, pre_func, post_func)

    def _visit_scope_node(self, node, pre_func, post_func):
        """Scope node main visit logic.
        pre_func and post_func are callbacks.
        """
        self._reset_name_scope(node)
        self.scope_node_stack.append(node)
        self._current_name_scope().set_father(self._nearest_function_scope())
        if pre_func:
            pre_func()
        self.generic_visit(node)
        if post_func:
            post_func()
        self.scope_node_stack.pop()

    def _visit_controlflow_node(self, node):
        def post_func():
            self._father_name_scope().merge_from(self._current_name_scope())
            self._nearest_function_scope().merge_from(
                self._current_name_scope()
            )
            self._current_name_scope().created = (
                self._nearest_function_scope().existed_vars()
                - node.before_created
            )
            # gather created vars into the father scope; used in CreateUndefinedVarTransform
            self._nearest_function_scope().created |= (
                self._current_name_scope().created
            )

        def pre_func():
            setattr(
                node,
                "before_created",
                self._nearest_function_scope().existed_vars(),
            )

        self._visit_scope_node(node, pre_func, post_func)

    def visit_For(self, node):
        self._visit_controlflow_node(node)

    def visit_While(self, node):
        self._visit_controlflow_node(node)

    def visit_If(self, node):
        self._visit_controlflow_node(node)

    def visit_Global(self, node):
        self._current_name_scope().globals |= set(node.names)

    def visit_Nonlocal(self, node):
        self._current_name_scope().nonlocals |= set(node.names)

    def visit_Attribute(self, node):
        self.generic_visit(node)
        write_context = (gast.Store, gast.AugStore, gast.Del)
        if isinstance(node.ctx, write_context):
            name = ast_to_source_code(node).strip()
            self._current_name_scope().w_vars.add(name)

    def visit_Call(self, node):
        self.generic_visit(node)
        if not isinstance(node.func, gast.Attribute):
            return
        variadic_length_method = ['append', 'pop']
        if node.func.attr not in variadic_length_method:
            return
        # We don't treat append/pop as a write operation, just as a[i] = 10 does not modify `a` itself.
        name = ast_to_source_code(node.func.value).strip()
        self._current_name_scope().push_pop_vars.add(name)

    def _get_argument_names(self, node):
        """Get all argument names in the FunctionDef node.
        These names are local to the function and shouldn't
        be treated as created variables.
        """
        assert isinstance(
            node, gast.FunctionDef
        ), "Input node is not function define node"
        names = [a for a in node.args.args]
        names.append(node.args.vararg)
        names.append(node.args.kwarg)
        names = [i.id for i in names if i is not None]
        return names


def create_get_args_node(names):
    """
    Create get_args function as follows:

        def get_args_0():
            nonlocal x, y
            return x, y
    """

    def empty_node():
        func_def = """
        def {func_name}():
            return
        """.format(
            func_name=unique_name.generate(GET_ARGS_FUNC_PREFIX)
        )
        return gast.parse(textwrap.dedent(func_def)).body[0]

    assert isinstance(names, (list, tuple))
    node = create_nonlocal_stmt_nodes(names)
    if not names:
        return empty_node()
    if node == []:
        nonlocal_vars = "\n"
    else:
        nonlocal_vars = ast_to_source_code(node[0])
    template = """
    def {func_name}():
        {nonlocal_vars}
        return {vars},
    """
    func_def = template.format(
        func_name=unique_name.generate(GET_ARGS_FUNC_PREFIX),
        nonlocal_vars=nonlocal_vars,
        vars=",".join(names),
    )
    return gast.parse(textwrap.dedent(func_def)).body[0]


def create_set_args_node(names):
    """
    Create set_args function as follows:

        def set_args_0(__args):
            nonlocal x, y
            x, y = __args
    """

    def empty_node():
        func_def = """
        def {func_name}({args}):
            pass
        """.format(
            func_name=unique_name.generate(SET_ARGS_FUNC_PREFIX), args=ARGS_NAME
        )
        return gast.parse(textwrap.dedent(func_def)).body[0]

    assert isinstance(names, (list, tuple))
    node = create_nonlocal_stmt_nodes(names)
    if not names:
        return empty_node()
    if node == []:
        nonlocal_vars = "\n"
    else:
        nonlocal_vars = ast_to_source_code(node[0])
    template = """
    def {func_name}({args}):
        {nonlocal_vars}
        {vars}, = {args}
    """
    func_def = template.format(
        func_name=unique_name.generate(SET_ARGS_FUNC_PREFIX),
        args=ARGS_NAME,
        nonlocal_vars=nonlocal_vars,
        vars=",".join(names),
    )
    return gast.parse(textwrap.dedent(func_def)).body[0]


def create_nonlocal_stmt_nodes(names):
    assert isinstance(names, (list, tuple))
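    # e.g. names ['x', 'y', 'a.b', 'c[0]'] yield one "nonlocal x,y" statement;
    # qualified ('.') and subscripted ('[') names can't be declared nonlocal.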

    mapped = list(filter(lambda n: '.' not in n, names))
    mapped = list(filter(lambda n: '[' not in n, mapped))
    names = sorted(
        mapped, key=mapped.index
    )  # to keep the order, we can't use set() to unique
    if not names:
        return []
    func_code = "nonlocal {}".format(','.join(names))
    return [gast.parse(func_code).body[0]]


class GetterSetterHelper:
    """we have two classes of names in setter and getter function:
    w_vars(loop_vars) + push_pop_vars
    To simplify the setter logic in convert_while and convert_cond,
    we extract the helper class here.
1431 1432 1433 1434 1435 1436 1437 1438 1439 1440 1441 1442 1443 1444 1445
    """

    def __init__(self, getter_func, setter_func, *name_lists):
        name_lists = map(lambda x: [] if x is None else x, name_lists)
        name_sets = map(lambda x: set(x), name_lists)
        self._union = list(reduce(lambda x, y: x | y, name_sets, set()))
        self._union.sort()
        self.getter = getter_func
        self.setter = setter_func
        self.name2id = {name: idx for idx, name in enumerate(self._union)}

    def union(self):
        return self._union

    def get(self, names):
        if names is None:
            names = []
        vars = self.getter()
        if vars is None:
            return tuple()
        for n in names:
            assert (
                n in self.name2id
            ), "the name `{}` is not in the name union set `{}`.".format(
                n, self.name2id.keys()
            )
        return tuple(map(lambda n: vars[self.name2id[n]], names))

    def set(self, names, values):
        if names is None:
            names = []
        if values is None:
            values = []
        vars = self.getter()
        if vars is None:
            return
        for n in names:
            assert (
                n in self.name2id
            ), "the name `{}` is not in the name union set `{}`.".format(
                n, self.name2id.keys()
            )
        vars = list(vars)
        indices = list(map(lambda n: self.name2id[n], names))
        for i, v in zip(indices, values):
            vars[i] = v
        self.setter(vars)


def create_name_str(name_ids):
    """
    Return "('x', 'y')" for [x, y]
    """
    if not name_ids:
        return 'None'

    names_str = ["'%s'" % (name.replace("'", "\\'")) for name in name_ids]
    return "(%s, )" % ','.join(names_str)