# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import ast
import astor
import atexit
import copy
import collections
from paddle.utils import gast
import inspect
import os
import sys
import shutil
import tempfile
import textwrap
import numpy as np

import paddle
from paddle.fluid import unique_name
from paddle.fluid.data_feeder import convert_dtype
from paddle.fluid import core
from paddle.fluid.layer_helper import LayerHelper
from paddle.fluid.layers import assign
from functools import reduce
import warnings

# Note(Aurelius): Do not forget the dot `.`, which distinguishes paddle from other
# modules such as paddlenlp.
PADDLE_MODULE_PREFIX = 'paddle.'
DYGRAPH_MODULE_PREFIX = 'paddle.fluid.dygraph'
DYGRAPH_TO_STATIC_MODULE_PREFIX = 'paddle.fluid.dygraph.dygraph_to_static'
GET_ARGS_FUNC_PREFIX = 'get_args'
SET_ARGS_FUNC_PREFIX = 'set_args'
ALREADY_D2S = '__already_d2s'
ARGS_NAME = '__args'
# NOTE(liym27): Please use `getattr(ast_node, ORIGI_INFO)` instead of . operation to get the original information of ast node.
ORIGI_INFO = "Original information of source code for ast node."


class BaseNodeVisitor(gast.NodeVisitor):
    """
    Implement customized NodeVisitor inherited from gast.NodeVisitor.
    Ancestor nodes are traced to easily support more operations of currently
    visited node.
    """

    def __init__(self):
        self.ancestor_nodes = []

    def visit(self, node):
        """Visit a node."""
        self.ancestor_nodes.append(node)

        method = 'visit_' + node.__class__.__name__
        visitor = getattr(self, method, self.generic_visit)
        ret = visitor(node)
        self.ancestor_nodes.pop()
        return ret


# imp is deprecated in python3
from importlib.machinery import SourceFileLoader

dygraph_class_to_static_api = {
    "CosineDecay": "cosine_decay",
    "ExponentialDecay": "exponential_decay",
    "InverseTimeDecay": "inverse_time_decay",
    "NaturalExpDecay": "natural_exp_decay",
    "NoamDecay": "noam_decay",
    "PiecewiseDecay": "piecewise_decay",
    "PolynomialDecay": "polynomial_decay",
}

DEL_TEMP_DIR = True  # A flag to avoid atexit.register more than once
FOR_ITER_INDEX_PREFIX = '__for_loop_var_index'
FOR_ITER_TUPLE_PREFIX = '__for_loop_iter_tuple'
FOR_ITER_TARGET_PREFIX = '__for_loop_iter_target'
FOR_ITER_ITERATOR_PREFIX = '__for_loop_iter_iterator'
FOR_ITER_TUPLE_INDEX_PREFIX = '__for_loop_iter_tuple_index'
FOR_ITER_VAR_LEN_PREFIX = '__for_loop_var_len'
FOR_ITER_VAR_NAME_PREFIX = '__for_loop_iter_var'
FOR_ITER_ZIP_TO_LIST_PREFIX = '__for_loop_iter_zip'

RE_PYNAME = '[a-zA-Z0-9_]+'
RE_PYMODULE = r'[a-zA-Z0-9_]+\.'

# FullArgSpec is valid from Python 3. Define a namedtuple to
# make it available in Python 2.
FullArgSpec = collections.namedtuple(
    'FullArgSpec',
    [
        'args',
        'varargs',
        'varkw',
        'defaults',
        'kwonlyargs',
        'kwonlydefaults',
        'annotations',
    ],
)


def data_layer_not_check(name, shape, dtype='float32', lod_level=0):
    """
    This function creates a Tensor on the global block. The created Tensor
    doesn't check the dtype and the shape of the feed data because dygraph input
    data can be variable-length. This API is used in translating dygraph into
    static graph.

     Note:
        The default :code:`stop_gradient` attribute of the Tensor created by
        this API is true, which means the gradient won't be passed backward
        through the data Tensor. Set :code:`var.stop_gradient = False` If
        user would like to pass backward gradient.

    Args:
       name (str): The name/alias of the Tensor, see :ref:`api_guide_Name`
           for more details.
       shape (list|tuple): List|Tuple of integers declaring the shape. You can
           set "None" at a dimension to indicate the dimension can be of any
           size. For example, it is useful to set changeable batch size as "None"
       dtype (np.dtype|VarType|str, optional): The type of the data. Supported
           dtype: bool, float16, float32, float64, int8, int16, int32, int64,
           uint8. Default: float32
       lod_level (int, optional): The LoD level of the LoDTensor. Usually users
           don't have to set this value. For more details about when and how to
           use LoD level, see :ref:`user_guide_lod_tensor` . Default: 0

    Returns:
        Tensor: The global Tensor that gives access to the data.
    """
    helper = LayerHelper('data', **locals())
    shape = list(shape)
    for i in range(len(shape)):
        if shape[i] is None:
            shape[i] = -1

    return helper.create_global_variable(
        name=name,
        shape=shape,
        dtype=dtype,
        type=core.VarDesc.VarType.LOD_TENSOR,
        stop_gradient=True,
        lod_level=lod_level,
        is_data=True,
        need_check_feed=False,
    )


def create_undefined_variable():
    from paddle.fluid.dygraph.dygraph_to_static.return_transformer import (
        RETURN_NO_VALUE_MAGIC_NUM,
    )

    var = data_layer_not_check(
        unique_name.generate("undefined_var"), [1], "float64"
    )
    var.stop_gradient = False
    # the variable is created in block(0), so we append the assign op in block(0) as well.
    helper = LayerHelper('create_undefined_variable', **locals())
    saved_block_ids = helper.main_program.current_block_idx
    helper.main_program.current_block_idx = 0
    assign(RETURN_NO_VALUE_MAGIC_NUM, var)
    helper.main_program.current_block_idx = saved_block_ids
    return var


class UndefinedVar:
    def __init__(self, name):
        self.name = name

    def check(self):
        raise UnboundLocalError(
            "local variable '{}' should be created before using it."
        )


class Dygraph2StaticException(Exception):
    def __init__(self, message):
        super().__init__(message)


def saw(x):
    if isinstance(x, UndefinedVar):
        return x.check()
    else:
        return x


def getfullargspec(target):
    if hasattr(inspect, "getfullargspec"):
        return inspect.getfullargspec(target)
    else:
        argspec = inspect.getargspec(target)
        return FullArgSpec(
            args=argspec.args,
            varargs=argspec.varargs,
            varkw=argspec.keywords,
            defaults=argspec.defaults,
            kwonlyargs=[],
            kwonlydefaults=None,
            annotations={},
        )


def parse_arg_and_kwargs(function):
    """
    Returns full argument names as a list, e.g. ['x', 'y', 'z'].
    """
    fullargspec = getfullargspec(function)
    arg_names = fullargspec.args
    if arg_names and 'self' == arg_names[0]:
        arg_names = fullargspec.args[1:]

    # parse default kwargs
    default_kwargs = {}
    default_values = fullargspec.defaults
    if default_values:
        assert len(default_values) <= len(arg_names)
        default_kwarg_names = arg_names[-len(default_values) :]
        default_kwargs = dict(zip(default_kwarg_names, default_values))

    return arg_names, default_kwargs
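
# Example (illustrative sketch; `foo` is a hypothetical user function):
#
#     def foo(self, x, y, z=3):
#         pass
#
#     arg_names, default_kwargs = parse_arg_and_kwargs(foo)
#     # arg_names -> ['x', 'y', 'z'], default_kwargs -> {'z': 3}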


def parse_varargs_name(function):
    """
    Returns the varargs name of the function, e.g. 'input' from `foo(x, *input)`.
    """
    fullargspec = getfullargspec(function)
    varargs = fullargspec.varargs
    return varargs


def type_name(v):
    return type(v).__name__


def make_hashable(x, error_msg=None):
    """
    Makes input `x` hashable.

    For some unhashable objects, such as `dict/list/set/np.ndarray`, the hash is computed from their values.
    """
    if isinstance(x, (tuple, list, set)):
        return tuple(map(make_hashable, x))

    try:
        hash(x)
    except TypeError:
        if isinstance(x, np.ndarray):
            # Note: `tostring()` will return the binary data from np.ndarray that
            # means different value will lead to different hash code.
            return hash(x.tostring())
        elif isinstance(x, dict):
            return tuple(map(make_hashable, x.values()))

        error_msg = error_msg or "Requires a hashable object."
        raise ValueError(error_msg + " But received type: %s" % type_name(x))

    return x
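
# Example (illustrative sketch of how unhashable containers are handled):
#
#     make_hashable([1, 2, 3])           # -> (1, 2, 3), a hashable tuple
#     make_hashable({'a': [1], 'b': 2})  # -> ((1,), 2), built from the dict values
#     make_hashable(3.14)                # already hashable, returned unchanged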


def _is_api_in_module_helper(obj, module_prefix):
    m = inspect.getmodule(obj)
    return m is not None and m.__name__.startswith(module_prefix)


def is_api_in_module(node, module_prefix):
    assert isinstance(node, gast.Call), "Input non-Call node for is_api_in_module"

    # Python can have gast.Call as function, for example: convert_call(func)(x)
    # We only check the most outside function
    func_node = node.func
    while isinstance(func_node, gast.Call):
        func_node = func_node.func

    func_str = astor.to_source(gast.gast_to_ast(func_node)).strip()
    try:
        # TODO(liym27):
        #  Consider a better way to import modules like:
        #  source_file = inspect.getfile(dyfunc)
        #  import_statements = ImportVisitor(source_file).transform()
        #  import_str = "".join(import_statements)
        import paddle
        import paddle.fluid as fluid
        import paddle.fluid.dygraph as dygraph
        import paddle.fluid.layers as layers
        import paddle.jit.dy2static as _jst

        from paddle.fluid.dygraph import to_variable
        from paddle import to_tensor

        return eval(
            "_is_api_in_module_helper({}, '{}')".format(func_str, module_prefix)
        )
    except Exception:
        return False


def is_dygraph_api(node):

    # Note: A api in module dygraph_to_static is not a real dygraph api.
    if is_api_in_module(node, DYGRAPH_TO_STATIC_MODULE_PREFIX):
        return False

    # TODO(liym27): A better way to determine whether it is a dygraph api.
    #  Consider the decorator @dygraph_only
    return is_api_in_module(node, DYGRAPH_MODULE_PREFIX)


def is_paddle_api(node):
    return is_api_in_module(node, PADDLE_MODULE_PREFIX)


def is_paddle_func(func):
    m = inspect.getmodule(func)
    return m is not None and m.__name__.startswith(PADDLE_MODULE_PREFIX)


# is_numpy_api cannot reuse is_api_in_module because of the numpy module problem
def is_numpy_api(node):
    assert isinstance(node, gast.Call), "Input non-Call node for is_numpy_api"
    func_str = astor.to_source(gast.gast_to_ast(node.func))
    try:
        import numpy as np

        module_result = eval(
            "_is_api_in_module_helper({}, '{}')".format(func_str, "numpy")
        )
        # BUG: np.random.uniform doesn't have module and cannot be analyzed
        # TODO: find a better way
        return module_result or (
            func_str.startswith("numpy.") or func_str.startswith("np.")
        )
    except Exception:
        return False


def _delete_keywords_from(node):
    assert isinstance(node, gast.Call)
    func_src = astor.to_source(gast.gast_to_ast(node.func))
    import paddle.fluid as fluid

    full_args = eval("inspect.getargspec({})".format(func_src))
    full_args_name = full_args[0]

    node.keywords = [k for k in node.keywords if k.arg in full_args_name]
    return


def to_static_api(dygraph_class):
    if dygraph_class in dygraph_class_to_static_api:
        return dygraph_class_to_static_api[dygraph_class]
    else:
        raise NotImplementedError(
            "Paddle dygraph API {} cannot be converted "
            "to static graph at present.".format(dygraph_class)
        )


def _add_keywords_to(node, dygraph_api_name):
    assert isinstance(node, gast.Call)
    if dygraph_api_name == "Linear":
        for ast_keyword in node.keywords:
            if ast_keyword.arg == "output_dim":
                ast_keyword.arg = "size"

        node.keywords.append(
            gast.keyword(
                arg="num_flatten_dims", value=gast.Constant(value=-1, kind=None)
            )
        )

    if dygraph_api_name == "BilinearTensorProduct":
        for ast_keyword in node.keywords:
            if ast_keyword.arg == "output_dim":
                ast_keyword.arg = "size"

    if dygraph_api_name == "PRelu":
        for ast_keyword in node.keywords:
            if ast_keyword.arg == "input":
                ast_keyword.arg = "x"
    return


def to_static_ast(node, class_node):
    assert isinstance(node, gast.Call)
    assert isinstance(class_node, gast.Call)
    static_api = to_static_api(class_node.func.attr)

    node.func = gast.Attribute(
        attr=static_api,
        ctx=gast.Load(),
        value=gast.Attribute(
            attr='layers',
            ctx=gast.Load(),
            value=gast.Name(
                ctx=gast.Load(), id='fluid', annotation=None, type_comment=None
            ),
        ),
    )

    update_args_of_func(node, class_node, 'forward')

    node.args.extend(class_node.args)
    node.keywords.extend(class_node.keywords)
    _add_keywords_to(node, class_node.func.attr)
    _delete_keywords_from(node)

    gast.fix_missing_locations(node)

    return node


def update_args_of_func(node, dygraph_node, method_name):
    assert isinstance(node, gast.Call)
    if method_name not in ["__init__", "forward"]:
        raise ValueError(
            "The method name of class to update args should be '__init__' or 'forward'"
        )

    class_src = astor.to_source(gast.gast_to_ast(dygraph_node.func))
    import paddle.fluid as fluid

    if method_name == "__init__" or eval(
        "issubclass({}, fluid.dygraph.Layer)".format(class_src)
    ):
        full_args = eval(
            "inspect.getargspec({}.{})".format(class_src, method_name)
        )
        full_args_name = [
            arg_name for arg_name in full_args[0] if arg_name != "self"
        ]
    else:
        full_args_name = []
    added_keywords = []
    for idx, arg in enumerate(node.args):
        added_keywords.append(gast.keyword(arg=full_args_name[idx], value=arg))

    node.args = []
    node.keywords = added_keywords + node.keywords


def create_api_shape_node(tensor_shape_node):
    assert isinstance(
        tensor_shape_node, (gast.Name, gast.Attribute, gast.Subscript)
    )

    if isinstance(tensor_shape_node, gast.Name):
        api_shape_node = gast.Call(
            func=gast.parse('paddle.shape').body[0].value,
            args=[tensor_shape_node],
            keywords=[],
        )
        return api_shape_node

    if isinstance(tensor_shape_node, gast.Attribute):
        api_shape_node = gast.Call(
            func=gast.parse('paddle.shape').body[0].value,
            args=[tensor_shape_node.value],
            keywords=[],
        )
        return api_shape_node

    if isinstance(tensor_shape_node, gast.Subscript):
        result_node = copy.deepcopy(tensor_shape_node)
        result_node.value = create_api_shape_node(result_node.value)
        return result_node


def get_constant_variable_node(name, value, shape=[1], dtype='int64'):
    return gast.parse(
        '%s = paddle.full(%s, "%s", %s)' % (name, str(shape), str(value), dtype)
    )


def get_attribute_full_name(node):
    assert isinstance(
        node, gast.Attribute
    ), "Input non-Attribute node to get attribute full name"
    return astor.to_source(gast.gast_to_ast(node)).strip()


def generate_name_node(name_ids, ctx=gast.Load(), gen_tuple_if_single=False):
    """
    If name_ids is a list, tuple, or set with multiple strings, this function
    generates a gast.Tuple of gast.Name nodes.
    If name_ids is a single string or contains only one string, this function
    returns a gast.Name if gen_tuple_if_single==False, otherwise a gast.Tuple
    with only one gast.Name.

    This function is used at several gast.Return statements.
    """
    if isinstance(name_ids, str):
        name_ids = [name_ids]
    if not isinstance(name_ids, (list, tuple, set)):
        raise TypeError(
            'name_ids must be list or tuple or set, but received %s'
            % type(name_ids)
        )

    def create_node_for_name(name):
        if '.' not in name:
            return gast.Name(
                id=name, ctx=ctx, annotation=None, type_comment=None
            )
        return gast.parse(name).body[0].value

    gast_names = [create_node_for_name(name_id) for name_id in name_ids]
    if len(gast_names) == 1 and not gen_tuple_if_single:
        name_node = gast_names[0]
    else:
        name_node = gast.Tuple(elts=gast_names, ctx=ctx)
    return name_node
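
# Example (illustrative sketch):
#
#     generate_name_node('x')         # -> a single gast.Name node
#     generate_name_node(['x', 'y'])  # -> a gast.Tuple holding two gast.Name nodes
#     generate_name_node('x', gen_tuple_if_single=True)  # -> gast.Tuple with one gast.Name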


def create_funcDef_node(nodes, name, input_args, return_name_ids):
    """
    Wrap all statements of nodes into one ast.FunctionDef, which can be
    called by ast.Call.
    """
    nodes = copy.copy(nodes)
    # add return statement
    if return_name_ids:
        nodes.append(gast.Return(value=generate_name_node(return_name_ids)))
    else:
        nodes.append(gast.Return(value=None))
    func_def_node = gast.FunctionDef(
        name=name,
        args=input_args,
        body=nodes,
        decorator_list=[],
        returns=None,
        type_comment=None,
    )
    return func_def_node


def index_in_list(array_list, item):
    try:
        return array_list.index(item)
    except ValueError:
        # Item not in array_list
        return -1
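
# Example (illustrative sketch):
#
#     index_in_list(['a', 'b'], 'b')  # -> 1
#     index_in_list(['a', 'b'], 'c')  # -> -1 instead of raising ValueError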


def create_assign_node(name, node):
    """
    Creates a `gast.Assign` node by given name_id as target and node as value.
    """
    targets = generate_name_node(name, ctx=gast.Store())
    assign_node = gast.Assign(targets=[targets], value=node)
    return targets, assign_node


def get_temp_dir():
    """
    Return @to_static temp directory.
    """
    dir_name = "paddle/to_static_tmp/{pid}".format(pid=os.getpid())
    temp_dir = os.path.join(os.path.expanduser('~/.cache'), dir_name)
    is_windows = sys.platform.startswith('win')
    if is_windows:
        temp_dir = os.path.normpath(temp_dir)

    if not os.path.exists(temp_dir):
        os.makedirs(temp_dir)

    return temp_dir


def ast_to_func(ast_root, dyfunc, delete_on_exit=True):
    """
    Transform modified AST of decorated function into python callable object.
    TODO: If only one of the inner functions is decorated instead of the main
    function, the other inner functions are invisible to the decorated function.
    """

    def remove_if_exit(dir_path):
        if os.path.exists(dir_path):
            shutil.rmtree(dir_path)

    def func_prefix(func):
        pre_fix = func.__name__
        if hasattr(func, '__self__'):
            try:
                pre_fix = func.__self__.__class__.__name__ + '_' + func.__name__
            except:
                pass
        return pre_fix

    source = ast_to_source_code(ast_root)
    source = _inject_import_statements() + source
    temp_dir = get_temp_dir()
    f = tempfile.NamedTemporaryFile(
        mode='w',
        prefix=func_prefix(dyfunc),
        suffix='.py',
        delete=False,
        dir=temp_dir,
        encoding='utf-8',
    )
    with f:
        module_name = os.path.basename(f.name[:-3])
        f.write(source)

    global DEL_TEMP_DIR
    if delete_on_exit and DEL_TEMP_DIR:
        # Clear temporary files in TEMP_DIR while exiting the Python process
        atexit.register(remove_if_exit, dir_path=temp_dir)
        DEL_TEMP_DIR = False

    func_name = dyfunc.__name__
    module = SourceFileLoader(module_name, f.name).load_module()
    # The 'forward' or 'another_forward' of 'TranslatedLayer' cannot be obtained
    # through 'func_name'. So set the special function name '__i_m_p_l__'.
    if hasattr(module, '__i_m_p_l__'):
        callable_func = getattr(module, '__i_m_p_l__')
        callable_func.__name__ = func_name
    elif hasattr(module, func_name):
        callable_func = getattr(module, func_name)
    else:
        raise ValueError(
            'Function: %s doesn\'t exist in the Module transformed from AST.'
            % func_name
        )
    # After transforming the dygraph function into callable_func saved in a tmp file,
    # it loses the global variables from import statements or those defined in the source file.
    # Recover the necessary variables through `__globals__`.
    recover_globals_attribute(dyfunc, callable_func)

    return callable_func, f.name


def _inject_import_statements():
    import_statements = [
        "import paddle",
        "from paddle import Tensor",
        "import paddle.fluid as fluid",
        "import paddle.jit.dy2static as _jst",
        "from typing import *",
        "import numpy as np",
        "import warnings",
        "warnings.filterwarnings('ignore', category=DeprecationWarning)",
    ]
    return '\n'.join(import_statements) + '\n'


def recover_globals_attribute(src_obj, dst_obj):
    attr_name = '__globals__'

    src_globals = getattr(src_obj, attr_name, {})
    dst_globals = getattr(dst_obj, attr_name, {})

    for k, v in src_globals.items():
        # ignore builtin attribute.
        if not (k.startswith('__') and k.endswith('__')):
            dst_globals[k] = v


def func_to_source_code(function, dedent=True):
    """
    Transforms function into raw string of source code.
    """
    if not (inspect.isfunction(function) or inspect.ismethod(function)):
        raise TypeError(
            "The type of 'function' should be a function or method, but received {}.".format(
                type(function).__name__
            )
        )
    source_code_list, _ = inspect.getsourcelines(function)
    # Replace comments with blank lines so that error messages are not misplaced
    source_code_list = [
        line if not line.lstrip().startswith('#') else '\n'
        for line in source_code_list
    ]
    source_code = ''.join(source_code_list)
    if dedent:
        source_code = textwrap.dedent(source_code)

    return source_code
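
# Example (illustrative sketch; `foo` is a hypothetical user function):
#
#     def foo(x):
#         # this comment line is replaced by a blank line
#         return x + 1
#
#     print(func_to_source_code(foo))
#     # def foo(x):
#     #
#     #     return x + 1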


def ast_to_source_code(ast_node):
    """
    Transforms ast node into source code.
    """
    if not isinstance(ast_node, (gast.AST, ast.AST)):
        raise TypeError(
            "Type of ast_root should be gast.AST or ast.AST, but received %s."
            % type(ast_node)
        )
    if isinstance(ast_node, gast.AST):
        ast_node = gast.gast_to_ast(ast_node)

    # Do not wrap lines even if they are too long
    def pretty_source(source):
        return ''.join(source)

    source_code = astor.to_source(ast_node, pretty_source=pretty_source)
    return source_code
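
# Example (illustrative sketch):
#
#     node = gast.parse("x = y + 1")
#     ast_to_source_code(node)  # -> 'x = y + 1\n'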


def is_candidate_node(node):
    """
    Nodes with the specified types may be dependent on a Tensor.
    """
    is_compare_node = isinstance(
        node,
        (
            gast.Compare,
            gast.BoolOp,
            gast.UnaryOp,
            gast.For,
            gast.If,
            gast.While,
        ),
    )
    # TODO(Aurelius84): `.numpy()` may be a customized function,
    # and should consider a more elegant way to solve this problem.
    has_numpy_attr = ".numpy()" in ast_to_source_code(node)
    return is_compare_node or has_numpy_attr


def compare_with_none(node):
    """
    Whether the comparator of `gast.Compare` node is `None`.
    """
    if isinstance(node, gast.Compare):
        for child in [node.left, node.comparators]:
            # node.comparators is a list.
            if isinstance(child, list):
                child = child[0]
            if (isinstance(child, gast.Constant) and child.value is None) or (
                isinstance(child, gast.Name) and child.id == 'None'
            ):
                return True
    return False


class IsControlFlowVisitor(gast.NodeVisitor):
    """
    Judge whether the control-flow ast_node from dygraph code depends on a paddle Tensor.
    `ast_node` can be gast.If, gast.For, gast.While, gast.If.test(gast.Compare, gast.BoolOp, gast.UnaryOp).

    If returns True,
    gast.If.test must meet at least one of the following requirements:
        1. involves at least one var whose type is Tensor.
        2. the Tensor var calls `.numpy()[]` interface or Tensor.shape is [1].
        3. involves Tensor.shape[i] and the shape[i] is unknown in compile time.
    gast.While must meet at least one of the requirements 1 to 5:
        4. has `break` statement.
        5. has `continue` statement.
    gast.For must meet at least one of the requirements 4 to 8:
        6. calls `range` function in `for` statement and the argument of range is Tensor.
        7. calls `enumerate` function in `for` statement and the argument of enumerate is Tensor.
        8. the iterable variable in `for` statement is Tensor.
        TODO: Support non-range case

    The following examples should not be considered as control_flow_if:
        1. `if Tensor_var` or `if Tensor_var is None`
        2. if Tensor.shape[i] is determined with fixed value (not -1 or None)

    Note: pred in ConditionalBlock require variable, which means all vars should be Tensor
          or transformed into Tensor, like fill_constant(shape=[1], dtype='int32', value=Tensor.shape[i]).

    TODO: 1. need to deal with `tensor.shape[i]` which need to eval the data of shape[i],
             because reshape_op may be called before this statement.
    """

    def __init__(
        self, ast_node, static_analysis_visitor=None, node_var_type_map=None
    ):
        assert isinstance(
            ast_node, gast.AST
        ), "Type of input node should be gast.AST, but received %s." % type(
            ast_node
        )
        self.ast_root = ast_node
        if static_analysis_visitor is None:
            from .static_analysis import StaticAnalysisVisitor

            static_analysis_visitor = StaticAnalysisVisitor(ast_node)
        self.static_analysis_visitor = static_analysis_visitor
        self.node_to_wrapper_map = (
            self.static_analysis_visitor.get_node_to_wrapper_map()
        )
        self.node_var_type_map = node_var_type_map

        self.is_control_flow_num = 0
        self._compare_node_tenor_set = set()

    def transform(self):
        node = self.ast_root
        if isinstance(node, gast.If):
            self._visit_If(node)
        elif isinstance(node, gast.For):
            self._visit_For(node)
        elif isinstance(node, gast.While):
            self._visit_While(node)
        else:
            self.visit(node)
        return self.is_control_flow_num > 0

    def _visit_If(self, node):
        assert isinstance(node, gast.If)
        self.visit(node.test)
        return

    def _visit_For(self, node):
        assert isinstance(node, gast.For)
        if isinstance(node.iter, gast.Call):
            # for in range(var[0]|var.numpy()[0]) or for in enumerate(var|var.numpy())
            if isinstance(node.iter.func, gast.Name):
                if (
                    node.iter.func.id == "range"
                    or node.iter.func.id == "enumerate"
                ):
                    for arg in node.iter.args:
                        self.visit(arg)
                else:
                    return
            # for in var.numpy()
            elif isinstance(node.iter.func, gast.Attribute):
                if node.iter.func.attr == 'numpy':
                    self._visit_Call(node.iter)
                else:
                    return
            else:
                return
        elif isinstance(node.iter, gast.Name):
            # for in var
            self.visit(node.iter)
        else:
            return

        for child_node in gast.walk(node):
            if isinstance(child_node, (gast.Continue, gast.Break)):
                self._visit_break_continue(child_node)
        return

    def _visit_While(self, node):
        assert isinstance(node, gast.While)
        test = node.test
        self.generic_visit(test)
        for child_node in gast.walk(node):
            if isinstance(child_node, (gast.Continue, gast.Break)):
                self._visit_break_continue(child_node)
        return

    def _visit_break_continue(self, node):
        assert isinstance(node, (gast.Break, gast.Continue))
        wrapper_node = self.node_to_wrapper_map.get(node)
        if not wrapper_node:
            # Transformed node is not in node_to_wrapper_map
            return

        while wrapper_node.parent:
            parent_node = wrapper_node.parent.node
            if isinstance(parent_node, (gast.For, gast.While)):
                if parent_node is self.ast_root:
                    self.is_control_flow_num += 1
                    return
                else:
                    return

            wrapper_node = wrapper_node.parent

        return

    def visit_BoolOp(self, node):
        for i, child in enumerate(node.values):
            self.visit(child)
        return node

    def visit_Compare(self, node):
        pre_control_flow_num = self.is_control_flow_num
        if not compare_with_none(node):
            self.generic_visit(node)
            for child in gast.walk(node):
                if isinstance(child, gast.Subscript):
                    self._visit_Subscript(child)
        if self.is_control_flow_num > pre_control_flow_num:
            self._compare_node_tenor_set.add(node)
        return node

    def _visit_Subscript(self, node):
        self.generic_visit(node)
        if hasattr(node, 'value') and isinstance(node.value, gast.Call):
            self._visit_Call(node.value)
        return node

    def _visit_Call(self, node):
        assert isinstance(node, gast.Call)
        if isinstance(node.func, gast.Attribute):
            attr_node = node.func
            if attr_node.attr == 'numpy':
                self.is_control_flow_num += 1

    def visit_Call(self, node):
        self._visit_Call(node)
        if is_paddle_api(node):
            self.is_control_flow_num += 1
        return node

    def visit_Name(self, node):
        if self._is_node_with_tensor(node, node.id):
            self.is_control_flow_num += 1
        return node

    def visit_Constant(self, node):
        if self._is_node_with_tensor(node, node.value):
            self.is_control_flow_num += 1
        return node

    def _is_node_with_tensor(self, node, name_id):
        from paddle.fluid.dygraph.dygraph_to_static.static_analysis import (
            NodeVarType,
        )

        # Look up the node_var_type_map by name_id.
        if self.node_var_type_map:
            if name_id and isinstance(name_id, str):
                var_type = self.node_var_type_map.get(name_id, None)
                if var_type and var_type & NodeVarType.TENSOR_TYPES:
                    return True
        # if not found, look up the node_to_wrapper_map by node.
        wrapper_node = self.node_to_wrapper_map.get(node, None)
        if wrapper_node is not None:
            if wrapper_node.node_var_type & NodeVarType.TENSOR_TYPES:
                return True

        return False

    def get_compare_nodes_with_tensor(self):
        return self._compare_node_tenor_set


# NOTE: inspect.unwrap() exists in PY3 but not in PY2.
def unwrap(func):
    """
    Returns the object wrapped by decorators.
    """

    def _is_wrapped(f):
        return hasattr(f, '__wrapped__')

    unwrapped_f = func
    while _is_wrapped(unwrapped_f):
        unwrapped_f = unwrapped_f.__wrapped__

    return unwrapped_f
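
# Example (illustrative sketch; `deco` is a hypothetical functools.wraps-based decorator):
#
#     import functools
#
#     def deco(f):
#         @functools.wraps(f)  # sets wrapper.__wrapped__ = f
#         def wrapper(*args, **kwargs):
#             return f(*args, **kwargs)
#         return wrapper
#
#     @deco
#     def foo():
#         pass
#
#     unwrap(foo) is foo.__wrapped__  # -> True, i.e. the undecorated foo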


def input_specs_compatible(src_input_specs, desired_input_specs):
    """
    Returns True if the two input specs are compatible, otherwise False.

    Args:
        src_input_specs (list or tuple[InputSpec etc.]): list/tuple of
            paddle.static.InputSpec or int/str etc.
        desired_input_specs (list or tuple[InputSpec etc.]): list/tuple of
            paddle.static.InputSpec or int/str etc.
    """
    len_specs = len(src_input_specs)
    if len_specs != len(desired_input_specs):
        # NOTE(chenweihang): if the input_spec of jit.save is a subset of
        # input_spec of to_static, also compatible
        for spec in src_input_specs:
            if spec not in desired_input_specs:
                return False
    else:
986 987 988
        for (src_spec, desired_spec) in zip(
            src_input_specs, desired_input_specs
        ):
989
            if isinstance(src_spec, paddle.static.InputSpec) or isinstance(
990 991
                desired_spec, paddle.static.InputSpec
            ):
992 993 994 995
                if not _compatible_tensor_spec(src_spec, desired_spec):
                    return False
            else:
                if not _compatible_non_tensor_spec(src_spec, desired_spec):
C

    return True


def _compatible_tensor_spec(src_spec, desired_spec):
    """
    Check whether two tensor type spec is compatible.
    """
    for spec in [src_spec, desired_spec]:
        if not isinstance(spec, paddle.static.InputSpec):
            return False
    src_shape = src_spec.shape
    other_shape = desired_spec.shape
    len_shape = len(src_shape)
    if len_shape != len(other_shape):
        return False
    for j in range(len_shape):
        if src_shape[j] is None or src_shape[j] < 0:
            continue
        if other_shape[j] is None or other_shape[j] < 0:
            continue
        if src_shape[j] != other_shape[j]:
            return False

    src_dtype = convert_dtype(src_spec.dtype)
    other_dtype = convert_dtype(desired_spec.dtype)
    if src_dtype != other_dtype:
        return False

    return True


def _compatible_non_tensor_spec(src_spec, desired_spec):
    """
    Check whether two non-tensor type spec is compatible.
    """

    def hash_value(spec):
        try:
            hash_val = make_hashable(spec)
        except:
            hash_val = None
        return hash_val

    src_hash_val = hash_value(src_spec)
    desired_hash_val = hash_value(desired_spec)

    if src_hash_val != desired_hash_val:
        return False
    else:
        return True


class NameScope:
    def __init__(self):
        """
        A NameScope is an object which manages all the variable names.
        Only FunctionDef and ControlFlow nodes will have a namescope property.

        type can be "function" and "controlflow"

        we don't analyze read-only variables because they don't affect the analysis.
        """
        self.globals = set()
        self.nonlocals = set()
        self.args = set()
        self.father = None  # point to the nearest function name scope.
        self.w_vars = set()  # all qualified + normal names been stored
        self.created = set()  # useful for control flow compatibility
        # only valid in control_flow nodes
        # may be remove later.
        self.push_pop_vars = set()  # we call push and pop in the vars

    def set_father(self, father):
        self.father = father

    def existed_vars(self):
        """vars existing in current scope.
        they must not contain qualified names.
        """
        local_vars = self.w_vars - self.globals - self.nonlocals - self.args
        return set(filter(lambda x: '.' not in x, local_vars))

    def created_vars(self):
        return self.created

    def modified_vars(self):
        # may be globals / non-locals / args / qualified names and created_vars
        return self.w_vars

    def variadic_length_vars(self):
        """
        At present, we do not support global append, such as

        import numpy as np
        a = []
        def func():
            a.append(1)  # global name `a`, we will raise a warning.
            np.append(a, 1)  # global name `np`, we will raise a warning.
        """
        non_global_push_pop_names = []
        for var in self.push_pop_vars:
            if self._is_simple_name(var) and self.is_global_var(var):
                warnings.warn(
                    f"Find variable `{var}` defined in global scope"
                    f" and call `{var}.append() or {var}.pop()`"
                    f", which will be ignored and never be transfered into"
1104 1105
                    f" tensor array."
                )
            else:
                non_global_push_pop_names.append(var)
        return set(non_global_push_pop_names)

    def control_flow_vars(self):
        valid_names = self.w_vars
        tmp = self.father.globals & valid_names
        return {"global": tmp, "nonlocal": self.w_vars - tmp}

    def _is_simple_name(self, name):
        if '.' in name or '[' in name:
            return False
        return True

    def is_global_var(self, name):
        """
        Return whether the name is a var created in global scope.
        Search from bottom to top. If it is not created or modified,
        it means global vars; otherwise, it means local vars.
        Only valid after FunctionNameLivenessAnalysis visitor.
        """
        assert self._is_simple_name(
            name
        ), "is_global_var accept a simple name, but get `{name}`."
        ancestor = self
        while ancestor is not None:
            if name in ancestor.globals:
                return True
            if name in (ancestor.nonlocals | ancestor.w_vars):
                return False
            ancestor = ancestor.father
        return True

    def is_local_var(self, name):
        return not self.is_global_var(name)

    def merge_from(self, name_scope):
        self.globals |= name_scope.globals
        self.nonlocals |= name_scope.nonlocals
        self.args |= name_scope.args
        self.w_vars |= name_scope.w_vars
        self.push_pop_vars |= name_scope.push_pop_vars


class FunctionNameLivenessAnalysis(gast.NodeVisitor):
    """analyze the liveness of a function.

    every variable stored in this scope will be collected,
    along with global/nonlocal information and
    push_pop information.

    1. global variable is stored in node.var_globals.
    2. nonlocal variable is stored in node.var_nonlocals.
    3. arguments is stored in node.var_args.
    4. if a variable's append or pop attribute is called,
       it will be collected in push_pop_vars. They are
       used for transformation to tensor_array.
       NOTE: push_pop_vars **may not** be in w_vars.
       a.append(0) doesn't modify the variable a, but the content
       of a.

    For example:

    def func(*args, **kargs):
        a = 12
        global i,j
        nonlocal x,y
        print(a)
        i = k
        b = []
        c = [1,2,3]
        for m in range(10):
            q = 12
            b.append(1)
            c.pop()

    After this visitor we have:
    # node is the FunctionDef node with name: "func"
    node.pd_scope = NameScope(
        globals = ['i', 'j'],
        nonlocals = ['x', 'y'],
        args = ['args', 'kargs'],
        w_vars = ['a', 'i', 'q', 'm', 'c', 'b']
        push_pop_vars = ['b', 'c']
    )
    """

    def __init__(self, root_node):
        self.scope_node_stack = []  # controlflow, functiondef node
        self.visit(root_node)

    def _reset_name_scope(self, node):
        # always reset the node as empty namescope.
        setattr(node, "pd_scope", NameScope())

    def _get_name_scope(self, node):
        if not hasattr(node, "pd_scope"):
            setattr(node, "pd_scope", NameScope())
        return node.pd_scope

    def _current_name_scope(self):
        return self._get_name_scope(self.scope_node_stack[-1])

    def _father_name_scope(self):
        if len(self.scope_node_stack) == 1:
            return None
        return self._get_name_scope(self.scope_node_stack[-2])

    def _nearest_function_scope(self):
        if len(self.scope_node_stack) == 1:
            return None
        for node in self.scope_node_stack[-2::-1]:
            if isinstance(node, gast.FunctionDef):
                return self._get_name_scope(node)

    def visit_ListComp(self, node):
        """[ i for i in range(10) ]
        In this case, `i` will not be created in FunctionScope.
        We don't collect `i` by not calling generic_visit.
        """
        pass

    def visit_DictComp(self, node):
        """the same as ListComp."""
        pass

    def visit_Name(self, node):
        self.generic_visit(node)
        write_context = (gast.Store, gast.AugStore, gast.Del)
        if isinstance(node.ctx, write_context):
            self._current_name_scope().w_vars.add(node.id)

    def visit_FunctionDef(self, node):
        def pre_func():
            self._current_name_scope().args |= set(
                self._get_argument_names(node)
            )

        def post_func():
            """NOTE: why we need merge w_vars and push_pop_vars here ?
            because we do ifelse_transformer after loop_transformer. Loops will changed into functioons. but we know this function will be called in if. so we add w_vars to father function scope.
            """
            from paddle.fluid.dygraph.dygraph_to_static.loop_transformer import (
                WHILE_CONDITION_PREFIX,
                WHILE_BODY_PREFIX,
                FOR_CONDITION_PREFIX,
                FOR_BODY_PREFIX,
            )
            from paddle.fluid.dygraph.dygraph_to_static.ifelse_transformer import (
                TRUE_FUNC_PREFIX,
                FALSE_FUNC_PREFIX,
            )

            control_flow_function_def = [
                WHILE_CONDITION_PREFIX,
                WHILE_BODY_PREFIX,
                FOR_CONDITION_PREFIX,
                FOR_BODY_PREFIX,
                TRUE_FUNC_PREFIX,
                FALSE_FUNC_PREFIX,
            ]

            def is_control_flow_def_node():
                for prefix in control_flow_function_def:
                    if node.name.startswith(prefix):
                        return True
                return False

            if self._father_name_scope() and is_control_flow_def_node():
                self._father_name_scope().w_vars |= (
                    self._current_name_scope().w_vars
                )
                self._father_name_scope().push_pop_vars |= (
                    self._current_name_scope().push_pop_vars
                )

        self._visit_scope_node(node, pre_func, post_func)

    def _visit_scope_node(self, node, pre_func, post_func):
        """scope node main visit logic.
        pre_func and post_func are callbacks
        """
        self._reset_name_scope(node)
        self.scope_node_stack.append(node)
        self._current_name_scope().set_father(self._nearest_function_scope())
        if pre_func:
            pre_func()
        self.generic_visit(node)
        if post_func:
            post_func()
        self.scope_node_stack.pop()

    def _visit_controlflow_node(self, node):
        def post_func():
            self._father_name_scope().merge_from(self._current_name_scope())
            self._nearest_function_scope().merge_from(
                self._current_name_scope()
            )
            self._current_name_scope().created = (
                self._nearest_function_scope().existed_vars()
                - node.before_created
            )
            # gather created vars into father and used in CreateUndefinedVarTransform
            self._nearest_function_scope().created |= (
                self._current_name_scope().created
            )

        def pre_func():
            setattr(
                node,
                "before_created",
                self._nearest_function_scope().existed_vars(),
            )

        self._visit_scope_node(node, pre_func, post_func)

    def visit_For(self, node):
        self._visit_controlflow_node(node)

    def visit_While(self, node):
        self._visit_controlflow_node(node)

    def visit_If(self, node):
        self._visit_controlflow_node(node)

    def visit_Global(self, node):
        self._current_name_scope().globals |= set(node.names)

    def visit_Nonlocal(self, node):
        self._current_name_scope().nonlocals |= set(node.names)

    def visit_Attribute(self, node):
        self.generic_visit(node)
        write_context = (gast.Store, gast.AugStore, gast.Del)
        if isinstance(node.ctx, write_context):
            name = ast_to_source_code(node).strip()
            self._current_name_scope().w_vars.add(name)

    def visit_Call(self, node):
        self.generic_visit(node)
        if not isinstance(node.func, gast.Attribute):
            return
        variadic_length_method = ['append', 'pop']
        if node.func.attr not in variadic_length_method:
            return
        # We don't treat append and pop as write operations on the name itself, just as a[i] = 10 does not rebind a.
        name = ast_to_source_code(node.func.value).strip()
        self._current_name_scope().push_pop_vars.add(name)

    def _get_argument_names(self, node):
        """get all arguments name in the functiondef node.
        this node is local to the function and shouldn't
        be created.
        """
        assert isinstance(
            node, gast.FunctionDef
        ), "Input node is not function define node"
        names = [a for a in node.args.args]
        names.append(node.args.vararg)
        names.append(node.args.kwarg)
        names = [i.id for i in names if i is not None]
        return names


def create_get_args_node(names):
    """
    Create get_args function as follows:

        def get_args_0():
            nonlocal x, y
            return x, y
    """

    def empty_node():
        func_def = """
        def {func_name}():
            return
        """.format(
            func_name=unique_name.generate(GET_ARGS_FUNC_PREFIX)
        )
        return gast.parse(textwrap.dedent(func_def)).body[0]

    assert isinstance(names, (list, tuple))
    node = create_nonlocal_stmt_nodes(names)
    if not names:
        return empty_node()
    if node == []:
        nonlocal_vars = "\n"
    else:
        nonlocal_vars = ast_to_source_code(node[0])
    template = """
    def {func_name}():
        {nonlocal_vars}
        return {vars},
    """
    func_def = template.format(
        func_name=unique_name.generate(GET_ARGS_FUNC_PREFIX),
1403
        nonlocal_vars=nonlocal_vars,
1404 1405
        vars=",".join(names),
    )
1406 1407 1408 1409 1410 1411 1412 1413 1414 1415 1416 1417 1418 1419 1420 1421
    return gast.parse(textwrap.dedent(func_def)).body[0]


def create_set_args_node(names):
    """
    Create set_args function as follows:

        def set_args_0(__args):
            nonlocal x, y
            x, y = __args
    """

    def empty_node():
        func_def = """
        def {func_name}({args}):
            pass
1422 1423 1424
        """.format(
            func_name=unique_name.generate(SET_ARGS_FUNC_PREFIX), args=ARGS_NAME
        )
1425 1426 1427
        return gast.parse(textwrap.dedent(func_def)).body[0]

    assert isinstance(names, (list, tuple))
1428
    node = create_nonlocal_stmt_nodes(names)
1429 1430
    if not names:
        return empty_node()
1431
    if node == []:
1432 1433
        nonlocal_vars = "\n"
    else:
1434
        nonlocal_vars = ast_to_source_code(node[0])
1435 1436
    template = """
    def {func_name}({args}):
1437
        {nonlocal_vars}
1438
        {vars}, = {args}
1439 1440 1441 1442
    """
    func_def = template.format(
        func_name=unique_name.generate(SET_ARGS_FUNC_PREFIX),
        args=ARGS_NAME,
1443
        nonlocal_vars=nonlocal_vars,
1444 1445
        vars=",".join(names),
    )
1446 1447 1448
    return gast.parse(textwrap.dedent(func_def)).body[0]


def create_nonlocal_stmt_nodes(names):
    assert isinstance(names, (list, tuple))

    mapped = list(filter(lambda n: '.' not in n, names))
    mapped = list(filter(lambda n: '[' not in n, mapped))
    names = sorted(
        mapped, key=mapped.index
    )  # to keep the order, we can't use set() to unique
    if not names:
        return []
    func_code = "nonlocal {}".format(','.join(names))
    return [gast.parse(func_code).body[0]]
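
# Example (illustrative sketch):
#
#     nodes = create_nonlocal_stmt_nodes(['x', 'y', 'a.b', 'z[0]'])
#     ast_to_source_code(nodes[0])  # -> source like 'nonlocal x, y'
#     # qualified names ('a.b') and subscripted names ('z[0]') are skipped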


class GetterSetterHelper:
    """we have two classes of names in setter and getter function:
    w_vars(loop_vars) + push_pop_vars
    To simplify the setter logic in convert_while and convert_cond,
    we extract the helper class here.
    """

    def __init__(self, getter_func, setter_func, *name_lists):
        name_lists = map(lambda x: [] if x is None else x, name_lists)
        name_sets = map(lambda x: set(x), name_lists)
        self._union = list(reduce(lambda x, y: x | y, name_sets, set()))
        self._union.sort()
        self.getter = getter_func
        self.setter = setter_func
        self.name2id = {name: idx for idx, name in enumerate(self._union)}

    def union(self):
        return self._union

    def get(self, names):
        if names is None:
            names = []
        vars = self.getter()
        if vars is None:
            return tuple()
        for n in names:
            assert (
                n in self.name2id
            ), "the name `{}` not in name union set`{}`.".format(
                n, self.name2id.keys()
            )
        return tuple(map(lambda n: vars[self.name2id[n]], names))

    def set(self, names, values):
        if names is None:
            names = []
        if values is None:
            values = []
        vars = self.getter()
        if vars is None:
            return
        for n in names:
            assert (
                n in self.name2id
            ), "the name `{}` not in name union set`{}`.".format(
                n, self.name2id.keys()
            )
        vars = list(vars)
        indices = list(map(lambda n: self.name2id[n], names))
        for i, v in zip(indices, values):
            vars[i] = v
        self.setter(vars)
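
# Example (illustrative sketch; the getter must return values in union() order):
#
#     env = {'x': 1, 'y': 2}
#     helper = GetterSetterHelper(
#         lambda: [env['x'], env['y']],                    # getter
#         lambda vals: env.update(zip(['x', 'y'], vals)),  # setter
#         ['y', 'x'],
#     )
#     helper.union()             # -> ['x', 'y']
#     helper.get(('x',))         # -> (1,)
#     helper.set(('y',), (20,))  # env becomes {'x': 1, 'y': 20}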


def create_name_str(name_ids):
    """
    Return "('x', 'y')" for [x, y]
    """
    if not name_ids:
        return 'None'

    names_str = ["'%s'" % (name.replace("'", "\\'")) for name in name_ids]
    return "(%s, )" % ','.join(names_str)