#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

# gast is a generic AST to represent Python2 and Python3's Abstract Syntax Tree(AST).
# It provides a compatibility layer between the AST of various Python versions,
# as produced by ast.parse from the standard ast module.
# See details in https://github.com/serge-sans-paille/gast/
import os
from paddle.utils import gast
from paddle.fluid.dygraph.dygraph_to_static.base_transformer import BaseTransformer
from paddle.fluid.dygraph.dygraph_to_static.early_return_transformer import EarlyReturnTransformer
from paddle.fluid.dygraph.dygraph_to_static.assert_transformer import AssertTransformer
from paddle.fluid.dygraph.dygraph_to_static.basic_api_transformer import BasicApiTransformer
27
from paddle.fluid.dygraph.dygraph_to_static.break_continue_transformer import BreakContinueTransformer
from paddle.fluid.dygraph.dygraph_to_static.break_continue_transformer import BreakTransformOptimizer
from paddle.fluid.dygraph.dygraph_to_static.call_transformer import CallTransformer
from paddle.fluid.dygraph.dygraph_to_static.cast_transformer import CastTransformer
from paddle.fluid.dygraph.dygraph_to_static.grad_transformer import GradTransformer
from paddle.fluid.dygraph.dygraph_to_static.ifelse_transformer import IfElseTransformer
from paddle.fluid.dygraph.dygraph_to_static.list_transformer import ListTransformer
from paddle.fluid.dygraph.dygraph_to_static.logical_transformer import LogicalTransformer
from paddle.fluid.dygraph.dygraph_to_static.loop_transformer import LoopTransformer
from paddle.fluid.dygraph.dygraph_to_static.print_transformer import PrintTransformer
from paddle.fluid.dygraph.dygraph_to_static.return_transformer import ReturnTransformer
from paddle.fluid.dygraph.dygraph_to_static.static_analysis import StaticAnalysisVisitor
from paddle.fluid.dygraph.dygraph_to_static.tensor_shape_transformer import TensorShapeTransformer

from paddle.fluid.dygraph.dygraph_to_static import logging_utils
from paddle.fluid.dygraph.dygraph_to_static.utils import ast_to_source_code
from paddle.fluid.dygraph.dygraph_to_static.utils import get_attribute_full_name

__all__ = ['DygraphToStaticAst']

DECORATOR_NAMES = ['declarative', 'to_static', 'dygraph_to_static_func']


def apply_optimization(transformers):
    """
    Judge whether to apply optimized transformations such as BreakTransformOptimizer.
    Not all optimizations are applied by default; they are enabled by
    'export FLAGS_optim_transformation=1'.
    """
    flag = str(
        os.environ.get('FLAGS_optim_transformation')) in ['1', 'True', 'true']
    if flag:
        transformers.insert(3, BreakTransformOptimizer)
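

# Illustrative sketch only (the pipeline below is a hypothetical, truncated list;
# the full default pipeline is built in DygraphToStaticAst.transfer_from_node_type):
#
#   os.environ['FLAGS_optim_transformation'] = '1'
#   pipeline = [EarlyReturnTransformer, BasicApiTransformer,
#               TensorShapeTransformer, ListTransformer]
#   apply_optimization(pipeline)
#   assert pipeline[3] is BreakTransformOptimizer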


class DygraphToStaticAst(BaseTransformer):
    """
    Main class to transform Dygraph to Static Graph
    """

    def __init__(self):
        self.translator_logger = logging_utils.TranslatorLogger()

    def get_static_ast(self, root):
        # Save the root because some analyses may need the global AST.
        self.root = root
        self.static_analysis_visitor = StaticAnalysisVisitor(root)
        self.static_analysis_root = (
            self.static_analysis_visitor.get_node_wrapper_root())
        self.decorate_func_name = None
        self.transfer_from_node_type(self.static_analysis_root)
        return self.static_analysis_root

    def _apply(self, transformer, node_wrapper, log_level):
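        # Run one transformer over the wrapped AST, then (when code logging is
        # enabled) dump the transformed source tagged with this transformer's
        # 1-based position in the pipeline.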
        transformer(node_wrapper).transform()
        self.translator_logger.log_transformed_code(log_level, self.root,
                                                    transformer.__name__)

    def transfer_from_node_type(self, node_wrapper):
        self.translator_logger.log(
            1, "Source code: \n{}".format(ast_to_source_code(self.root)))
        # Generic transformation
        self.visit(node_wrapper.node)

        transformers = [
            EarlyReturnTransformer,
            BasicApiTransformer,  # Basic Api
            TensorShapeTransformer,  # Tensor.shape -> layers.shape(Tensor)
            ListTransformer,  # List used in control flow
            BreakContinueTransformer,  # break/continue in loops
            ReturnTransformer,  # return in functions
            LogicalTransformer,  # logical and/or/not
            #CreateVariableTransformer,  # create undefined var for if / while / for
            LoopTransformer,  # for/while -> while_op
            IfElseTransformer,  # if/else -> cond_op
            AssertTransformer,  # assert statement
            PrintTransformer,  # print statement
            CallTransformer,  # transform call recursively
            CastTransformer,  # type casting statement
            GradTransformer,  # transform paddle.grad to paddle.gradients
        ]

        apply_optimization(transformers)

        for index, transformer in enumerate(transformers):
            self._apply(transformer, node_wrapper, log_level=index + 1)

        self.translator_logger.log_transformed_code(
            logging_utils.LOG_AllTransformer, self.root, "All Transformers")

    def visit_FunctionDef(self, node):
        if self.decorate_func_name is None:
            self.decorate_func_name = node.name

        self.generic_visit(node)
        # Remove the decorated name of dygraph_to_static
        if hasattr(node, 'decorator_list'):
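            # Name/Attribute decorators that are not dygraph_to_static decorators
            # raise NotImplementedError; recognized translate decorators are
            # dropped, so the rebuilt decorator_list stays empty.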
            decorator_list = []
            for d in node.decorator_list:
                if isinstance(d, gast.Name) and d.id not in DECORATOR_NAMES:
                    raise NotImplementedError(
                        "ProgramTranslator hasn't implemented multiple decorators. Please remove "
                        + d.id + " in " + self.decorate_func_name)
                if isinstance(d, gast.Attribute):
                    full_attribute_name = get_attribute_full_name(d)
                    has_translate_decorator = False
                    for deco in DECORATOR_NAMES:
                        if deco in full_attribute_name:
                            has_translate_decorator = True
                            break
                    if not has_translate_decorator:
                        raise NotImplementedError(
                            "ProgramTranslator hasn't implemented multiple decorators. Please remove "
                            + full_attribute_name + " in " +
                            self.decorate_func_name)
            node.decorator_list = decorator_list
        return node

    def get_module_name(self):
        """
        Return the main function name which will be used as module name
        in ast_to_func.
        """
        # Should consider BaseAPITransformer, which adds a new module name in Yamei's PR.
        assert self.decorate_func_name, "decorate_func_name shall not be None."
        return self.decorate_func_name