#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# gast is a generic AST to represent Python2 and Python3's Abstract Syntax Tree(AST).
# It provides a compatibility layer between the AST of various Python versions,
# as produced by ast.parse from the standard ast module.
# See details in https://github.com/serge-sans-paille/gast/

import os

from paddle.utils import gast
from paddle.fluid.dygraph.dygraph_to_static.base_transformer import BaseTransformer
from paddle.fluid.dygraph.dygraph_to_static.early_return_transformer import EarlyReturnTransformer
from paddle.fluid.dygraph.dygraph_to_static.assert_transformer import AssertTransformer
from paddle.fluid.dygraph.dygraph_to_static.basic_api_transformer import BasicApiTransformer
from paddle.fluid.dygraph.dygraph_to_static.break_continue_transformer import BreakContinueTransformer
from paddle.fluid.dygraph.dygraph_to_static.break_continue_transformer import BreakTransformOptimizer
from paddle.fluid.dygraph.dygraph_to_static.call_transformer import CallTransformer
from paddle.fluid.dygraph.dygraph_to_static.cast_transformer import CastTransformer
from paddle.fluid.dygraph.dygraph_to_static.grad_transformer import GradTransformer
from paddle.fluid.dygraph.dygraph_to_static.ifelse_transformer import IfElseTransformer
from paddle.fluid.dygraph.dygraph_to_static.list_transformer import ListTransformer
from paddle.fluid.dygraph.dygraph_to_static.logical_transformer import LogicalTransformer
from paddle.fluid.dygraph.dygraph_to_static.loop_transformer import LoopTransformer
from paddle.fluid.dygraph.dygraph_to_static.print_transformer import PrintTransformer
from paddle.fluid.dygraph.dygraph_to_static.return_transformer import ReturnTransformer
from paddle.fluid.dygraph.dygraph_to_static.create_variable_transformer import CreateVariableTransformer
from paddle.fluid.dygraph.dygraph_to_static.static_analysis import StaticAnalysisVisitor
from paddle.fluid.dygraph.dygraph_to_static.tensor_shape_transformer import TensorShapeTransformer
from paddle.fluid.dygraph.dygraph_to_static.decorator_transformer import DecoratorTransformer

from paddle.fluid.dygraph.dygraph_to_static import logging_utils
from paddle.fluid.dygraph.dygraph_to_static.utils import ast_to_source_code
from paddle.fluid.dygraph.dygraph_to_static.utils import get_attribute_full_name

__all__ = ['DygraphToStaticAst']

def apply_optimization(transformers):
    """
    Judge whether to apply optimized transformation, such as BreakTransformOptimizer.
    Not all optimized transformations are applied by default; they are enabled
    via 'export FLAGS_optim_transformation=1'.

    Args:
        transformers (list): transformer classes to be applied in order.
            Mutated in place when the optimization flag is set.
    """
    # `os.environ.get` returns None when the flag is unset, which is not one
    # of the accepted truthy spellings, so optimization stays off by default.
    flag = os.environ.get('FLAGS_optim_transformation') in ['1', 'True', 'true']
    if flag:
        # Index 3 places the optimizer just before BreakContinueTransformer
        # in the default pipeline, so it can pre-process break statements.
        transformers.insert(3, BreakTransformOptimizer)


class DygraphToStaticAst(BaseTransformer):
    """
    Main class to transform Dygraph to Static Graph.

    Runs a fixed pipeline of smaller transformers over the AST, each of
    which rewrites one Python construct (loops, if/else, break/continue, ...).
    """

    def __init__(self):
        # Logger used to dump the transformed code after each pipeline stage.
        self.translator_logger = logging_utils.TranslatorLogger()

    def get_static_ast(self, root):
        """Run the full transformation pipeline on `root` and return the
        wrapped static-analysis root."""
        # Save root: some analyses may need access to the global AST.
        self.root = root
        self.decorate_func_name = None
        self.static_analysis_visitor = StaticAnalysisVisitor(root)
        self.static_analysis_root = \
            self.static_analysis_visitor.get_node_wrapper_root()
        self.transfer_from_node_type(self.static_analysis_root)
        return self.static_analysis_root

    def _apply(self, transformer, node_wrapper, log_level):
        """Instantiate and run one transformer, then log the resulting code."""
        transformer(node_wrapper).transform()
        self.translator_logger.log_transformed_code(log_level, self.root,
                                                    transformer.__name__)

    def transfer_from_node_type(self, node_wrapper):
        """Apply every registered transformer, in order, to the wrapped AST."""
        source = ast_to_source_code(self.root)
        self.translator_logger.log(
            1, "Source code: \n{}".format(source))

        # Generic transformation first (dispatches through self.visit_*).
        self.visit(node_wrapper.node)

        transformers = [
            EarlyReturnTransformer,
            BasicApiTransformer,  # Basic Api
            TensorShapeTransformer,  # Tensor.shape -> layers.shape(Tensor)
            #ListTransformer,  # List used in control flow
            BreakContinueTransformer,  # break/continue in loops
            ReturnTransformer,  # return in functions
            LogicalTransformer,  # logical and/or/not
            CreateVariableTransformer,  # create undefined var for if / while / for
            LoopTransformer,  # for/while -> while_op
            IfElseTransformer,  # if/else -> cond_op
            AssertTransformer,  # assert statement
            PrintTransformer,  # print statement
            CallTransformer,  # transform call recursively
            CastTransformer,  # type casting statement
            #GradTransformer,  # transform paddle.grad to paddle.gradients
            DecoratorTransformer,  # transform decorators to function call
        ]

        # Optionally splice in flag-controlled optimized transformers.
        apply_optimization(transformers)

        for log_level, transformer in enumerate(transformers, start=1):
            self._apply(transformer, node_wrapper, log_level=log_level)

        self.translator_logger.log_transformed_code(
            logging_utils.LOG_AllTransformer, self.root, "All Transformers")

    def visit_FunctionDef(self, node):
        """Record the first (outermost) function name seen during the visit."""
        if self.decorate_func_name is None:
            self.decorate_func_name = node.name
        self.generic_visit(node)
        return node

    def get_module_name(self):
        """
        Return the main function name which will be used as module name
        in ast_to_func.
        """
        # Should consider BaseAPITransformer which add new module name in Yamei's PR.
        assert self.decorate_func_name, "decorate_func_name shall not be None."
        return self.decorate_func_name