# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import inspect
import textwrap
import threading
import warnings
import weakref

from paddle.fluid import _non_static_mode, core, framework
from paddle.fluid.data_feeder import check_type
from paddle.fluid.dygraph import layers
from paddle.fluid.dygraph.base import param_guard, switch_to_static_graph
from paddle.utils import flatten, gast

from . import error, logging_utils
from .ast_transformer import DygraphToStaticAst
from .function_spec import (
    FunctionSpec,
    _hash_spec_names,
    get_buffers,
    get_parameters,
)
from .origin_info import (
    attach_origin_info,
    create_and_update_origin_info_map,
    update_op_callstack_with_origin_info,
)
from .partial_program import PartialProgramLayerHook, partial_program_from
from .utils import (
    ALREADY_D2S,
    ast_to_func,
    ast_to_source_code,
    func_to_source_code,
    input_specs_compatible,
    is_paddle_func,
    make_hashable,
    prim_or_cinn_is_enabled,
    type_name,
    unwrap,
)

__all__ = []

# For each traced function, we cap the number of cached programs at `MAX_TRACED_PROGRAM_COUNT` = 10
# out of caching-performance considerations. Once the threshold is exceeded, a warning is raised
# so users can make sure the conversion is behaving as expected.
MAX_TRACED_PROGRAM_COUNT = 10

CONVERSION_OPTIONS = "__jst_not_to_static"
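
# Sketch of how `CONVERSION_OPTIONS` is consumed (the options object is attached
# by `paddle.jit.not_to_static`; `_FakeOptions` below is a hypothetical stand-in
# used only for illustration):
#
#     class _FakeOptions:
#         not_convert = True
#
#     def foo(x):
#         return x + 1
#
#     setattr(foo, CONVERSION_OPTIONS, _FakeOptions())
#     # `convert_to_static(foo)` (defined below) will now skip transpiling `foo`
#     # and return it unchanged.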


def synchronized(func):
    func.__lock__ = threading.Lock()

    def lock_func(*args, **kwargs):
        with func.__lock__:
            return func(*args, **kwargs)

    return lock_func
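
# A minimal usage sketch of `synchronized` (hypothetical function, for
# illustration only): every call first acquires the lock stored on the original
# function object, so concurrent calls are serialized.
#
#     @synchronized
#     def bump(counter):
#         counter['n'] += 1
#         return counter['n']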


class FunctionCache:
    """
    Caches the transformed functions to avoid redundant conversions of the same function.
    """

    def __init__(self):
        # Caches the converted static functions. {dygraph_func: static_func}
        self._converted_static_func_caches = weakref.WeakKeyDictionary()
        # Caches the converted ast node for same source code. {source_code: ast_root}
        self._code_to_ast_caches = dict()
        self._dygraph_to_static = DygraphToStaticAst()

    def convert_with_cache(self, func):
        """
        Returns the cached static function, or converts the function on its first encounter.
        """
        # If the cache hits, return the cached function directly.
        static_func = self._converted_static_func_caches.get(func, None)

        if static_func is None:
            static_func = self._convert(func)
            self._converted_static_func_caches[func] = static_func

        return static_func

    def _convert(self, func):
        """
        Converts a dygraph function into a static function. For two functions with the same
        dedented source code, the second one reuses the transformed AST node of the first.

        For example:
            # A.py
            def foo(x, y):
                z = x + y
                return z

            # B.py
            def foo(x, y):
                z = x + y
                return z

        If the conversion of A.foo happens after B.foo, it will reuse the transformed ast node of B.foo
        to speed up the conversion.
        """
        # Note: In Python 2, inspecting a decorated function directly raises OSError;
        # function.__wrapped__ holds the actual function.
        func = unwrap(func)
        source_code = func_to_source_code(func)

        # TODO(liym27):
        #  Consider this case: source_code in self._code_to_ast_caches,
        #  but actually they are methods in different classes.
        #  Maybe use (__class__, source_code) as key
        if source_code in self._code_to_ast_caches:
            root_wrapper = self._code_to_ast_caches[source_code]
        else:
            root = gast.parse(source_code)
            root = attach_origin_info(root, func)
            root_wrapper = self._dygraph_to_static.get_static_ast(root)
            self._code_to_ast_caches[source_code] = root_wrapper

        # Get static function from AST
        static_func, file_name = ast_to_func(root_wrapper.node, func)

        create_and_update_origin_info_map(root_wrapper.node, static_func)
        return static_func

    def exist(self, func):
        return func in self._converted_static_func_caches


_CACHE_LOCK = threading.Lock()
_FUNCTION_CACHE = FunctionCache()


def convert_to_static(function):
    """
    Transforms a dygraph function into a static function using the cache mechanism.

    Note(dev): It will return function.__func__ if encountering a class method.

    Args:
        function(callable): The function with dygraph layers that will be converted into static layers.
    """
    if getattr(function, ALREADY_D2S, None):
        return function

    # Return directly if decorated with @not_to_static (and DO NOT cache it),
    options = getattr(function, CONVERSION_OPTIONS, None)
    # or if the function is a Paddle API.
    need_skip = (options is not None and options.not_convert) or is_paddle_func(
        function
    )
    if need_skip:
        return function.__func__ if inspect.ismethod(function) else function

    with _CACHE_LOCK:
        static_func = _FUNCTION_CACHE.convert_with_cache(function)
        setattr(static_func, ALREADY_D2S, True)
        return static_func
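
# A minimal sketch of calling `convert_to_static` directly (illustrative only;
# users normally go through `@paddle.jit.to_static` instead):
#
#     def add(x, y):
#         return x + y
#
#     static_add = convert_to_static(add)    # transpiled, cached, flagged ALREADY_D2S
#     again = convert_to_static(static_add)  # returns the same object immediately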


class CacheKey:
    """
    Cache key for ProgramCache.
    """

    __slots__ = [
        'function_spec',
        'input_args_with_spec',
        'input_kwargs_with_spec',
        'class_instance',
        'kwargs',
        '_spec_names_id',
    ]

    def __init__(
        self,
        function_spec,
        input_args_with_spec,
        input_kwargs_with_spec,
        class_instance,
        **kwargs
    ):
        """
        Initializes a cache key.

        Args:
            function_spec(FunctionSpec): a FunctionSpec instance of the decorated function.
            input_args_with_spec(list[InputSpec]): actual input args with some arguments replaced by InputSpec.
            input_kwargs_with_spec(list[{string:InputSpec}]): actual input kwargs with some arguments replaced by InputSpec.
            class_instance(object): an instance of class `Layer`.
            **kwargs(dict): other arguments, kept for extensibility.
        """
        self.function_spec = function_spec
        self.input_args_with_spec = input_args_with_spec
        self.input_kwargs_with_spec = input_kwargs_with_spec
        self.class_instance = class_instance
        # NOTE: `kwargs` is usually not considered a basic member of `__hash__`
        self.kwargs = kwargs
        self._spec_names_id = _hash_spec_names(
            input_args_with_spec, input_kwargs_with_spec
        )

    @classmethod
    def from_func_and_args(cls, function_spec, args, kwargs, class_instance):
        """
        Generates a CacheKey instance from the given inputs.

        Args:
            function_spec(FunctionSpec): a FunctionSpec instance of the decorated function.
            args(tuple): tuple of actual input arguments.
            kwargs(dict): dict of actual input keyword arguments.
            class_instance(object): an instance of class `Layer`.
        """
        # 1. filter `self` in args
        if args and isinstance(args[0], layers.Layer):
            args = args[1:]
        # 2. convert tensor and numpy array into InputSpec
        _args, _kwargs = function_spec.unified_args_and_kwargs(args, kwargs)
        (
            input_args_with_spec,
            input_kwargs_with_spec,
        ) = function_spec.args_to_input_spec(_args, _kwargs)

        # 3. check whether the cache is hit or a new program needs to be built for the input arguments
        return CacheKey(
            function_spec,
            input_args_with_spec,
            input_kwargs_with_spec,
            class_instance,
        )

    def __hash__(self):
        error_msg = "Arguments to a `@paddle.jit.to_static` decorated function must be hashable Python objects (or nested structures of these types)."
        with_hook = self.kwargs.get("with_hook", False)
        is_train = self.kwargs.get("is_train", False)
        return hash(
            (
                id(self.function_spec),
                make_hashable(self.input_args_with_spec, error_msg),
                make_hashable(self.input_kwargs_with_spec, error_msg),
                self._spec_names_id,
                self.class_instance,
                with_hook,
                is_train,
            )
        )

    def __eq__(self, other):
        return (type(self) is type(other)) and hash(self) == hash(other)

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "id(function_spec): {}, input_args_with_spec: {}, input_kwargs_with_spec: {}, class_instance: {}".format(
            id(self.function_spec),
            self.input_args_with_spec,
            self.input_kwargs_with_spec,
            self.class_instance,
        )
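
# For illustration (not public API): two CacheKey objects built from the same
# specs compare equal and hash equal, so the second ProgramCache lookup hits the
# cache instead of re-tracing (func_spec, args_spec, kwargs_spec are placeholders):
#
#     key1 = CacheKey(func_spec, args_spec, kwargs_spec, None, is_train=True)
#     key2 = CacheKey(func_spec, args_spec, kwargs_spec, None, is_train=True)
#     assert key1 == key2 and hash(key1) == hash(key2)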


def unwrap_decorators(func):
    """
    Unwraps a decorated function and returns the decorator list and inner target.
    """
    decorators = []
    cur = func
    while True:
        if isinstance(cur, StaticFunction):
            decorators.append(cur)
            # Note: if `cur` is a method, keep it as bound method of class.
            instance = cur._class_instance
            if instance is not None:
                cur = cur.dygraph_function.__get__(instance)
            else:
                cur = cur.dygraph_function
        else:
            break
    return decorators, cur
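
# Sketch of peeling nested StaticFunction wrappers (illustrative):
#
#     @paddle.jit.to_static
#     def foo(x):
#         return x
#
#     decorators, raw_fn = unwrap_decorators(foo)
#     # decorators == [<StaticFunction wrapping foo>]; raw_fn is the plain foo.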


class StaticFunction:
    """
    Wrapper class to manage program conversion of a decorated function.
    """

    def __init__(self, function, input_spec=None, **kwargs):
        """
        Initializes a `StaticFunction`.

        Args:
            function(callable): A function or method that will be converted into static program.
            input_spec(list[InputSpec]): list of InputSpec to specify the `shape/dtype/name` information for each input argument, default None.
            **kwargs(dict): other arguments such as `build_strategy`.
        """
        # Save the instance `self` when decorating a method of a class.
        if inspect.ismethod(function):
            self._dygraph_function = getattr(function, '__func__')
            self._class_instance = getattr(function, '__self__')

            if not hasattr(self._class_instance, '_original_funcs'):
                raise TypeError(
                    "When using 'to_static' to convert method of a class, "
                    "please ensure the class inherits from nn.Layer"
                )
            self._class_instance._original_funcs[
                function.__name__
            ] = self._dygraph_function
        else:
            self._dygraph_function = function
            self._class_instance = None

        if input_spec is not None and prim_or_cinn_is_enabled(
            kwargs.get("build_strategy", None)
        ):
            from paddle.static import InputSpec

            for spec in flatten(input_spec):
                if isinstance(spec, InputSpec) and -1 in spec.shape:
                    input_spec = None
                    warnings.warn(
                        'Prim and CINN do not support -1 shapes yet, but input_spec contains a -1 shape, so input_spec is set to None.'
                    )
                    break

        self._input_spec = input_spec
        self._function_spec = FunctionSpec(function, input_spec)
        self._program_cache = ProgramCache()
        self._descriptor_cache = weakref.WeakKeyDictionary()
        # Note: Hold a reference to ProgramTranslator for switching `enable_to_static`.
        self._program_trans = ProgramTranslator()
        self._kwargs = kwargs
        self._training = True
        self._cuda_graph_capture_mode = ""
        self._cuda_graph_pool_id = 0

        self._property = kwargs.get("property", False)

    @property
    def is_property(self):
        # Whether it is a class property to be exported.
        return self._property

    def train(self):
        if (
            isinstance(self._class_instance, layers.Layer)
            and self._class_instance.training is False
        ):
            raise RuntimeError(
                "Failed to switch train mode. {} is a Layer's method, "
                "please use Layer.train() to switch train mode.".format(
                    self.dygraph_function
                )
            )
        self._training = True

    def eval(self):
        if (
            isinstance(self._class_instance, layers.Layer)
            and self._class_instance.training is True
        ):
            raise RuntimeError(
                "Failed to switch eval mode. {} is a Layer's method, "
                "please use Layer.eval() to switch eval mode.".format(
                    self.dygraph_function
                )
            )
        self._training = False

    def __get__(self, instance, owner):
        """
        Overrides this method to parse the class instance and call bound method correctly.

        For example:

            '''
            class Net(Layer):
                def __init__(self):
                    pass

                @paddle.jit.to_static
                def forward(self, x, y):
                    return x + y

            net = Net()
            out = net(x, y)
            '''

        In the above case, `net(x, y)` first calls `net.forward(x, y)`, which is a bound method
        of the `Net` instance. After decoration with `@paddle.jit.to_static`, `__get__` is called
        first to parse the class instance correctly instead of the `StaticFunction` instance.
        """
        if instance not in self._descriptor_cache:
            if instance is None:
                return self
            # Note(Aurelius84): Construct a new instance of StaticFunction when we
            # first encounter the bound function of a layer, and cache it.
            new_static_layer = self._clone()
            new_static_layer._class_instance = instance
            self._descriptor_cache[instance] = new_static_layer

        return self._descriptor_cache[instance]

    def _clone(self):
        return self.__class__(
            self.dygraph_function, self._input_spec, **self._kwargs
        )

    def __call__(self, *args, **kwargs):
        """
        Supports calling the returned instance with input `args` and `kwargs` directly.

        Args:
            *args(tuple): tuple of all input arguments from original decorated function.
            **kwargs(dict): dict of all input keyword arguments from the original decorated function.

        Return:
            Outputs of decorated function.
        """
        if self._property:
            return self._call_dygraph_function(*args, **kwargs)

        # 1. Call the dygraph function directly if conversion to static graph is not enabled.
        if not self._program_trans.enable_to_static:
            # NOTE(liym27):
            # Here calls `warnings.warn` but not `logging_utils.warn` because by default warnings.warn(message)
            # will show up **only once**. StaticFunction.__call__ will run many times, it is appropriate to
            # display this warning message only once.
            logging_utils.warn(
                "The decorator '@paddle.jit.to_static' does NOT work when setting 'paddle.jit.enable_to_static' to False. "
                "We will just return dygraph output. If you would like to get static graph output, please call API "
                "paddle.jit.enable_to_static(True)"
            )
            return self._call_dygraph_function(*args, **kwargs)

        if not _non_static_mode():
            raise RuntimeError(
                "Failed to run the callable object {} decorated by '@paddle.jit.to_static', "
                "because it is NOT in dynamic mode. Please disable the static graph mode to enter dynamic mode with the "
                "following API: paddle.disable_static().".format(
                    self.dygraph_function
                )
            )

        # 2. trace ops from dygraph layers and cache the generated program.
        args, kwargs = self._function_spec.unified_args_and_kwargs(args, kwargs)

        try:
            concrete_program, partial_program_layer = self.get_concrete_program(
                *args, **kwargs, is_train=self._is_train_mode()
            )
            # 3. synchronize self.training attribute.
            if isinstance(self._class_instance, layers.Layer):
                partial_program_layer.training = self._class_instance.training
            else:
                partial_program_layer.training = self._training

            partial_program_layer._cuda_graph_capture_mode = (
                self._cuda_graph_capture_mode
            )
            partial_program_layer._cuda_graph_pool_id = self._cuda_graph_pool_id

            # 4. return outputs.
            try:
                return partial_program_layer(args)
            except Exception as e:
                if not hasattr(e, error.ERROR_DATA):
                    # runtime error
                    error.attach_error_data(e, in_runtime=True)
                    raise
        except Exception as e:
            error_data = getattr(e, error.ERROR_DATA, None)
            if error_data:
                error_data.raise_new_exception()
            else:
                logging_utils.warn(
                    "Please file an issue at 'https://github.com/PaddlePaddle/Paddle/issues'"
                    " if you can't handle this {} yourself.".format(type(e))
                )
                raise e

    def _is_train_mode(self):
        if self._class_instance is not None:
            if not hasattr(self._class_instance, 'training'):
                raise TypeError(
                    "When using 'to_static' to convert method of a class, "
                    "please ensure the class inherits from nn.Layer"
                )
            return self._class_instance.training
        else:
            return self._training

    def _call_dygraph_function(self, *args, **kwargs):
        """
        Calls dygraph function directly and returns the outputs.

        Args:
            *args(tuple): tuple of all input arguments from original decorated function.
            **kwargs(dict): dict of all input keyword arguments from the original decorated function.

        Return:
            Outputs of dygraph function.
        """
        return self.dygraph_function(*args, **kwargs)

    def _raise_when_property(self):
        """raise RuntimeError when property=True

        Raises:
            RuntimeError: can not call this func when property=True
        """
        if self.is_property:
            raise RuntimeError("Can not call the func when property=True.")

    def get_concrete_program(self, *args, **kwargs):
        """
        Returns traced concrete program and inner executable partial layer.

        Args:
            *args(tuple): input arguments values or InputSpec
            **kwargs(dict) : input kwargs values.

        Returns:
            Traced ConcreteProgram and executable translated Layer.
        """
        self._raise_when_property()

        with_hook = kwargs.get("with_hook", False)
        is_train = kwargs.get("is_train", True)
        if "is_train" in kwargs:
            kwargs.pop("is_train")
        if "with_hook" in kwargs:
            kwargs.pop("with_hook")
        # 1. unify args/kwargs and replace Tensor with InputSpec
        if len(args) != len(self._function_spec.args_name):
            args, kwargs = self._function_spec.unified_args_and_kwargs(
                args, kwargs
            )
        (
            input_args_with_spec,
            input_kwargs_with_spec,
        ) = self._function_spec.args_to_input_spec(args, kwargs)

        # 2. generate cache key
        cache_key = CacheKey(
            self._function_spec,
            input_args_with_spec,
            input_kwargs_with_spec,
            self._class_instance,
            **self._kwargs,
            with_hook=with_hook,
            is_train=is_train
        )

        # 3. check whether the cache is hit or a new program needs to be built for the input arguments
        concrete_program, partial_program_layer = self._program_cache[cache_key]
        return concrete_program, partial_program_layer

    def get_traced_count(self):
        """
        Returns the number of traced programs for the decorated function.
        """
        return len(self._program_cache)

    @property
    def code(self):
        """
        Returns the source code of transformed static function for debugging.
        """
        static_func = convert_to_static(self.dygraph_function)
        source_code = func_to_source_code(static_func)
        return source_code

    @property
    def dygraph_function(self):
        """
        Returns the original decorated function.
        """
        if self._class_instance is not None:
            return self._dygraph_function.__get__(self._class_instance)
        else:
            return self._dygraph_function

    @property
    def concrete_program(self):
        """
        Returns the most recent ConcreteProgram instance of the decorated function.

        Examples:
            .. code-block:: python

                import paddle
                from paddle.jit import to_static
                from paddle.static import InputSpec

                paddle.disable_static()

                def foo(x, y):
                    z = x + y
                    return z

                # usage 1:
                decorated_foo = to_static(foo, input_spec=[InputSpec([10], name='x'), InputSpec([10], name='y')])
                print(decorated_foo.concrete_program)

                # usage 2:
                decorated_foo = to_static(foo)
                out_foo = decorated_foo(paddle.rand([10]), paddle.rand([10]))
                print(decorated_foo.concrete_program)
        """
        return self.concrete_program_specify_input_spec(input_spec=None)

    def concrete_program_specify_input_spec(
        self, input_spec=None, with_hook=False
    ):
        """
        Returns the most recent ConcreteProgram instance of the decorated function while
        specifying input_spec. If self._function_spec already has an
        input_spec, the compatibility of the given input_spec with
        self._function_spec.input_spec is checked. If the given input_spec is None,
        this method uses self._function_spec.input_spec.

        Args:
            input_spec (list[InputSpec], optional): Describes the input of
                the translated function.
        """
        self._raise_when_property()
        # If `input_spec` is specified, the length of the program cache is always 1;
        # otherwise, return the last one.
        cached_program_len = len(self._program_cache)
        # If `input_spec` is specified, apply conversion from dygraph layers into a static Program.
        if cached_program_len == 0:
            desired_input_spec = input_spec
            if self._function_spec.input_spec is not None:
                if input_spec is not None and not input_specs_compatible(
                    flatten(input_spec), flatten(self._function_spec.input_spec)
                ):
                    raise ValueError(
                        "The `input_spec`: {} used to construct concrete_program conflicts with the `input_spec`: {} in `@paddle.jit.to_static`".format(
                            input_spec, self._function_spec.input_spec
                        )
                    )
                # NOTE(chenweihang): we should always translate the program based on the `input_spec`
                # decorated on forward if it is valid
                desired_input_spec = self._function_spec.input_spec
                if input_spec is not None:
                    logging_utils.warn(
                        "\n\nYou have specified `input_spec` both in function definition (higher priority) and `paddle.jit.save` (will be ignored.)\n\n\t Using: {}\n\n\t Ignore: {}\n".format(
                            desired_input_spec, input_spec
                        )
                    )

            has_input_spec = desired_input_spec is not None
            if has_input_spec:
                concrete_program, _ = self.get_concrete_program(
                    *desired_input_spec,
                    with_hook=with_hook,
                    is_train=self._is_train_mode()
                )
                return concrete_program
            else:
                raise ValueError(
                    "No valid transformed program for {}.\n\t    Please specify `input_spec` in `@paddle.jit.to_static` or feed input tensors to call the decorated function once.\n".format(
                        self._function_spec
                    )
                )
        elif with_hook:
            cache_key = self._program_cache._recent_cache_key
            cache_key.kwargs["with_hook"] = True
            concrete_program, _ = self._program_cache[cache_key]
            return concrete_program

        # If more than one program has been cached, return the most recently converted program by default.
        elif cached_program_len > 1:
            logging_utils.warn(
                "Current {} has more than one cached program: {}, the last traced program will be returned by default.".format(
                    self._function_spec, cached_program_len
                )
            )

        cache_key, (
            concrete_program,
            partial_layer,
        ) = self._program_cache.last()
        return concrete_program

    def rollback(self):
        """
        Roll back into the original dygraph functions for the current class instance.

        Returns:
            Function or Method

        Example::
            .. code-block:: python

                import paddle

                class Net(paddle.nn.Layer):
                    def __init__(self):
                        super().__init__()

                    def forward(self, x, flag=True):
                        if flag:
                            out = x + 1
                        else:
                            out = x - 1
                        return out

                x = paddle.randn([10, 1], 'float32')
                net = paddle.jit.to_static(Net())  # convert into static graph mode
                out = net(x)

                net.forward.rollback()  # rollback into dygraph mode
                out = net(x)
        """

        def rollback_impl(class_instance):
            for name, func in class_instance._original_funcs.items():
                setattr(class_instance, name, func.__get__(class_instance))

            for sublayer in class_instance.sublayers(include_self=False):
                rollback_impl(sublayer)

        if self._class_instance is None:
            return self._dygraph_function

        # Only roll back sub-functions on the path of the top _dygraph_function.
        func_name = self._dygraph_function.__name__
        assert (
            func_name in self._class_instance._original_funcs
        ), "Not Found function '{}' in class '{}'.".format(
            func_name, self._class_instance.__name__
        )
        func = self._class_instance._original_funcs[func_name]
        setattr(
            self._class_instance, func_name, func.__get__(self._class_instance)
        )

        for sublayer in self._class_instance.sublayers(include_self=False):
            rollback_impl(sublayer)

        return getattr(self._class_instance, func_name)

    def __deepcopy__(self, memo):
        """
        Customized behavior for copy.deepcopy: returns the original decorated function instead
        of a new StaticFunction object. StaticFunction itself is not copyable because it is
        associated with class_instance.

        We add __deepcopy__ here only for the following usage:

        Example::
            .. code-block:: python

                import copy
                import paddle

                class Net(paddle.nn.Layer):
                    def __init__(self):
                        super().__init__()

                    def forward(self, x, flag=True):
                        if flag:
                            out = x + 1
                        else:
                            out = x - 1
                        return out

                x = paddle.randn([10, 1], 'float32')
                net = paddle.jit.to_static(Net())  # convert into static graph mode

                copy_net = copy.deepcopy(net)      # deepcopy a new net without @to_static

        Note that the original 'net' will unwrap @to_static and roll back into a plain Layer.
        """
        if self._class_instance is not None:
            net_name = type(self._class_instance).__name__
            logging_utils.log(
                level=-1,
                msg="Deepcopying '{}' decorated with @to_static is not recommended, as it has the side effect of"
                " rolling back to the original state before @to_static. Please deepcopy '{}' before applying @to_static.".format(
                    net_name, net_name
                ),
            )
            self.rollback()
            return self._dygraph_function.__get__(
                memo[id(self._class_instance)]
            )
        else:
            return self._dygraph_function

    @property
    def inputs(self):
        """
        Returns the input tensors of the recently converted static program.
        """
        self._raise_when_property()
        concrete_program = self.concrete_program
        inputs = [
            var
            for var in flatten(concrete_program.inputs)
            if isinstance(var, framework.Variable)
        ]
        return inputs

    @property
    def outputs(self):
        """
        Returns the output tensors of the recently converted static program.
        """
        self._raise_when_property()
        concrete_program = self.concrete_program
        outputs = [
            var
            for var in flatten(concrete_program.outputs)
            if isinstance(var, framework.Variable)
        ]

        return outputs

    @property
    def main_program(self):
        """
        Returns the recently converted static main program.
        """
        self._raise_when_property()
        concrete_program = self.concrete_program
        main_program = concrete_program.main_program
        return main_program

    @property
    def program_cache(self):
        return self._program_cache

    @property
    def function_spec(self):
        return self._function_spec


def _verify_init_in_dynamic_mode(class_instance):
    """
    Verifies the instance is initialized in dynamic mode.
    """
    if isinstance(class_instance, layers.Layer):
        if not class_instance._init_in_dynamic_mode:
            raise RuntimeError(
                " `paddle.jit.to_static` is only available in dynamic mode. Please call `paddle.disable_static()` before "
                "initializing your Layer class `{}` . Because parameters of Layer class should be initialized firstly "
                "in dynamic mode while applying transformation.".format(
                    class_instance
                )
            )


class HookHelper:
    """
    Only for converting pre/post hook operations in the outermost layer during jit.save,
    because hooks in sublayers have already been processed automatically.
    """

    def __init__(self, func, class_instance, with_hook=False):
        self.func = func
        self.class_instance = class_instance
        self.with_hook = with_hook
        self.need_apply_hook = (
            with_hook
            and isinstance(self.class_instance, layers.Layer)
            and getattr(func, "__name__") == "forward"
        )

    def apply_pre_hooks(self, inputs):
        """
        Apply _forward_pre_hooks from outermost layer
        """
        if not self.need_apply_hook:
            return inputs

        inputs = inputs[1:]
        for forward_pre_hook in self.class_instance._forward_pre_hooks.values():
            hook_result = forward_pre_hook(self.class_instance, inputs)
            if hook_result is not None:
                if not isinstance(hook_result, tuple):
                    hook_result = (hook_result,)
                inputs = hook_result

        return [self.class_instance] + list(inputs)

    def apply_post_hooks(self, inputs, outputs):
        """
        Apply _forward_post_hooks from outermost layer
        """
        if not self.need_apply_hook:
            return outputs

        inputs = inputs[1:]
        for (
            forward_post_hook
        ) in self.class_instance._forward_post_hooks.values():
            hook_result = forward_post_hook(
                self.class_instance, inputs, outputs
            )
            if hook_result is not None:
                outputs = hook_result

        inputs.insert(0, self.class_instance)
        return outputs
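
# Sketch of the hook contract replayed here, mirroring paddle.nn.Layer
# semantics (hypothetical hook functions, for illustration):
#
#     def scale_inputs(layer, inputs):       # forward pre-hook
#         return tuple(x * 2 for x in inputs)  # replaces the positional inputs
#
#     def add_one(layer, inputs, outputs):   # forward post-hook
#         return outputs + 1                   # replaces the outputs
#
# HookHelper replays the outermost layer's registered hooks around the traced
# forward so that jit.save captures their effect.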


class ConcreteProgram:

    __slots__ = [
        'inputs',
        'outputs',
        'main_program',
        'startup_program',
        'parameters',
        'function',
        'kwargs',
    ]

    def __init__(
        self,
        inputs,
        outputs,
        parameters,
        function,
        main_program,
        startup_program=None,
        **kwargs
    ):
        self.inputs = inputs
        self.outputs = outputs
        self.main_program = main_program
        self.startup_program = startup_program
        self.parameters = parameters
        self.function = function
        self.kwargs = kwargs

    @staticmethod
    @switch_to_static_graph
    def from_func_spec(
        func_spec, input_spec, input_kwargs_spec, class_instance, **kwargs
    ):
        """
        Builds the main_program with specialized inputs and returns outputs
        of program as fetch_list.

        Args:
            func_spec(FunctionSpec): A FunctionSpec instance for decorated function.
            input_spec(list[InputSpec]):
        """
        # verify the instance is initialized in imperative mode.
        _verify_init_in_dynamic_mode(class_instance)

        # Transforms dygraph function into static function and caches it.
        dygraph_function = func_spec.dygraph_function
        static_func = convert_to_static(dygraph_function)
        # Apply pre/post hooks for the outermost layer.
        hook_helper = HookHelper(
            dygraph_function, class_instance, kwargs.get("with_hook", False)
        )

        main_program, startup_program = framework.Program(), framework.Program()
        # Note: The random seed should be synchronized into cached program
        # if set in `fluid.dygraph_guard` because some ops rely on it, such as
        # `fluid.layers.dropout`.
        main_program.random_seed = framework.default_main_program().random_seed
        startup_program.random_seed = (
            framework.default_startup_program().random_seed
        )

        from paddle.fluid.dygraph.base import _switch_declarative_mode_guard_

        with framework.program_guard(main_program, startup_program):
            with _switch_declarative_mode_guard_(is_declarative=True):
                # 1. Adds `fluid.data` layers for input if needed
                static_inputs = func_spec.to_static_inputs_with_spec(
                    input_spec, main_program
                )
                _kwargs = func_spec.to_static_inputs_with_spec(
                    input_kwargs_spec, main_program
                )
                if class_instance:
                    static_inputs = tuple(
                        [class_instance] + list(static_inputs)
                    )

                # 2. Builds program only once and returns the output Variables.
                with param_guard(
                    get_parameters(class_instance, False)
                ), param_guard(get_buffers(class_instance, False)):
                    try:
                        # Only for jit.save; does nothing during train and eval.
                        inputs = hook_helper.apply_pre_hooks(static_inputs)
                        if _kwargs:
                            outputs = static_func(*inputs, **_kwargs)
                        else:
                            outputs = static_func(*inputs)
                        outputs = hook_helper.apply_post_hooks(inputs, outputs)
                    except BaseException as e:
                        # NOTE: If e is raised at compile time, e should be attached to ERROR_DATA here.
                        error.attach_error_data(e)
                        error_data = getattr(e, error.ERROR_DATA, None)
                        if error_data:
                            error_data.raise_new_exception()
                        raise

                # 3. Gets all ParamBases and buffered VarBases in the function
                all_parameters_and_buffers = (
                    ProgramTranslator.get_instance()._params_recorder.pop(
                        main_program
                    )
                )

                if outputs is not None:
                    need_wrap_into_list = (
                        not isinstance(outputs, (tuple, list))
                        or len(outputs) == 1
                    )
                    if need_wrap_into_list:
                        outputs = [outputs]

        main_program = update_op_callstack_with_origin_info(main_program)

        return ConcreteProgram(
            inputs=static_inputs,
            outputs=outputs,
            parameters=all_parameters_and_buffers,
            function=dygraph_function,
            main_program=main_program,
            startup_program=startup_program,
            **kwargs
        )


class ParametersRecorder:
    def __init__(self):
        self.params_dict = {}

    @synchronized
    def add(self, program, param):
        """use the default_program as key, append param the parameter list."""
        key = self._program_hash(program)
        if key not in self.params_dict:
            self.params_dict[key] = set()
        params = self.params_dict[key]
        params.add(param)

    def pop(self, program):
        params = self.params_dict.get(self._program_hash(program))
        if params is None:
            return []
        del self.params_dict[self._program_hash(program)]
        return list(params)

    def _program_hash(self, program):
        """
        because program is not deleted while calling from_func_spec.
        so it's ok to use id(program)
        """
        return id(program)
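
# Usage sketch (illustrative): parameters touched while tracing a program are
# recorded against that program, then drained once tracing finishes:
#
#     recorder = ParametersRecorder()
#     recorder.add(main_program, param)        # called while ops are traced
#     params = recorder.pop(main_program)      # drained in from_func_spec
#     assert recorder.pop(main_program) == []  # popping again yields nothing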


class FallbackProgramLayer(object):
    __slots__ = [
        '_instance',
        '_dy_func',
        'training',
        '_cuda_graph_capture_mode',
        '_cuda_graph_pool_id',
    ]

    def __init__(self, instance, dy_func):
        self._instance = instance
        self._dy_func = dy_func

    def __call__(self, inputs):
        return self._dy_func(*inputs)

    def __getattr__(self, key):
        if key not in self.__slots__:
            raise RuntimeError(
                "There raises a exception after applying `@paddle.jit.to_static()` and already switch into fallback mode. \n"
                "You can't get attribute for a fallback program layer. Please check `to_static.error` file for detail."
            )
        elif key in ['training']:
            if self._instance is not None:
                return getattr(self._instance, key)
            return

        return super().__getattr__(key)

    def __setattr__(self, key, value):
        if key not in self.__slots__:
            raise RuntimeError(
                "There raises a exception after applying `@paddle.jit.to_static()` and already switch into fallback mode. \n"
                "You can't get attribute for a fallback program layer. Please check `to_static.error` file for detail."
            )
        elif key in ['training']:
            if self._instance is not None:
                return setattr(self._instance, key, value)
            return

        return super().__setattr__(key, value)


class ProgramCache:
    """
    Wrapper class for the programs built from a dygraph function.
    """

    dy2static_error_file = "to_static.error"

    def __init__(self):
        # {hash_id : (concrete_program, partial_layer)}
        self._caches = collections.OrderedDict()
        # Track the most recently used program.
        self._recent_key = None
        self._recent_cache_key = None

    def _build_once(self, cache_key):
        # TODO(Aurelius84): Need a global FLAGS to enable/disable to_prim
        enable_prim = cache_key.kwargs['build_strategy'].build_cinn_pass
        # TODO(CZ): later, when CINN is in use, set_prim_all_enabled and check_and_set_prim_all_enabled will be set in the else branch.

        # NOTE(xiongkun): Need a global FLAGS to enable/disable fallback
        enable_fallback = enable_prim
        core.check_and_set_prim_all_enabled()
        try:
            concrete_program = ConcreteProgram.from_func_spec(
                func_spec=cache_key.function_spec,
                input_spec=cache_key.input_args_with_spec,
                input_kwargs_spec=cache_key.input_kwargs_with_spec,
                class_instance=cache_key.class_instance,
                **cache_key.kwargs
            )
        except Exception as e:
            if enable_fallback:
                warnings.warn(
                    "Exception is thrown while applying @paddle.jit.to_static. It will fallback into dygraph mode for training.\n"
                    "1. You can check `to_static.error` file in current workspace directory for detail.\n"
                    "2. In fallback mode, you can only do training, can't call paddle.jit.save(). Please modify model code according `to_static.error` firstly"
                )
                # TODO(xiongkun): use a different file name to avoid overwriting.
                with open(self.dy2static_error_file, "w") as fp:
                    fp.write(str(e))

                fallback_layer = FallbackProgramLayer(
                    cache_key.class_instance,
                    cache_key.function_spec.dygraph_function,
                )
                return fallback_layer, fallback_layer
            else:
                raise

        if prim_or_cinn_is_enabled(cache_key.kwargs['build_strategy']):
            for var in concrete_program.main_program.list_vars():
                if -1 in var.shape:
                    warnings.warn(
                        "Now prim and cinn do not support -1 shape, but the shape of var {} is {}".format(
                            var.name, var.shape
                        )
                    )

        class PrimHooker(PartialProgramLayerHook):
            def __init__(self):
                custom_vjps = set()
                if core._is_fwd_prim_enabled() and core._is_bwd_prim_enabled():
                    custom_vjps = {
                        op.type
                        for op in concrete_program.main_program.block(0).ops
                        if core.has_comp_grad_op_maker(op.type)
                    }
                self.custom_vjps = custom_vjps

            def before_append_backward(
                self, partial_program_layer, forward_program
            ):
                if core._is_fwd_prim_enabled():
                    to_prim(forward_program.block(0), self.custom_vjps)
                return forward_program

            def after_append_backward(
                self, partial_program_layer, whole_program, backward_start_idx
            ):
                backward_length = (
                    len(whole_program.block(0).ops) - backward_start_idx
                )
                if core._is_fwd_prim_enabled() and len(self.custom_vjps) != 0:
                    to_prim(whole_program.block(0))
                new_start_index = (
                    len(whole_program.block(0).ops) - backward_length
                )
                return whole_program, new_start_index

            def after_infer(self, partial_program_layer, infer_program):
                if core._is_fwd_prim_enabled():
                    to_prim(infer_program.block(0))
                return infer_program

        partial_program = partial_program_from(concrete_program)
        partial_program.set_hooker(PrimHooker())
        return concrete_program, partial_program


    def __getitem__(self, item):
        if not isinstance(item, CacheKey):
            raise ValueError(
                'type(item) should be CacheKey, but received %s'
                % type_name(item)
            )
        item_id = hash(item)
        self._recent_cache_key = item
        self._recent_key = item_id
        if item_id not in self._caches:
            self._caches[item_id] = self._build_once(item)
            # Note: raise a warning if the number of traced programs exceeds `MAX_TRACED_PROGRAM_COUNT`.
            current_tracing_count = len(self._caches)
            if current_tracing_count > MAX_TRACED_PROGRAM_COUNT:
                logging_utils.warn(
                    "Current traced program number: {} > `MAX_TRACED_PROGRAM_COUNT`: {}. Too many cached programs will bring expensive overhead. "
                    "The reason may be: (1) passing tensors with different shapes, (2) passing python objects instead of tensors.".format(
                        current_tracing_count, MAX_TRACED_PROGRAM_COUNT
                    )
                )

        return self._caches[item_id]

    def get_program(self, item):
        if not isinstance(item, CacheKey):
            raise ValueError(
                "Input item's type should be CacheKey, but received %s"
                % type_name(item)
            )
        item_id = hash(item)
        if item_id not in self._caches:
            raise RuntimeError(
                "Failed to find program for input item, please decorate input function by `@paddle.jit.to_static`."
            )
        return self._caches[item_id]

    def last(self):
        assert (
            len(self._caches) >= 1
        ), "No valid cached program in ProgramCache."
        assert self._recent_key is not None
        return self._recent_key, self._caches[self._recent_key]

    def __len__(self):
        return len(self._caches)

    def concrete_programs(self):
        return [cp for key, (cp, _) in self._caches.items()]

    def clear(self):
        self._caches = collections.OrderedDict()
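
# Illustrative sketch of the cache contract (not public API; `cache_key` is a
# CacheKey as described above): the first access with a key builds the program;
# later accesses with an equal key reuse it.
#
#     cache = ProgramCache()
#     concrete, partial = cache[cache_key]   # builds on first access
#     concrete2, _ = cache[cache_key]        # equal key -> cached entry
#     assert len(cache) == 1 and concrete is concrete2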


class ProgramTranslator:
    """
    Class to translate a dygraph function into a static graph function. The
    object of this class is a singleton.

    Args:
        None.

    Returns:
        ProgramTranslator: the singleton object.

    Examples:
        .. code-block:: python

            import paddle

            # Two methods get same object because ProgramTranslator is a singleton
            paddle.jit.ProgramTranslator()
            paddle.jit.ProgramTranslator.get_instance()

    """

    _singleton_lock = threading.Lock()
    _instance = None

    @synchronized
    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = object.__new__(cls, *args, **kwargs)
            cls._instance._initialized = False
        return cls._instance

    @classmethod
    def get_instance(cls):
        if cls._instance is None:
            with cls._singleton_lock:
                cls._instance = cls()
        return cls._instance

    @classmethod
    def reset(cls):
        if cls._instance is not None:
            cls._instance._initialized = False
            cls._instance.__init__()

    def __init__(self):
        # Make sure __init__ is called only once.
        if self._initialized:
            return
        self._initialized = True
        self._program_cache = ProgramCache()
        self._params_recorder = ParametersRecorder()
        self.enable_to_static = True

    def enable(self, enable_to_static):
        """
        Enable or disable the conversion from imperative to static graph by
        ProgramTranslator globally.

        Args:
            enable_to_static (bool): True or False to enable or disable converting to static.

        Returns:
            None.

        Examples:
            .. code-block:: python

                import paddle


                @paddle.jit.to_static
                def func(x):
                    if paddle.mean(x) > 0:
                        x_v = x - 1
                    else:
                        x_v = x + 1
                    return x_v


                paddle.jit.enable_to_static(False)

                x = paddle.ones([1, 2])
                # ProgramTranslator is disabled so the func is run in dygraph
                print(func(x))  # [[0. 0.]]

        """
        check_type(
            enable_to_static,
            "enable_to_static",
            bool,
            "ProgramTranslator.enable",
        )
        self.enable_to_static = enable_to_static

    def get_output(self, dygraph_func, *args, **kwargs):
        """
1376
        Returns the output dygraph Tensor for the dygraph function. The dygraph
        function will be translated into a static graph function so that the
        underlying numerical result is calculated in static graph mode.

        Args:
            dygraph_func (callable): the dygraph function.
            *args (tuple): the input argument of dygraph_func.
            **kwargs (dict): the input argument of dygraph_func.

        Returns:
            Tensor or tuple of Tensors: the dygraph Tensor containing the numerical result.

        Examples:
            .. code-block:: python

                import paddle


                def func(x):
                    if paddle.mean(x) > 0:
                        x_v = x - 1
                    else:
                        x_v = x + 1
                    return x_v


                prog_trans = paddle.jit.ProgramTranslator()

                x = paddle.ones([1, 2])
                x_v = prog_trans.get_output(func, x)
                print(x_v)  # [[0. 0.]]

        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_output"

        if not self.enable_to_static:
1414 1415
            # Here calls `warnings.warn` but not `logging_utils.warn` because by default warnings.warn(message)
            # will show up **only once**.
1416
            logging_utils.warn(
1417 1418 1419 1420
                "The ProgramTranslator.get_output doesn't work when setting ProgramTranslator.enable to False. "
                "We will just return dygraph output. "
                "Please call ProgramTranslator.enable(True) if you would like to get static output."
            )
1421
            return dygraph_func(*args, **kwargs)
1422
        try:
            function_spec = FunctionSpec(dygraph_func)
            cache_key = CacheKey.from_func_and_args(
                function_spec,
                args,
                kwargs,
                getattr(dygraph_func, '__self__', None),
            )
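            # A cache miss here triggers conversion: the dygraph function is
            # traced and the resulting static program is stored under this key.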
            _, partial_program_layer = self._program_cache[cache_key]

            if args and isinstance(args[0], layers.Layer):
                # Synchronize self.training attribute.
                partial_program_layer.training = args[0].training
                args = args[1:]
            try:
                return partial_program_layer(args)
            except BaseException as e:
                # NOTE:
                # 1. If e is raised at compile time, it should have been attached to ERROR_DATA already;
                # 2. If e is raised at runtime, attach it to ERROR_DATA here.
                if not hasattr(e, error.ERROR_DATA):
                    # runtime error
                    error.attach_error_data(e, in_runtime=True)
                raise
        except BaseException as e:
            error_data = getattr(e, error.ERROR_DATA, None)
            if error_data:
                error_data.raise_new_exception()
            else:
                logging_utils.warn(
                    "Please file an issue at 'https://github.com/PaddlePaddle/Paddle/issues'"
                    " if you can't handle this {} yourself.".format(type(e))
                )
                raise e

    def get_func(self, dygraph_func):
        """
        Returns a callable function which converts imperative dygraph APIs of
        the input dygraph_func into declarative net-building APIs, which means
        it doesn't return an immediate numerical result as get_output does.
        Users should handle Program and Executor by themselves.

        Args:
            dygraph_func (callable): the dygraph function.

        Returns:
            callable: a function that converts imperative dygraph APIs into
            declarative net-building APIs.

        Examples:
            .. code-block:: python

                import paddle


                def func(x):
                    if paddle.mean(x) > 0:
                        x_v = x - 1
                    else:
                        x_v = x + 1
                    return x_v


                prog_trans = paddle.jit.ProgramTranslator()
                static_func = prog_trans.get_func(func)
                print(callable(static_func))  # True
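
                # A minimal sketch (illustrative) of handling Program and
                # Executor yourself with the converted function, assuming
                # static graph mode:
                import numpy as np

                paddle.enable_static()
                main_prog = paddle.static.Program()
                startup_prog = paddle.static.Program()
                with paddle.static.program_guard(main_prog, startup_prog):
                    data_x = paddle.static.data(name='x', shape=[1, 2], dtype='float32')
                    out = static_func(data_x)
                exe = paddle.static.Executor()
                exe.run(startup_prog)
                res = exe.run(main_prog,
                              feed={'x': np.ones([1, 2], 'float32')},
                              fetch_list=[out])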

        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_func"

        if not self.enable_to_static:
            logging_utils.warn(
                "The ProgramTranslator.get_func doesn't work when setting ProgramTranslator.enable to False. We will "
                "just return dygraph output. Please call ProgramTranslator.enable(True) if you would like to get static output."
            )
            return dygraph_func

        static_func = convert_to_static(dygraph_func)
        return static_func

    def get_program(self, dygraph_func, *args, **kwargs):
        """
        Returns the translated static program and input/output Tensors from
        the dygraph function. Users can run the returned programs with an
        Executor.

        Args:
            dygraph_func (callable): the dygraph function.
            *args (tuple): the positional arguments of dygraph_func.
            **kwargs (dict): the keyword arguments of dygraph_func.

        Returns:
            tuple of (main_program, startup_program, inputs, outputs) whose
            types are (Program, Program, list of Tensors, list of Tensors).
            main_program: the converted main program.
            startup_program: the converted startup program.
            inputs: list of input Tensors which need to be fed.
            outputs: list of output Tensors which users can fetch.

        Examples:
            .. code-block:: python

                import paddle


                def func(x):
                    if paddle.mean(x) > 0:
                        x_v = x - 1
                    else:
                        x_v = x + 1
                    return x_v


                prog_trans = paddle.jit.ProgramTranslator()
                x = paddle.ones([1, 2])
                main_prog, start_prog, inputs, outputs = prog_trans.get_program(func, x)
                print([i.name for i in inputs])
                # [u'generated_tensor_0'] the feed input Tensor name representing x
                print([o.name for o in outputs])
                # [u'_generated_var_4'] the fetch output Tensor name representing x_v
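
                # A minimal sketch (illustrative) of running the returned
                # programs with an Executor under static graph mode:
                import numpy as np

                paddle.enable_static()
                exe = paddle.static.Executor()
                exe.run(start_prog)
                res = exe.run(main_prog,
                              feed={inputs[0].name: np.ones([1, 2], 'float32')},
                              fetch_list=outputs)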

        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_program"

        if not self.enable_to_static:
            logging_utils.warn(
                "The ProgramTranslator.get_program doesn't work when setting ProgramTranslator.enable to False. "
                "We will just return dygraph output. "
                "Please call ProgramTranslator.enable(True) if you would like to get static output."
            )
            return dygraph_func(*args, **kwargs)

        function_spec = FunctionSpec(dygraph_func)
        cache_key = CacheKey.from_func_and_args(
            function_spec, args, kwargs, getattr(dygraph_func, '__self__', None)
        )
        concrete_program, partial_program_layer = self._program_cache[cache_key]

        # Note: concrete_program holds all input/output information, including
        # non-Variable objects; only Variables are returned here.
        input_vars = [
            var
            for var in concrete_program.inputs
            if isinstance(var, framework.Variable)
        ]
        output_vars = [
            var
            for var in concrete_program.outputs
            if isinstance(var, framework.Variable)
        ]

        return (
            concrete_program.main_program,
            concrete_program.startup_program,
            input_vars,
            output_vars,
        )

    def get_code(self, dygraph_func):
        """
        Returns the source code string of the translated static function for
        the given dygraph function.

        Args:
            dygraph_func (callable): the dygraph function.

        Returns:
            str: the source code string of the translated static function.

        Examples:
            .. code-block:: python

                import paddle


                def func(x):
                    if paddle.mean(x) > 0:
                        x_v = x - 1
                    else:
                        x_v = x + 1
                    return x_v


                prog_trans = paddle.jit.ProgramTranslator()

                code = prog_trans.get_code(func)
                print(type(code))  # <class 'str'>

        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_code"

        # Get the AST of the dygraph function.
        unwrap_func = unwrap(dygraph_func)
        raw_code = inspect.getsource(unwrap_func)
        code = textwrap.dedent(raw_code)
        root = gast.parse(code)

        # Transform AST
        dygraph_to_static = DygraphToStaticAst()
        root_wrapper = dygraph_to_static.get_static_ast(root)

        # Get source_code
        source_code = ast_to_source_code(root_wrapper.node)
        return source_code

    def get_program_cache(self):
        """
        Returns the ProgramCache instance. This method is used by PaddlePaddle
        developers to manage program cache in ProgramTranslator. Normal users
        don't have to call this method.

        Returns:
            ProgramCache: ProgramCache instance of ProgramTranslator.

        Examples:
            .. code-block:: python

                import paddle


                prog_trans = paddle.jit.ProgramTranslator()
                prog_cache = prog_trans.get_program_cache()

        """
        return self._program_cache


def enable_to_static(enable_to_static_bool):
    """
    Enable or disable converting from imperative to static graph by
    ProgramTranslator globally.

    Args:
        enable_to_static_bool (bool): True or False to enable or disable converting to static.

    Returns:
        None.

    Examples:
        .. code-block:: python

            import paddle


            @paddle.jit.to_static
            def func(x):
                if paddle.mean(x) > 0:
                    x_v = x - 1
                else:
                    x_v = x + 1
                return x_v


            paddle.jit.enable_to_static(False)

            x = paddle.ones([1, 2])
            # ProgramTranslator is disabled so the func is run in dygraph
            print(func(x))  # [[0. 0.]]

    """
    check_type(
        enable_to_static_bool,
        "enable_to_static_bool",
        bool,
        "paddle.jit.enable_to_static",
    )
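    # Delegate to the process-wide ProgramTranslator singleton so that the
    # switch takes effect globally.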
    _program_trans = ProgramTranslator()
    _program_trans.enable(enable_to_static_bool)


@switch_to_static_graph
def to_prim(blocks, exclude=frozenset()):
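    # Lowers the ops in `blocks` to primitive ops via primapi.to_prim;
    # `exclude` is assumed (from its name) to list op types left un-lowered.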
    # TODO(Aurelius84): Fix this cycle import problem
    from paddle.incubate.autograd import primapi

    primapi.to_prim(blocks, exclude)