# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import inspect
import textwrap
import threading
import warnings
import weakref

from paddle.amp.auto_cast import _in_amp_guard
from paddle.fluid import _non_static_mode, core, framework
from paddle.fluid.data_feeder import check_type
from paddle.fluid.dygraph.base import param_guard, switch_to_static_graph
from paddle.nn.layer import layers
from paddle.utils import flatten, gast

from . import error, logging_utils
from .ast_transformer import DygraphToStaticAst
from .function_spec import (
    FunctionSpec,
    _hash_spec_names,
    get_buffers,
    get_parameters,
)
from .origin_info import (
    attach_origin_info,
    create_and_update_origin_info_map,
    update_op_callstack_with_origin_info,
)
from .partial_program import PartialProgramLayerHook, partial_program_from
from .utils import (
    ALREADY_D2S,
    ast_to_func,
    ast_to_source_code,
    func_to_source_code,
    input_specs_compatible,
    is_paddle_func,
    make_hashable,
    prim_or_cinn_is_enabled,
    type_name,
    unwrap,
)

__all__ = []

# For each traced function, we limit the number of cached programs to
# MAX_TRACED_PROGRAM_COUNT = 10 for caching performance. Once the threshold
# is exceeded, we warn users to make sure the conversion is as expected.
MAX_TRACED_PROGRAM_COUNT = 10

CONVERSION_OPTIONS = "__jst_not_to_static"

def synchronized(func):
    func.__lock__ = threading.Lock()

    def lock_func(*args, **kwargs):
        with func.__lock__:
            return func(*args, **kwargs)

    return lock_func
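
# Illustrative usage of `synchronized` (an expository sketch; `_rebuild_cache`
# is a hypothetical function, not part of this module): the decorator attaches
# a lock to the function object and serializes concurrent calls through it.
#
#     @synchronized
#     def _rebuild_cache():
#         ...  # runs under the lock attached by @synchronized, one thread at a time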


class FunctionCache:
    """
    Caches the transformed functions to avoid redundant conversions of the same function.
    """

    def __init__(self):
        # Caches the converted static functions. {dygraph_func: static_func}
        self._converted_static_func_caches = weakref.WeakKeyDictionary()
        # Caches the converted ast node for same source code. {source_code: ast_root}
        self._code_to_ast_caches = dict()
        self._dygraph_to_static = DygraphToStaticAst()

    def convert_with_cache(self, func):
        """
        Returns the cached static function, or converts and caches it on the
        first encounter.
        """
        # If hit cache, return it directly.
        static_func = self._converted_static_func_caches.get(func, None)

        if static_func is None:
            static_func = self._convert(func)
            self._converted_static_func_caches[func] = static_func

        return static_func

    def _convert(self, func):
        """
        Converts a dygraph function into a static function. For two functions
        with the same dedented source code, the second function will reuse the
        transformed ast node of the previous one.

        For example:
            # A.py
            def foo(x, y):
                z = x + y
                return z

            # B.py
            def foo(x, y):
                z = x + y
                return z

        If the conversion of A.foo happens after B.foo, it will reuse the
        transformed ast node of B.foo to speed up the conversion.
        """
        # Note: In Python 2, inspecting a decorated function directly would
        # raise OSError; func.__wrapped__ holds the actual function.
        func = unwrap(func)
        source_code = func_to_source_code(func)

        # TODO(liym27):
        #  Consider this case: source_code in self._code_to_ast_caches,
        #  but actually they are methods in different classes.
        #  Maybe use (__class__, source_code) as key
        if source_code in self._code_to_ast_caches:
            root_wrapper = self._code_to_ast_caches[source_code]
        else:
            root = gast.parse(source_code)
            root = attach_origin_info(root, func)
            root_wrapper = self._dygraph_to_static.get_static_ast(root)
            self._code_to_ast_caches[source_code] = root_wrapper

        # Get static function from AST
        static_func, file_name = ast_to_func(root_wrapper.node, func)

        create_and_update_origin_info_map(root_wrapper.node, static_func)
        return static_func

    def exist(self, func):
        return func in self._converted_static_func_caches


_CACHE_LOCK = threading.Lock()
_FUNCTION_CACHE = FunctionCache()


def convert_to_static(function):
    """
152
    Transforms function of dygraph into static function using the cache mechanism.
153

154 155
    Note(dev): It will return function.__func__ if encountering class method.

156 157
    Args:
        function(callable): The function with dygraph layers that will be converted into static layers.
158
    """
    if getattr(function, ALREADY_D2S, None):
        return function

    # Return directly if decorated with @not_to_static and DO NOT cache it,
    # or if it is a Paddle API.
    options = getattr(function, CONVERSION_OPTIONS, None)
    need_skip = (options is not None and options.not_convert) or is_paddle_func(
        function
    )
    if need_skip:
        return function.__func__ if inspect.ismethod(function) else function

    with _CACHE_LOCK:
        static_func = _FUNCTION_CACHE.convert_with_cache(function)
        setattr(static_func, ALREADY_D2S, True)
        return static_func
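
# Expository sketch of the caching behavior (`foo` is a hypothetical user
# function, not defined in this module):
#
#     def foo(x):
#         return x + 1
#
#     static_foo = convert_to_static(foo)  # transformed via AST, then cached
#     again = convert_to_static(foo)       # second call hits _FUNCTION_CACHE
#     assert static_foo is again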


class CacheKey:
    """
    Cache key for ProgramCache.
    """

    __slots__ = [
        'function_spec',
        'input_args_with_spec',
        'input_kwargs_with_spec',
        'class_instance',
        'kwargs',
        '_spec_names_id',
    ]

    def __init__(
        self,
        function_spec,
        input_args_with_spec,
        input_kwargs_with_spec,
        class_instance,
        **kwargs,
    ):
        """
        Initializes a cache key.

        Args:
            function_spec(FunctionSpec): a FunctionSpec instance of the decorated function.
            input_args_with_spec(list[InputSpec]): actual input args with some arguments replaced by InputSpec.
            input_kwargs_with_spec(list[{string:InputSpec}]): actual input kwargs with some arguments replaced by InputSpec.
            class_instance(object): an instance of class `Layer`.
            **kwargs(dict): other arguments, kept for better scalability.
        """
        self.function_spec = function_spec
        self.input_args_with_spec = input_args_with_spec
        self.input_kwargs_with_spec = input_kwargs_with_spec
        self.class_instance = class_instance
        # NOTE: `kwargs` is usually not considered as a basic member of `__hash__`
        self.kwargs = kwargs
        self._spec_names_id = _hash_spec_names(
            input_args_with_spec, input_kwargs_with_spec
        )

    @classmethod
    def from_func_and_args(cls, function_spec, args, kwargs, class_instance):
        """
        Generates a CacheKey instance from the given inputs.

        Args:
            function_spec(FunctionSpec): a FunctionSpec instance of the decorated function.
            args(tuple): tuple of actual input arguments.
            kwargs(dict): dict of actual input keyword arguments.
            class_instance(object): an instance of class `Layer`.
        """
        # 1. filter `self` in args
        if args and isinstance(args[0], layers.Layer):
            args = args[1:]
        # 2. convert tensor and numpy array into InputSpec
        _args, _kwargs = function_spec.unified_args_and_kwargs(args, kwargs)
        (
            input_args_with_spec,
            input_kwargs_with_spec,
        ) = function_spec.args_to_input_spec(_args, _kwargs)

        # 3. check whether we hit the cache or need to build a new program for the input arguments
        return CacheKey(
            function_spec,
            input_args_with_spec,
            input_kwargs_with_spec,
            class_instance,
        )

    def __hash__(self):
        error_msg = "Arguments to a `@paddle.jit.to_static` must be hashable Python objects (or nested structures of these types)."
        with_hook = self.kwargs.get("with_hook", False)
        is_train = self.kwargs.get("is_train", False)
        return hash(
            (
                id(self.function_spec),
                make_hashable(self.input_args_with_spec, error_msg),
                make_hashable(self.input_kwargs_with_spec, error_msg),
                self._spec_names_id,
                self.class_instance,
                with_hook,
                is_train,
            )
        )

    def __eq__(self, other):
        return (type(self) is type(other)) and hash(self) == hash(other)

    def __ne__(self, other):
        return not self == other
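
    # Hash/equality sketch (expository; `spec`, `args_spec`, `kwargs_spec` and
    # `net` are hypothetical): keys built from the same specs, instance and
    # train flag collide, so the traced program is reused instead of rebuilt.
    #
    #     key1 = CacheKey(spec, args_spec, kwargs_spec, net, is_train=True)
    #     key2 = CacheKey(spec, args_spec, kwargs_spec, net, is_train=True)
    #     assert key1 == key2 and hash(key1) == hash(key2)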

    def __repr__(self):
        return "id(function_spec): {}, input_args_with_spec: {}, input_kwargs_with_spec: {}, class_instance: {}".format(
            id(self.function_spec),
            self.input_args_with_spec,
            self.input_kwargs_with_spec,
            self.class_instance,
        )


def unwrap_decorators(func):
    """
    Unwraps a decorated function and returns the decorator list and inner target.
    """
    decorators = []
    cur = func
    while True:
        if isinstance(cur, StaticFunction):
            decorators.append(cur)
            # Note: if `cur` is a method, keep it as a bound method of its class.
            instance = cur._class_instance
            if instance is not None:
                cur = cur.dygraph_function.__get__(instance)
            else:
                cur = cur.dygraph_function
        else:
            break
    return decorators, cur
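
# Expository sketch (hypothetical `net`, not executed here): for a method
# decorated with @paddle.jit.to_static, `unwrap_decorators` peels off the
# StaticFunction wrapper(s) and returns the underlying dygraph callable.
#
#     decorators, raw_fn = unwrap_decorators(net.forward)
#     # decorators -> [StaticFunction(...)], raw_fn -> bound dygraph forward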


class StaticFunction:
    """
    Wrapper class to manage program conversion of a decorated function.
    """

    def __init__(self, function, input_spec=None, **kwargs):
        """
        Initializes a `StaticFunction`.

        Args:
            function(callable): A function or method that will be converted into a static program.
            input_spec(list[InputSpec]): list of InputSpec to specify the `shape/dtype/name` information for each input argument, default None.
            **kwargs(dict): other arguments like `build_strategy`, etc.
        """
        # Save the instance `self` while decorating a method of a class.

        if inspect.ismethod(function):
            self._dygraph_function = function.__func__
            self._class_instance = function.__self__

            if not hasattr(self._class_instance, '_original_funcs'):
                raise TypeError(
                    "When using 'to_static' to convert method of a class, "
                    "please ensure the class inherits from nn.Layer"
                )
            self._class_instance._original_funcs[
                function.__name__
            ] = self._dygraph_function
        else:
            self._dygraph_function = function
            self._class_instance = None

        if input_spec is not None and prim_or_cinn_is_enabled(
            kwargs.get("build_strategy", None)
        ):
            from paddle.static import InputSpec

            for spec in flatten(input_spec):
                if isinstance(spec, InputSpec) and -1 in spec.shape:
                    input_spec = None
                    warnings.warn(
                        'Prim and CINN do not support -1 shape yet, but input_spec contains a -1 shape, so input_spec is set to None.'
                    )
                    break

        self._input_spec = input_spec
        self._function_spec = FunctionSpec(function, input_spec)
        self._program_cache = ProgramCache()
        self._descriptor_cache = weakref.WeakKeyDictionary()
        # Note: Hold a reference to ProgramTranslator for switching `enable_to_static`.
        self._program_trans = ProgramTranslator()
        self._kwargs = kwargs
        self._training = True
        self._cuda_graph_capture_mode = ""
        self._cuda_graph_pool_id = 0

        self._property = kwargs.get("property", False)

    @property
    def is_property(self):
        # whether this is a class property to be exported.
        return self._property

    def train(self):
        if (
            isinstance(self._class_instance, layers.Layer)
            and self._class_instance.training is False
        ):
            raise RuntimeError(
                "Failed to switch train mode. {} is a Layer's method, "
                "please use Layer.train() to switch train mode.".format(
                    self.dygraph_function
                )
            )
        self._training = True

    def eval(self):
        if (
            isinstance(self._class_instance, layers.Layer)
            and self._class_instance.training is True
        ):
            raise RuntimeError(
                "Failed to switch eval mode. {} is a Layer's method, "
                "please use Layer.eval() to switch eval mode.".format(
                    self.dygraph_function
                )
            )
        self._training = False

    def __get__(self, instance, owner):
        """
        Overrides this method to parse the class instance and call bound method correctly.

        For example:

            '''
            class Net(Layer):
                def __init__(self):
                    pass

                @paddle.jit.to_static
                def forward(self, x, y):
                    return x + y

            net = Net()
            out = net(x, y)
            '''

        In the above case, `net(x, y)` first calls `net.forward(x, y)`, which is a bound method
        of the `Net` instance. After being decorated by `@paddle.jit.to_static`, `__get__` is called
        first to parse the class instance correctly instead of the `StaticFunction` instance.
        """
        if instance not in self._descriptor_cache:
            if instance is None:
                return self
            # Note(Aurelius84): To construct a new instance of StaticFunction when we
            # first encounter the bound function of the layer, and cache it.
            new_static_layer = self._clone()
            new_static_layer._class_instance = instance
            self._descriptor_cache[instance] = new_static_layer

        return self._descriptor_cache[instance]

    def _clone(self):
        return self.__class__(
            self.dygraph_function, self._input_spec, **self._kwargs
        )

    def __call__(self, *args, **kwargs):
        """
        Supports calling the returned instance directly with input `args` and `kwargs`.

        Args:
            *args(tuple): tuple of all input arguments from the original decorated function.
            **kwargs(dict): dict of all input keyword arguments from the original decorated function.

        Return:
            Outputs of the decorated function.
        """
        if self._property:
            return self._call_dygraph_function(*args, **kwargs)

        # 1. call dygraph function directly if `to_static` is not enabled
        if not self._program_trans.enable_to_static:
            # NOTE(liym27):
            # Here we call `warnings.warn` rather than `logging_utils.warn` because by default
            # warnings.warn(message) will show up **only once**. StaticFunction.__call__ will run
            # many times, so it is appropriate to display this warning message only once.
            logging_utils.warn(
                "The decorator '@paddle.jit.to_static' does NOT work when setting 'paddle.jit.enable_to_static' to False. "
                "We will just return dygraph output. If you would like to get static graph output, please call API "
                "paddle.jit.enable_to_static(True)"
            )
            return self._call_dygraph_function(*args, **kwargs)

        if not _non_static_mode():
            raise RuntimeError(
                "Failed to run the callable object {} decorated by '@paddle.jit.to_static', "
                "because it is NOT in dynamic mode. Please disable the static graph mode to enter dynamic mode with the "
                "following API: paddle.disable_static().".format(
                    self.dygraph_function
                )
            )

        # 2. trace ops from dygraph layers and cache the generated program.
        args, kwargs = self._function_spec.unified_args_and_kwargs(args, kwargs)

        try:
            concrete_program, partial_program_layer = self.get_concrete_program(
                *args, **kwargs, is_train=self._is_train_mode()
            )
            # 3. synchronize self.training attribute.
            if isinstance(self._class_instance, layers.Layer):
                partial_program_layer.training = self._class_instance.training
            else:
                partial_program_layer.training = self._training

            partial_program_layer._cuda_graph_capture_mode = (
                self._cuda_graph_capture_mode
            )
            partial_program_layer._cuda_graph_pool_id = self._cuda_graph_pool_id

            # 4. return outputs.
            try:
                return partial_program_layer(args)
            except Exception as e:
                if not hasattr(e, error.ERROR_DATA):
                    # runtime error
                    error.attach_error_data(e, in_runtime=True)
                    raise
        except Exception as e:
            error_data = getattr(e, error.ERROR_DATA, None)
            if error_data:
                error_data.raise_new_exception()
            else:
                logging_utils.warn(
                    "Please file an issue at 'https://github.com/PaddlePaddle/Paddle/issues'"
                    " if you can't handle this {} yourself.".format(type(e))
                )
                raise e

    def _is_train_mode(self):
        if self._class_instance is not None:
            if not hasattr(self._class_instance, 'training'):
                raise TypeError(
                    "When using 'to_static' to convert method of a class, "
                    "please ensure the class inherits from nn.Layer"
                )
            return self._class_instance.training
        else:
            return self._training

    def _call_dygraph_function(self, *args, **kwargs):
        """
        Calls the dygraph function directly and returns the outputs.

        Args:
            *args(tuple): tuple of all input arguments from the original decorated function.
            **kwargs(dict): dict of all input keyword arguments from the original decorated function.

        Return:
            Outputs of the dygraph function.
        """
        return self.dygraph_function(*args, **kwargs)

    def _raise_when_property(self):
        """Raises a RuntimeError when property=True.

        Raises:
            RuntimeError: cannot call this function when property=True.
        """
        if self.is_property:
            raise RuntimeError("Can not call the func when property=True.")

    def get_concrete_program(self, *args, **kwargs):
        """
        Returns the traced concrete program and the inner executable partial layer.

        Args:
            *args(tuple): input argument values or InputSpec
            **kwargs(dict): input kwargs values.

        Returns:
            Traced ConcreteProgram and executable translated Layer.
        """
        self._raise_when_property()

        with_hook = kwargs.get("with_hook", False)
        is_train = kwargs.get("is_train", True)
        if "is_train" in kwargs:
            kwargs.pop("is_train")
        if "with_hook" in kwargs:
            kwargs.pop("with_hook")
        # 1. unify args/kwargs and replace Tensor with InputSpec
        if len(args) != len(self._function_spec.args_name):
            args, kwargs = self._function_spec.unified_args_and_kwargs(
                args, kwargs
            )
        (
            input_args_with_spec,
            input_kwargs_with_spec,
        ) = self._function_spec.args_to_input_spec(args, kwargs)

        # 2. generate cache key
        cache_key = CacheKey(
            self._function_spec,
            input_args_with_spec,
            input_kwargs_with_spec,
            self._class_instance,
            **self._kwargs,
            with_hook=with_hook,
            is_train=is_train,
        )

        # 3. check whether we hit the cache or need to build a new program for the input arguments
        concrete_program, partial_program_layer = self._program_cache[cache_key]
        return concrete_program, partial_program_layer

    def get_traced_count(self):
        """
        Returns the number of traced programs for the decorated function.
        """
        return len(self._program_cache)

    @property
    def code(self):
        """
        Returns the source code of the transformed static function for debugging.
        """
        static_func = convert_to_static(self.dygraph_function)
        source_code = func_to_source_code(static_func)
        return source_code

    @property
    def dygraph_function(self):
        """
        Returns the original decorated function.
        """
        if self._class_instance is not None:
            return self._dygraph_function.__get__(self._class_instance)
        else:
            return self._dygraph_function

    @property
    def concrete_program(self):
        """
        Returns the recent ConcreteProgram instance of the decorated function.

        Examples:
            .. code-block:: python

                import paddle
                from paddle.jit import to_static
                from paddle.static import InputSpec

                paddle.disable_static()

                def foo(x, y):
                    z = x + y
                    return z

                # usage 1:
                decorated_foo = to_static(foo, input_spec=[InputSpec([10], name='x'), InputSpec([10], name='y')])
                print(decorated_foo.concrete_program)

                # usage 2:
                decorated_foo = to_static(foo)
                out_foo = decorated_foo(paddle.rand([10]), paddle.rand([10]))
                print(decorated_foo.concrete_program)
        """
        return self.concrete_program_specify_input_spec(input_spec=None)

    def concrete_program_specify_input_spec(
        self, input_spec=None, with_hook=False
    ):
        """
        Returns the recent ConcreteProgram instance of the decorated function
        while specifying input_spec. If self._function_spec already has an
        input_spec, it checks the compatibility of the input input_spec and
        self._function_spec.input_spec. If the input input_spec is None, this
        method uses self._function_spec.input_spec instead.

        Args:
            input_spec (list[InputSpec], optional): Describes the input of
                the translated function.
        """
        self._raise_when_property()
        # If `input_spec` is specified, the length of program_cache will always be 1;
        # otherwise, return the last one.
        cached_program_len = len(self._program_cache)
        # If `input_spec` is specified, apply the conversion from dygraph layers into a static Program.
        if cached_program_len == 0:
            desired_input_spec = input_spec
            if self._function_spec.input_spec is not None:
                if input_spec is not None and not input_specs_compatible(
                    flatten(input_spec), flatten(self._function_spec.input_spec)
                ):
                    raise ValueError(
                        "The `input_spec`: {} used to construct concrete_program conflicts with the `input_spec`: {} in `@paddle.jit.to_static`".format(
                            input_spec, self._function_spec.input_spec
                        )
                    )
                # NOTE(chenweihang): we should always translate the program based on the `input_spec`
                # decorated on forward if it is valid
                desired_input_spec = self._function_spec.input_spec
                if input_spec is not None:
                    logging_utils.warn(
                        "\n\nYou have specified `input_spec` both in function definition (higher priority) and `paddle.jit.save` (will be ignored.)\n\n\t Using: {}\n\n\t Ignore: {}\n".format(
                            desired_input_spec, input_spec
                        )
                    )

            has_input_spec = desired_input_spec is not None
            if has_input_spec:
                concrete_program, _ = self.get_concrete_program(
                    *desired_input_spec,
                    with_hook=with_hook,
                    is_train=self._is_train_mode(),
                )
                return concrete_program
            else:
                raise ValueError(
                    "No valid transformed program for {}.\n\t    Please specify `input_spec` in `@paddle.jit.to_static` or feed input tensors to call the decorated function once.\n".format(
                        self._function_spec
                    )
                )
        elif with_hook:
            cache_key = self._program_cache._recent_cache_key
            cache_key.kwargs["with_hook"] = True
            concrete_program, _ = self._program_cache[cache_key]
            return concrete_program

        # If more than one program has been cached, return the most recently converted one by default.
        elif cached_program_len > 1:
            logging_utils.warn(
                "Current {} has more than one cached program: {}, the last traced program will be returned by default.".format(
                    self._function_spec, cached_program_len
                )
            )

        cache_key, (
            concrete_program,
            partial_layer,
        ) = self._program_cache.last()
        return concrete_program

    def rollback(self):
        """
        Rolls back to the original dygraph functions for the current class instance.

        Returns:
            Function or Method

        Example::
            .. code-block:: python

                import paddle

                class Net(paddle.nn.Layer):
                    def __init__(self):
                        super().__init__()

                    def forward(self, x, flag=True):
                        if flag:
                            out = x + 1
                        else:
                            out = x - 1
                        return out

                x = paddle.randn([10, 1], 'float32')
                net = paddle.jit.to_static(Net())  # convert into static graph mode
                out = net(x)

                net.forward.rollback()  # rollback into dygraph mode
                out = net(x)
        """

        def rollback_impl(class_instance):
            for name, func in class_instance._original_funcs.items():
                setattr(class_instance, name, func.__get__(class_instance))

            for sublayer in class_instance.sublayers(include_self=False):
                rollback_impl(sublayer)

        if self._class_instance is None:
            return self._dygraph_function

        # only rollback sub-functions on path of top _dygraph_function
        func_name = self._dygraph_function.__name__
        assert (
            func_name in self._class_instance._original_funcs
        ), "Not found function '{}' in class '{}'.".format(
            func_name, type(self._class_instance).__name__
        )
        func = self._class_instance._original_funcs[func_name]
        setattr(
            self._class_instance, func_name, func.__get__(self._class_instance)
        )

        for sublayer in self._class_instance.sublayers(include_self=False):
            rollback_impl(sublayer)

        return getattr(self._class_instance, func_name)

    def __deepcopy__(self, memo):
        """
        Customized behavior for copy.deepcopy: returns the original decorated function instead
        of a new StaticFunction object. StaticFunction itself is not copyable because it's
        associated with class_instance.

        We add __deepcopy__ here only for the following usage:

        Example::
            .. code-block:: python

                import copy
                import paddle

                class Net(paddle.nn.Layer):
                    def __init__(self):
                        super().__init__()

                    def forward(self, x, flag=True):
                        if flag:
                            out = x + 1
                        else:
                            out = x - 1
                        return out

                x = paddle.randn([10, 1], 'float32')
                net = paddle.jit.to_static(Net())  # convert into static graph mode

                copy_net = copy.deepcopy(net)      # deepcopy a new net without @to_static

        Please note that the original 'net' will unwrap @to_static and roll back into a simple Layer.
        """
        if self._class_instance is not None:
            net_name = type(self._class_instance).__name__
            logging_utils.log(
                level=-1,
                msg="It is not recommended to deepcopy '{}' decorated with @to_static, as it has the side effect of"
                " rolling back to the original state before @to_static. Please deepcopy '{}' before applying @to_static.".format(
                    net_name, net_name
                ),
            )
            self.rollback()
            return self._dygraph_function.__get__(
                memo[id(self._class_instance)]
            )
        else:
            return self._dygraph_function

    @property
    def inputs(self):
        """
        Returns the input tensors of the recently converted static program.
        """
        self._raise_when_property()
        concrete_program = self.concrete_program
        inputs = [
            var
            for var in flatten(concrete_program.inputs)
            if isinstance(var, framework.Variable)
        ]
        return inputs

    @property
    def outputs(self):
        """
        Returns the output tensors of the recently converted static program.
        """
        self._raise_when_property()
        concrete_program = self.concrete_program
        outputs = [
            var
            for var in flatten(concrete_program.outputs)
            if isinstance(var, framework.Variable)
        ]

        return outputs

    @property
    def main_program(self):
        """
        Returns the recently converted static main program.
        """
        self._raise_when_property()
        concrete_program = self.concrete_program
        main_program = concrete_program.main_program
        return main_program

    @property
    def program_cache(self):
        return self._program_cache

    @property
    def function_spec(self):
        return self._function_spec


def _verify_init_in_dynamic_mode(class_instance):
    """
    Verifies that the instance was initialized in dynamic mode.
    """
    if isinstance(class_instance, layers.Layer):
        if not class_instance._init_in_dynamic_mode:
            raise RuntimeError(
                " `paddle.jit.to_static` is only available in dynamic mode. Please call `paddle.disable_static()` before "
                "initializing your Layer class `{}`, because parameters of the Layer class should be initialized first "
                "in dynamic mode while applying the transformation.".format(
                    class_instance
                )
            )


class HookHelper:
    """
    Only for converting pre/post hook operations in the outermost layer during
    jit.save, because hooks in sublayers have been processed automatically.
    """

    def __init__(self, func, class_instance, with_hook=False):
        self.func = func
        self.class_instance = class_instance
        self.with_hook = with_hook
        self.need_apply_hook = (
            with_hook
            and isinstance(self.class_instance, layers.Layer)
            and func.__name__ == "forward"
        )

    def apply_pre_hooks(self, inputs):
        """
        Apply _forward_pre_hooks from outermost layer
        """
        if not self.need_apply_hook:
            return inputs

        inputs = inputs[1:]
        for forward_pre_hook in self.class_instance._forward_pre_hooks.values():
            hook_result = forward_pre_hook(self.class_instance, inputs)
            if hook_result is not None:
                if not isinstance(hook_result, tuple):
                    hook_result = (hook_result,)
                inputs = hook_result

        return [self.class_instance] + list(inputs)

    def apply_post_hooks(self, inputs, outputs):
        """
        Apply _forward_post_hooks from outermost layer
        """
        if not self.need_apply_hook:
            return outputs

        inputs = inputs[1:]
        for (
            forward_post_hook
        ) in self.class_instance._forward_post_hooks.values():
            hook_result = forward_post_hook(
                self.class_instance, inputs, outputs
            )
            if hook_result is not None:
                outputs = hook_result

        inputs.insert(0, self.class_instance)
        return outputs


class ConcreteProgram:

    __slots__ = [
        'inputs',
        'outputs',
        'main_program',
        'startup_program',
        'parameters',
        'function',
        'kwargs',
    ]

    def __init__(
        self,
        inputs,
        outputs,
        parameters,
        function,
        main_program,
        startup_program=None,
        **kwargs,
    ):
        self.inputs = inputs
        self.outputs = outputs
        self.main_program = main_program
        self.startup_program = startup_program
        self.parameters = parameters
        self.function = function
        self.kwargs = kwargs

    @staticmethod
    @switch_to_static_graph
    def from_func_spec(
        func_spec, input_spec, input_kwargs_spec, class_instance, **kwargs
    ):
        """
        Builds the main_program with specialized inputs and returns the outputs
        of the program as the fetch_list.

        Args:
            func_spec(FunctionSpec): A FunctionSpec instance for the decorated function.
            input_spec(list[InputSpec]): the InputSpec list describing each input argument.
        """
        # verify that the instance was initialized in imperative mode.
        _verify_init_in_dynamic_mode(class_instance)

        # Transforms the dygraph function into a static function and caches it.
        dygraph_function = func_spec.dygraph_function
        static_func = convert_to_static(dygraph_function)
        # apply pre/post hooks for the outermost layer
        hook_helper = HookHelper(
            dygraph_function, class_instance, kwargs.get("with_hook", False)
        )

        main_program, startup_program = framework.Program(), framework.Program()
        # Note: The random seed should be synchronized into the cached program
        # if set in `fluid.dygraph_guard` because some ops rely on it, such as
        # `fluid.layers.dropout`.
        main_program.random_seed = framework.default_main_program().random_seed
        startup_program.random_seed = (
            framework.default_startup_program().random_seed
        )

        from paddle.fluid.dygraph.base import _switch_declarative_mode_guard_

        with framework.program_guard(main_program, startup_program):
            with _switch_declarative_mode_guard_(is_declarative=True):
                # 1. Adds `fluid.data` layers for inputs if needed
                static_inputs = func_spec.to_static_inputs_with_spec(
                    input_spec, main_program
                )
                _kwargs = func_spec.to_static_inputs_with_spec(
                    input_kwargs_spec, main_program
                )
                if class_instance:
                    static_inputs = tuple(
                        [class_instance] + list(static_inputs)
                    )

                # 2. Builds the program only once and returns the output Variables.
                with param_guard(
                    get_parameters(class_instance, False)
                ), param_guard(get_buffers(class_instance, False)):
                    try:
                        # only for jit.save, does nothing during train and eval
                        inputs = hook_helper.apply_pre_hooks(static_inputs)
                        if _kwargs:
                            outputs = static_func(*inputs, **_kwargs)
                        else:
                            outputs = static_func(*inputs)
                        outputs = hook_helper.apply_post_hooks(inputs, outputs)
                    except BaseException as e:
                        # NOTE: If e is raised at compile time, e should be attached to ERROR_DATA here.
                        error.attach_error_data(e)
                        error_data = getattr(e, error.ERROR_DATA, None)
                        if error_data:
                            error_data.raise_new_exception()
                        raise

                # 3. Gets all ParamBases and buffered VarBases in the function
                all_parameters_and_buffers = (
                    ProgramTranslator.get_instance()._params_recorder.pop(
                        main_program
                    )
                )

                if outputs is not None:
                    need_wrap_into_list = (
                        not isinstance(outputs, (tuple, list))
                        or len(outputs) == 1
                    )
                    if need_wrap_into_list:
                        outputs = [outputs]

        main_program = update_op_callstack_with_origin_info(main_program)

        return ConcreteProgram(
            inputs=static_inputs,
            outputs=outputs,
            parameters=all_parameters_and_buffers,
            function=dygraph_function,
            main_program=main_program,
            startup_program=startup_program,
            **kwargs,
        )


class ParametersRecorder:
    def __init__(self):
        self.params_dict = {}

    @synchronized
    def add(self, program, param):
        """use the default_program as key, append param the parameter list."""
        key = self._program_hash(program)
        if key not in self.params_dict:
            self.params_dict[key] = set()
        params = self.params_dict[key]
        params.add(param)

    def pop(self, program):
        params = self.params_dict.get(self._program_hash(program))
        if params is None:
            return []
        del self.params_dict[self._program_hash(program)]
        return list(params)

    def _program_hash(self, program):
        """
        because program is not deleted while calling from_func_spec.
        so it's ok to use id(program)
        """
        return id(program)
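
# Expository flow (`prog` and `w` are hypothetical): parameters touched while
# tracing are recorded against the program being built, then popped exactly
# once when the ConcreteProgram is assembled.
#
#     recorder = ParametersRecorder()
#     recorder.add(prog, w)        # called while tracing ops
#     params = recorder.pop(prog)  # -> [w]; the entry for `prog` is removed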


class FallbackProgramLayer:
    __slots__ = [
        '_instance',
        '_dy_func',
        'training',
        '_cuda_graph_capture_mode',
        '_cuda_graph_pool_id',
    ]

    def __init__(self, instance, dy_func):
        self._instance = instance
        self._dy_func = dy_func

    def __call__(self, inputs):
        return self._dy_func(*inputs)

    def __getattr__(self, key):
        if key not in self.__slots__:
            raise RuntimeError(
                "An exception was raised while applying `@paddle.jit.to_static()`, and execution has already switched into fallback mode. \n"
                "You can't get an attribute of a fallback program layer. Please check the `to_static.error` file for details."
            )
        elif key in ['training']:
            if self._instance is not None:
                return getattr(self._instance, key)
            return

        return super().__getattr__(key)

    def __setattr__(self, key, value):
        if key not in self.__slots__:
            raise RuntimeError(
                "An exception was raised while applying `@paddle.jit.to_static()`, and execution has already switched into fallback mode. \n"
                "You can't set an attribute of a fallback program layer. Please check the `to_static.error` file for details."
            )
        elif key in ['training']:
            if self._instance is not None:
                return setattr(self._instance, key, value)
            return

        return super().__setattr__(key, value)
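
# Expository note (hypothetical `my_dygraph_fn`, `x`; not executed): once
# fallback is triggered, the layer simply forwards calls to the original
# dygraph function, so only training-style execution works and
# paddle.jit.save() is unavailable.
#
#     layer = FallbackProgramLayer(instance=None, dy_func=my_dygraph_fn)
#     out = layer([x])  # equivalent to my_dygraph_fn(x)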


class ProgramCache:
    """
    Wrapper class for the program functions defined by a dygraph function.
    """

    dy2static_error_file = "to_static.error"

    def __init__(self):
        # {hash_id : (concrete_program, partial_layer)}
        self._caches = collections.OrderedDict()
        # track the most recently used program
        self._recent_key = None
        self._recent_cache_key = None

    def _build_once(self, cache_key):
        # TODO(Aurelius84): Need a global FLAGS to enable/disable to_prim
        enable_prim = cache_key.kwargs['build_strategy'].build_cinn_pass
        # TODO(CZ): later when use cinn, set_prim_all_enabled and check_and_set_prim_all_enabled will be set at else branch.

        # NOTE(xiongkun): Need a global FLAGS to enable/disable fallback
        enable_fallback = enable_prim
        core.check_and_set_prim_all_enabled()
        try:
            concrete_program = ConcreteProgram.from_func_spec(
                func_spec=cache_key.function_spec,
                input_spec=cache_key.input_args_with_spec,
                input_kwargs_spec=cache_key.input_kwargs_with_spec,
                class_instance=cache_key.class_instance,
                **cache_key.kwargs,
            )
        except Exception as e:
            if enable_fallback:
                warnings.warn(
                    "An exception was thrown while applying @paddle.jit.to_static. It will fall back into dygraph mode for training.\n"
                    "1. You can check the `to_static.error` file in the current workspace directory for details.\n"
                    "2. In fallback mode, you can only do training and can't call paddle.jit.save(). Please modify the model code according to `to_static.error` first."
                )
                # TODO(xiongkun) change different file name to avoid overwrite.
                with open(self.dy2static_error_file, "w") as fp:
                    fp.write(str(e))

                fallback_layer = FallbackProgramLayer(
                    cache_key.class_instance,
                    cache_key.function_spec.dygraph_function,
                )
                return fallback_layer, fallback_layer
            else:
                raise

        if prim_or_cinn_is_enabled(cache_key.kwargs['build_strategy']):
            for var in concrete_program.main_program.list_vars():
                if -1 in var.shape:
                    warnings.warn(
                        "Prim and CINN do not support -1 shape yet, but the shape of var {} is {}".format(
                            var.name, var.shape
                        )
                    )

        partial_program = partial_program_from(concrete_program)
        if core._is_fwd_prim_enabled() and not _in_amp_guard():
            partial_program.set_hooker(
                PrimHooker(concrete_program.main_program)
            )
        return concrete_program, partial_program

    def __getitem__(self, item):
        if not isinstance(item, CacheKey):
            raise ValueError(
                'type(item) should be CacheKey, but received %s'
                % type_name(item)
            )
        item_id = hash(item)
        self._recent_cache_key = item
        self._recent_key = item_id
        if item_id not in self._caches:
            self._caches[item_id] = self._build_once(item)
            # Note: raise a warning if the number of traced programs exceeds MAX_TRACED_PROGRAM_COUNT
            current_tracing_count = len(self._caches)
            if current_tracing_count > MAX_TRACED_PROGRAM_COUNT:
                logging_utils.warn(
                    "Current traced program number: {} > MAX_TRACED_PROGRAM_COUNT: {}. Too many cached programs will bring expensive overhead. "
                    "The reason may be: (1) passing tensors with different shapes, (2) passing python objects instead of tensors.".format(
                        current_tracing_count, MAX_TRACED_PROGRAM_COUNT
                    )
                )

        return self._caches[item_id]

    def get_program(self, item):
        if not isinstance(item, CacheKey):
            raise ValueError(
                "Input item's type should be CacheKey, but received %s"
                % type_name(item)
            )
        item_id = hash(item)
        if item_id not in self._caches:
            raise RuntimeError(
                "Failed to find program for input item, please decorate input function by `@paddle.jit.to_static`."
            )
        return self._caches[item_id]

    def last(self):
        assert (
            len(self._caches) >= 1
        ), "No valid cached program in ProgramCache."
        assert self._recent_key is not None
        return self._recent_key, self._caches[self._recent_key]

    def __len__(self):
        return len(self._caches)

    def concrete_programs(self):
        return [cp for key, (cp, _) in self._caches.items()]

    def clear(self):
        self._caches = collections.OrderedDict()
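
    # Expository cache interaction (`cache` and `key` are hypothetical; `key`
    # is a CacheKey built elsewhere): indexing builds the program once,
    # repeated lookups hit the OrderedDict, and `last()` returns the most
    # recently used entry.
    #
    #     concrete_program, partial_layer = cache[key]  # builds once
    #     concrete_program, partial_layer = cache[key]  # cache hit
    #     _, (cp, pl) = cache.last()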


class PrimHooker(PartialProgramLayerHook):
    def __init__(self, original_program):
        if len(original_program.blocks) > 1:
            raise ValueError(
                'The primitive mode only supports one block currently.'
            )
        self.custom_vjps = set()
        if core._is_all_prim_enabled():
            self.custom_vjps = {
                op.type
                for op in original_program.block(0).ops
                if core.has_comp_grad_op_maker(op.type)
            }

    def before_append_backward(self, forward_program):
        if core._is_fwd_prim_enabled():
            _to_prim(forward_program.blocks, blacklist=self.custom_vjps)
        return forward_program

    def after_append_backward(self, whole_program, backward_start_idx):
        backward_length = len(whole_program.block(0).ops) - backward_start_idx
        if core._is_fwd_prim_enabled() and len(self.custom_vjps) != 0:
            _to_prim(whole_program.blocks, whitelist=self.custom_vjps)
        new_start_index = len(whole_program.block(0).ops) - backward_length
        return whole_program, new_start_index

    def after_infer(self, infer_program):
        if core._is_fwd_prim_enabled():
            _to_prim(infer_program.block(0))
        return infer_program
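
# Expository lifecycle sketch (not executed; the names `fwd`, `whole`, `start`
# and `infer` are hypothetical): the partial program layer invokes the hooker
# around its program transformations, roughly as:
#
#     hooker = PrimHooker(concrete_program.main_program)
#     fwd = hooker.before_append_backward(fwd)  # lower forward ops to prim
#     whole, start = hooker.after_append_backward(whole, start)
#     infer = hooker.after_infer(infer)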


class ProgramTranslator:
    """
    Class to translate a dygraph function into a static graph function. The
    object of this class is a singleton.

    Args:
        None.

    Returns:
        ProgramTranslator: the singleton object.

    Examples:
        .. code-block:: python

            import paddle

            # Both calls return the same object because ProgramTranslator is a singleton
            paddle.jit.ProgramTranslator()
            paddle.jit.ProgramTranslator.get_instance()

    """

    _singleton_lock = threading.Lock()
    _instance = None

    @synchronized
    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = object.__new__(cls)
            cls._instance._initialized = False
        return cls._instance

    @classmethod
    def get_instance(cls):
        if cls._instance is None:
            with cls._singleton_lock:
                cls._instance = cls()
        return cls._instance

    @classmethod
    def reset(cls):
        if cls._instance is not None:
            cls._instance._initialized = False
            cls._instance.__init__()

    def __init__(self):
        # Make sure that __init__ is called only once.
        if self._initialized:
            return
        self._initialized = True
        self._program_cache = ProgramCache()
        self._params_recorder = ParametersRecorder()
        self.enable_to_static = True

    def enable(self, enable_to_static):
        """
        Enables or disables the conversion from imperative to static graph by
        ProgramTranslator globally.

        Args:
            enable_to_static (bool): True or False to enable or disable converting to static.

        Returns:
            None.

        Examples:
            .. code-block:: python

                import paddle


                @paddle.jit.to_static
                def func(x):
                    if paddle.mean(x) > 0:
                        x_v = x - 1
                    else:
                        x_v = x + 1
                    return x_v


                paddle.jit.enable_to_static(False)

                x = paddle.ones([1, 2])
                # ProgramTranslator is disabled so the func is run in dygraph
                print(func(x))  # [[0. 0.]]

        """
        check_type(
            enable_to_static,
            "enable_to_static",
            bool,
            "ProgramTranslator.enable",
        )
        self.enable_to_static = enable_to_static

    def get_output(self, dygraph_func, *args, **kwargs):
        """
        Returns the output dygraph Tensor for the dygraph function. The dygraph
        function will be translated into a static graph function so the underlying
        numerical result will be calculated in static graph mode.

        Args:
            dygraph_func (callable): the dygraph function.
            *args (tuple): the input arguments of dygraph_func.
            **kwargs (dict): the input keyword arguments of dygraph_func.

        Returns:
            Tensor or tuple of Tensors: the dygraph Tensor containing the numerical result.

        Examples:
            .. code-block:: python

                import paddle


                def func(x):
                    if paddle.mean(x) > 0:
                        x_v = x - 1
                    else:
                        x_v = x + 1
                    return x_v


                prog_trans = paddle.jit.ProgramTranslator()

                x = paddle.ones([1, 2])
                x_v = prog_trans.get_output(func, x)
                print(x_v)  # [[0. 0.]]

        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_output"

        if not self.enable_to_static:
            # Here we call `warnings.warn` rather than `logging_utils.warn` because by default
            # warnings.warn(message) will show up **only once**.
            logging_utils.warn(
                "The ProgramTranslator.get_output doesn't work when setting ProgramTranslator.enable to False. "
                "We will just return dygraph output. "
                "Please call ProgramTranslator.enable(True) if you would like to get static output."
            )
            return dygraph_func(*args, **kwargs)
1420
        try:
1421
            function_spec = FunctionSpec(dygraph_func)
1422
            cache_key = CacheKey.from_func_and_args(
1423 1424 1425 1426 1427
                function_spec,
                args,
                kwargs,
                getattr(dygraph_func, '__self__', None),
            )
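            # The cache key combines the function spec, the call arguments and
            # the bound instance (if any); a cache hit reuses the already
            # traced program instead of re-tracing.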
            _, partial_program_layer = self._program_cache[cache_key]

            if args and isinstance(args[0], layers.Layer):
                # Synchronize self.training attribute.
                partial_program_layer.training = args[0].training
                args = args[1:]
            try:
                return partial_program_layer(args)
            except BaseException as e:
                # NOTE:
                # 1. If e is raised at compile time, it should have been attached to ERROR_DATA before;
                # 2. If e is raised at runtime, it should be attached to ERROR_DATA here.
                if not hasattr(e, error.ERROR_DATA):
                    # runtime error
                    error.attach_error_data(e, in_runtime=True)
                raise
        except BaseException as e:
            error_data = getattr(e, error.ERROR_DATA, None)
            if error_data:
                error_data.raise_new_exception()
            else:
                logging_utils.warn(
                    "Please file an issue at 'https://github.com/PaddlePaddle/Paddle/issues'"
                    " if you can't handle this {} yourself.".format(type(e))
                )
                raise e

    def get_func(self, dygraph_func):
        """
        Returns a callable function which converts imperative dygraph APIs of
        the input dygraph_func into declarative net-building APIs, which means
        it doesn't return an immediate numerical result as get_output does.
        Users should handle Program and Executor by themselves.

        Args:
            dygraph_func (callable): the dygraph function.

        Returns:
            callable: converting imperative dygraph APIs into declarative
            net-building APIs.

        Examples:
            .. code-block:: python

                import paddle


                def func(x):
                    if paddle.mean(x) > 0:
                        x_v = x - 1
                    else:
                        x_v = x + 1
                    return x_v


                prog_trans = paddle.jit.ProgramTranslator()
                static_func = prog_trans.get_func(func)
                print(callable(static_func))  # True

        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_func"

        if not self.enable_to_static:
            logging_utils.warn(
                "The ProgramTranslator.get_func doesn't work when setting ProgramTranslator.enable to False. We will "
                "just return dygraph output. Please call ProgramTranslator.enable(True) if you would like to get static output."
            )
            return dygraph_func

        static_func = convert_to_static(dygraph_func)
        return static_func
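
    # A hedged sketch of using the callable returned by get_func (illustrative
    # only; the converted function builds static ops when called on static
    # Variables inside a program guard):
    #
    #     static_func = prog_trans.get_func(func)
    #     main_prog = paddle.static.Program()
    #     with paddle.static.program_guard(main_prog):
    #         x = paddle.static.data(name='x', shape=[1, 2], dtype='float32')
    #         out = static_func(x)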

    def get_program(self, dygraph_func, *args, **kwargs):
        """
        Returns the translated static program and input/output Tensors from
        the dygraph function. Users can run the returned program with an
        Executor.

        Args:
            dygraph_func (callable): the dygraph function.
            *args (tuple): the input argument of dygraph_func.
            **kwargs (dict): the input argument of dygraph_func.

        Returns:
            tuple of (main_program, startup_program, inputs, outputs) whose
            types are (Program, Program, list of Tensors, list of Tensors).
            main_program: the converted main program.
            startup_program: the converted startup program.
            inputs: list of input Tensors which need to be fed.
            outputs: list of output Tensors which users can fetch.

        Examples:
            .. code-block:: python

                import paddle


                def func(x):
                    if paddle.mean(x) > 0:
                        x_v = x - 1
                    else:
                        x_v = x + 1
                    return x_v


                prog_trans = paddle.jit.ProgramTranslator()
                x = paddle.ones([1, 2])
                main_prog, start_prog, inputs, outputs = prog_trans.get_program(func, x)
                print([i.name for i in inputs])
                # [u'generated_tensor_0'] the feed input Tensor name representing x
                print([o.name for o in outputs])
                # [u'_generated_var_4'] the fetch output Tensor name representing x_v

        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_program"

        if not self.enable_to_static:
            logging_utils.warn(
                "The ProgramTranslator.get_program doesn't work when setting ProgramTranslator.enable to False. "
                "We will just return dygraph output. "
                "Please call ProgramTranslator.enable(True) if you would like to get static output."
            )
            return dygraph_func(*args, **kwargs)

        function_spec = FunctionSpec(dygraph_func)
        cache_key = CacheKey.from_func_and_args(
            function_spec, args, kwargs, getattr(dygraph_func, '__self__', None)
        )
        concrete_program, partial_program_layer = self._program_cache[cache_key]

        # NOTE: concrete_program holds all input/output info, including non-Variable inputs.
        input_vars = [
            var
            for var in concrete_program.inputs
            if isinstance(var, framework.Variable)
        ]
        output_vars = [
            var
            for var in concrete_program.outputs
            if isinstance(var, framework.Variable)
        ]

        return (
            concrete_program.main_program,
            concrete_program.startup_program,
            input_vars,
            output_vars,
        )
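
    # A hedged sketch of running the returned programs (illustrative only;
    # feed and fetch names depend on the traced function):
    #
    #     exe = paddle.static.Executor()
    #     exe.run(start_prog)
    #     (out,) = exe.run(main_prog,
    #                      feed={inputs[0].name: x.numpy()},
    #                      fetch_list=outputs)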

    def get_code(self, dygraph_func):
        """
        Returns the translated static function string code from dygraph function.

        Args:
            dygraph_func (callable): the dygraph function.

        Returns:
            str: the string code of translated static function.

        Examples:
            .. code-block:: python

                import paddle


                def func(x):
                    if paddle.mean(x) > 0:
                        x_v = x - 1
                    else:
                        x_v = x + 1
                    return x_v


                prog_trans = paddle.jit.ProgramTranslator()

                code = prog_trans.get_code(func)
                print(type(code))  # <class 'str'>

        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_code"
        # Get the AST of the dygraph function.
        unwrap_func = unwrap(dygraph_func)
        raw_code = inspect.getsource(unwrap_func)
        code = textwrap.dedent(raw_code)
        root = gast.parse(code)

        # Transform the AST from dygraph to static.
        dygraph_to_static = DygraphToStaticAst()
        root_wrapper = dygraph_to_static.get_static_ast(root)

        # Convert the transformed AST back to source code.
        source_code = ast_to_source_code(root_wrapper.node)
        return source_code

    def get_program_cache(self):
        """
        Returns the ProgramCache instance. This method is used by PaddlePaddle
        developers to manage program cache in ProgramTranslator. Normal users
        don't have to call this method.

        Returns:
            ProgramCache: ProgramCache instance of ProgramTranslator.

        Examples:
            .. code-block:: python

                import paddle

                prog_trans = paddle.jit.ProgramTranslator()
                prog_cache = prog_trans.get_program_cache()

        """
        return self._program_cache


def enable_to_static(enable_to_static_bool):

    """
    Enable or disable the conversion from imperative to static graph by
    ProgramTranslator globally.

    Args:
        enable_to_static_bool (bool): True or False to enable or disable converting to static.

    Returns:
        None.

    Examples:
        .. code-block:: python

            import paddle


            @paddle.jit.to_static
            def func(x):
                if paddle.mean(x) > 0:
                    x_v = x - 1
                else:
                    x_v = x + 1
                return x_v


            paddle.jit.enable_to_static(False)

            x = paddle.ones([1, 2])
            # ProgramTranslator is disabled so the func is run in dygraph
            print(func(x))  # [[0. 0.]]

    """
    check_type(
        enable_to_static_bool,
        "enable_to_static_bool",
        bool,
        "paddle.jit.enable_to_static",
    )
    _program_trans = ProgramTranslator()
    _program_trans.enable(enable_to_static_bool)


@switch_to_static_graph
def _to_prim(blocks, blacklist=frozenset(), whitelist=frozenset()):
    """Swith to static graph and call to_prim."""
1696 1697 1698
    # TODO(Aurelius84): Fix this cycle import problem
    from paddle.incubate.autograd import primapi

    primapi.to_prim(blocks, blacklist=blacklist, whitelist=whitelist)
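
# A hedged usage sketch for _to_prim (illustrative only; `program` is a
# hypothetical static Program obtained elsewhere, e.g. from the program cache):
#
#     _to_prim(program.blocks, blacklist=frozenset({"softmax"}))
#
# This decomposes the operators in the given blocks into primitive operators,
# skipping op types in `blacklist`; `whitelist`, when non-empty, restricts the
# decomposition to the listed op types.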