# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import inspect
import os
import textwrap
import threading
import warnings
import weakref

from paddle.fluid import core, framework
from paddle.fluid.data_feeder import check_type
from paddle.fluid.dygraph.base import (
    _switch_declarative_mode_guard_,
    param_guard,
    switch_to_static_graph,
)
from paddle.fluid.unique_name import UniqueNameGenerator
from paddle.fluid.unique_name import guard as UniqueNameGuard
from paddle.framework import in_dynamic_mode
from paddle.nn.layer import layers
from paddle.utils import flatten, gast

from . import error, logging_utils
from .ast_transformer import DygraphToStaticAst
from .function_spec import (
    FunctionSpec,
    _hash_spec_names,
    get_buffers,
    get_parameters,
)
from .origin_info import (
    attach_origin_info,
    create_and_update_origin_info_map,
    update_op_callstack_with_origin_info,
)
from .partial_program import PartialProgramLayerHook, partial_program_from
from .utils import (
    ALREADY_D2S,
    NO_SHAPE_VAR_TYPE,
    ast_to_func,
    ast_to_source_code,
    backend_guard,
    func_to_source_code,
    input_specs_compatible,
    is_paddle_func,
    make_hashable,
    prim_or_cinn_is_enabled,
    type_name,
    unwrap,
)

__all__ = []

# For each traced function, we cap the number of cached programs at
# MAX_TRACED_PROGRAM_COUNT to keep caching overhead in check. Once the
# threshold is exceeded, a warning is raised so users can verify that the
# conversion behaves as expected.
MAX_TRACED_PROGRAM_COUNT = 10

CONVERSION_OPTIONS = "__jst_not_to_static"


def synchronized(func):
    func.__lock__ = threading.Lock()

    def lock_func(*args, **kwargs):
        with func.__lock__:
            return func(*args, **kwargs)

    return lock_func

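
# Illustrative sketch (not part of the original module): `synchronized`
# serializes concurrent calls through the per-function lock, so the
# increments below never interleave.
def _example_synchronized_usage():
    counter = {"n": 0}

    @synchronized
    def _bump():
        counter["n"] += 1

    threads = [threading.Thread(target=_bump) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    return counter["n"]  # always 4
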

class FunctionCache:
    """
    Caches the transformed functions to avoid redundant conversions of the same function.
    """

    def __init__(self):
        # Caches the converted static functions. {dygraph_func: static_func}
        self._converted_static_func_caches = weakref.WeakKeyDictionary()
        # Caches the converted ast node for same source code. {source_code: ast_root}
        self._code_to_ast_caches = {}
        self._dygraph_to_static = DygraphToStaticAst()

    def convert_with_cache(self, func):
        """
        Returns the cached static function, or converts the function on the
        first encounter.
        """
        # If hit cache, return it directly.
        static_func = self._converted_static_func_caches.get(func, None)

        if static_func is None:
            static_func = self._convert(func)
            self._converted_static_func_caches[func] = static_func

        return static_func

    def _convert(self, func):
        """
        Converts a dygraph function into a static function. For two functions
        with the same dedented source code, the second reuses the transformed
        AST node of the first.

        For example:
            # A.py
            def foo(x, y):
                z = x + y
                return z

            # B.py
            def foo(x, y):
                z = x + y
                return z

        If the conversion of A.foo happens after B.foo, it will reuse the transformed ast node of B.foo
        to speed up the conversion.
        """
        # Note: In Python 2, inspecting a decorated function directly raises
        # OSError; function.__wrapped__ holds the actual function.
        func = unwrap(func)
        source_code = func_to_source_code(func)

        # TODO(liym27):
        #  Consider this case: source_code in self._code_to_ast_caches,
        #  but actually they are methods in different classes.
        #  Maybe use (__class__, source_code) as key
        if source_code in self._code_to_ast_caches:
            root = self._code_to_ast_caches[source_code]
        else:
            root = gast.parse(source_code)
            root = attach_origin_info(root, func)
            root = self._dygraph_to_static.get_static_ast(root)
            self._code_to_ast_caches[source_code] = root

        # Get static function from AST
        static_func, file_name = ast_to_func(root, func)

        create_and_update_origin_info_map(root, static_func)
        return static_func

    def exist(self, func):
        return func in self._converted_static_func_caches


_CACHE_LOCK = threading.Lock()
_FUNCTION_CACHE = FunctionCache()


def convert_to_static(function):
    """
    Transforms a dygraph function into a static function using the cache
    mechanism.

    Note(dev): It will return function.__func__ if encountering a class method.

    Args:
        function(callable): The function with dygraph layers that will be converted into static layers.
    """
    if getattr(function, ALREADY_D2S, None):
        return function

    # Return directly if decorated with @not_to_static and DO NOT cache it,
    # or if it is a Paddle API.
    options = getattr(function, CONVERSION_OPTIONS, None)
    need_skip = (options is not None and options.not_convert) or is_paddle_func(
        function
    )
    if need_skip:
        return function.__func__ if inspect.ismethod(function) else function

    with _CACHE_LOCK:
        static_func = _FUNCTION_CACHE.convert_with_cache(function)
        setattr(static_func, ALREADY_D2S, True)
        return static_func

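# Illustrative sketch (not part of the original module): `convert_to_static`
# is idempotent. A converted function is flagged with ALREADY_D2S and skipped
# on later calls; `foo` below is a hypothetical example function.
def _example_convert_to_static():
    def foo(x, y):
        return x + y

    static_foo = convert_to_static(foo)
    # Converting the result again returns the very same function object.
    assert convert_to_static(static_foo) is static_foo
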

class CacheKey:
    """
    Cached key for ProgramCache.
    """

    __slots__ = [
        'function_spec',
        'input_args_with_spec',
        'input_kwargs_with_spec',
        'class_instance',
        'kwargs',
        '_spec_names_id',
        '_new_ir_flags',
    ]

    def __init__(
        self,
        function_spec,
        input_args_with_spec,
        input_kwargs_with_spec,
        class_instance,
        **kwargs,
    ):
        """
        Initializes a cache key.

        Args:
            function_spec(FunctionSpec): a FunctionSpec instance of the decorated function.
            input_args_with_spec(list[InputSpec]): actual input args with some arguments replaced by InputSpec.
            input_kwargs_with_spec(list[{string:InputSpec}]): actual input kwargs with some arguments replaced by InputSpec.
            class_instance(object): an instance of class `Layer`.
            **kwargs(dict): manages other arguments for better scalability.
        """
        self.function_spec = function_spec
        self.input_args_with_spec = input_args_with_spec
        self.input_kwargs_with_spec = input_kwargs_with_spec
        self.class_instance = class_instance
        # NOTE: `kwargs` is usually not considered as a basic member for `__hash__`
        self.kwargs = kwargs
        self._spec_names_id = _hash_spec_names(
            input_args_with_spec, input_kwargs_with_spec
        )
        self._new_ir_flags = os.environ.get(
            'FLAGS_enable_new_ir_in_executor', None
        )

    @classmethod
    def from_func_and_args(cls, function_spec, args, kwargs, class_instance):
        """
        Generates a CacheKey instance from the given inputs.

        Args:
            function_spec(FunctionSpec): a FunctionSpec instance of the decorated function.
            args(tuple): tuple of actual input arguments.
            kwargs(dict): dict of actual input keyword arguments.
            class_instance(object): an instance of class `Layer`.
        """
        # 1. filter `self` in args
        if args and isinstance(args[0], layers.Layer):
            args = args[1:]
        # 2. convert tensor and numpy array into InputSpec
        _args, _kwargs = function_spec.unified_args_and_kwargs(args, kwargs)
        (
            input_args_with_spec,
            input_kwargs_with_spec,
        ) = function_spec.args_to_input_spec(_args, _kwargs)

        # 3. check whether hit the cache or build a new program for the input arguments
        return CacheKey(
            function_spec,
            input_args_with_spec,
            input_kwargs_with_spec,
            class_instance,
        )

    def __hash__(self):
        error_msg = "Arguments to a `@paddle.jit.to_static` must be a hashable Python objects (or nested structures of these types)."
        with_hook = self.kwargs.get("with_hook", False)
        is_train = self.kwargs.get("is_train", False)
        return hash(
            (
                id(self.function_spec),
                make_hashable(self.input_args_with_spec, error_msg),
                make_hashable(self.input_kwargs_with_spec, error_msg),
                self._spec_names_id,
                self.class_instance,
                with_hook,
                is_train,
                self._new_ir_flags,
            )
        )

    def __eq__(self, other):
        return (type(self) is type(other)) and hash(self) == hash(other)

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "id(function_spec): {}, input_args_with_spec: {}, input_kwargs_with_spec: {}, class_instance: {}".format(
            id(self.function_spec),
            self.input_args_with_spec,
            self.input_kwargs_with_spec,
            self.class_instance,
        )


def unwrap_decorators(func):
    """
    Unwraps a decorated function and returns the decorator list and inner target.
    """
    decorators = []
    cur = func
    while True:
        if isinstance(cur, StaticFunction):
            decorators.append(cur)
            # Note: if `cur` is a method, keep it as bound method of class.
            instance = cur._class_instance
            if instance is not None:
                cur = cur.dygraph_function.__get__(instance)
            else:
                cur = cur.dygraph_function
        else:
            break
    return decorators, cur

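# Illustrative sketch (not part of the original module): for a plain callable
# that was never wrapped in a StaticFunction, `unwrap_decorators` returns an
# empty decorator list and the callable itself.
def _example_unwrap_decorators():
    def plain(x):
        return x

    decorators, inner = unwrap_decorators(plain)
    assert decorators == [] and inner is plain
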
class StaticFunction:
    def __init__(self, function, input_spec=None, **kwargs):
        """
        Initializes a `StaticFunction`.

        Args:
            function(callable): A function or method that will be converted into a static program.
            input_spec(list[InputSpec]): list of InputSpec to specify the `shape/dtype/name` information for each input argument, default None.
            **kwargs(dict): other arguments like `build_strategy`, etc.
        """
        # Save the instance `self` while decorating a method of a class.
        if inspect.ismethod(function):
            self._dygraph_function = function.__func__
            self._class_instance = function.__self__

            if not hasattr(self._class_instance, '_original_funcs'):
                raise TypeError(
                    "When using 'to_static' to convert method of a class, "
                    "please ensure the class inherits from nn.Layer"
                )
            self._class_instance._original_funcs[
                function.__name__
            ] = self._dygraph_function
        else:
            self._dygraph_function = function
            self._class_instance = None

        if input_spec is not None and prim_or_cinn_is_enabled(
            kwargs.get("build_strategy", None), kwargs.get("backend", None)
        ):
            from paddle.static import InputSpec

            for spec in flatten(input_spec):
                if isinstance(spec, InputSpec) and -1 in spec.shape:
                    input_spec = None
                    warnings.warn(
                        'Now prim and cinn do not support -1 shape, but input_spec has -1 shape so we set it to None.'
                    )
                    break

        self._input_spec = input_spec
        self._function_spec = FunctionSpec(function, input_spec)
        self._program_cache = ProgramCache()
        self._descriptor_cache = weakref.WeakKeyDictionary()
        # Note: Hold a reference to ProgramTranslator for switching `enable_to_static`.
        self._program_trans = ProgramTranslator()
        self._kwargs = kwargs
        self._training = True
        self._cuda_graph_capture_mode = ""
        self._cuda_graph_pool_id = 0
        self._property = kwargs.get("property", False)

    @property
    def is_property(self):
        # Whether this is a class property to be exported.
        return self._property

    def train(self):
        if (
            isinstance(self._class_instance, layers.Layer)
            and self._class_instance.training is False
        ):
            raise RuntimeError(
                "Failed to switch train mode. {} is a Layer's method, "
                "please use Layer.train() to switch train mode.".format(
                    self.dygraph_function
                )
            )
        self._training = True

    def eval(self):
        if (
            isinstance(self._class_instance, layers.Layer)
            and self._class_instance.training is True
        ):
            raise RuntimeError(
                "Failed to switch eval mode. {} is a Layer's method, "
                "please use Layer.eval() to switch eval mode.".format(
                    self.dygraph_function
                )
            )
        self._training = False

    def __get__(self, instance, owner):
        """
        Overrides this method to parse the class instance and call bound method correctly.

        For example:

            '''
            class Net(Layer):
                def __init__(self):
                    pass

                @paddle.jit.to_static
                def forward(self, x, y):
                    return x + y

            net = Net()
            out = net(x, y)
            '''

        In the above case, `net(x, y)` first calls `net.forward(x, y)`, which is a bound method
        of the `Net` instance. After decoration by `@paddle.jit.to_static`, `__get__` is called
        first to parse the class instance correctly instead of the `StaticFunction` instance.
        """
        if instance not in self._descriptor_cache:
            if instance is None:
                return self
            # Note(Aurelius84): Construct a new instance of StaticFunction when we
            # first encounter the bound function of a layer, and cache it.
            new_static_layer = self._clone()
            if (
                isinstance(instance, layers.Layer)
                and self._dygraph_function.__name__
                not in instance._original_funcs.keys()
            ):
                instance._original_funcs[
                    self._dygraph_function.__name__
                ] = self._dygraph_function
            new_static_layer._class_instance = instance
            self._descriptor_cache[instance] = new_static_layer

        return self._descriptor_cache[instance]

    def _clone(self):
        return self.__class__(
            self.dygraph_function, self._input_spec, **self._kwargs
        )

    def __call__(self, *args, **kwargs):
        """
        Supports calling the returned instance with input `args` and `kwargs` directly.

        Args:
            *args(tuple): tuple of all input arguments from the original decorated function.
            **kwargs(dict): dict of all input keyword arguments from the original decorated function.

        Return:
            Outputs of the decorated function.
        """
        if self._property:
            return self._call_dygraph_function(*args, **kwargs)

        # 1. call dygraph function directly if `to_static` is not enabled
        if not self._program_trans.enable_to_static:
            # NOTE(liym27):
            # Here calls `warnings.warn` but not `logging_utils.warn` because by default warnings.warn(message)
            # will show up **only once**. StaticFunction.__call__ will run many times, it is appropriate to
            # display this warning message only once.
            logging_utils.warn(
                "The decorator '@paddle.jit.to_static' does NOT work when setting 'paddle.jit.enable_to_static' to False. "
                "We will just return dygraph output. If you would like to get static graph output, please call API "
                "paddle.jit.enable_to_static(True)"
            )
            return self._call_dygraph_function(*args, **kwargs)

        if not in_dynamic_mode():
            raise RuntimeError(
                "Failed to run the callable object {} decorated by '@paddle.jit.to_static', "
                "because it is NOT in dynamic mode. Please disable the static graph mode to enter dynamic mode with the "
                "following API: paddle.disable_static().".format(
                    self.dygraph_function
                )
            )

        return self._perform_call(*args, **kwargs)

    def _is_train_mode(self):
        if self._class_instance is not None:
            if not hasattr(self._class_instance, 'training'):
                raise TypeError(
                    "When using 'to_static' to convert method of a class, "
                    "please ensure the class inherits from nn.Layer"
                )
            return self._class_instance.training
        else:
            return self._training

    def _call_dygraph_function(self, *args, **kwargs):
        """
        Calls the dygraph function directly and returns the outputs.

        Args:
            *args(tuple): tuple of all input arguments from the original decorated function.
            **kwargs(dict): dict of all input keyword arguments from the original decorated function.

        Return:
            Outputs of the dygraph function.
        """
        return self.dygraph_function(*args, **kwargs)

    def _raise_when_property(self):
        """raise RuntimeError when property=True

        Raises:
            RuntimeError: can not call this func when property=True
        """
        if self.is_property:
            raise RuntimeError("Can not call the func when property=True.")

    def get_concrete_program(self, *args, **kwargs):
        raise NotImplementedError("Not implemented yet.")

    def get_concrete_program_with_cache_key(self, cached_key):
        raise NotImplementedError("Not implemented yet.")

    def get_traced_count(self):
        raise NotImplementedError("Not implemented yet.")

    @property
    def code(self):
        raise NotImplementedError("Not implemented yet.")

    @property
    def dygraph_function(self):
        """
        Returns the original decorated function.
        """
        if self._class_instance is not None:
            return self._dygraph_function.__get__(self._class_instance)
        else:
            return self._dygraph_function

    @property
    def concrete_program(self):
        raise NotImplementedError("Not implemented yet.")

    def concrete_program_specify_input_spec(
        self, input_spec=None, with_hook=False, is_prim_infer=False
    ):
        raise NotImplementedError("Not implemented yet.")

    def rollback(self):
        """
        Rollback into original dygraph functions for current class instance.

        Returns:
            Function or Method

        Example::
            .. code-block:: python

                >>> # doctest: +SKIP
                >>> import paddle

                >>> class Net(paddle.nn.Layer):
                ...     def __init__(self):
                ...         super().__init__()
                ...
                ...     def forward(self, x, flag=True):
                ...         if flag:
                ...             out = x + 1
                ...         else:
                ...             out = x - 1
                ...         return out
                ...
                >>> x = paddle.randn([10, 1], 'float32')
                >>> net = paddle.jit.to_static(Net())  # convert into static graph mode
                >>> out = net(x)

                >>> net.forward.rollback()  # rollback into dygraph mode
                >>> out = net(x)
        """

        def rollback_impl(class_instance):
            for name, func in class_instance._original_funcs.items():
                setattr(class_instance, name, func.__get__(class_instance))

            for sublayer in class_instance.sublayers(include_self=False):
                rollback_impl(sublayer)

        if self._class_instance is None:
            return self._dygraph_function

        # only rollback sub-functions on path of top _dygraph_function
        func_name = self._dygraph_function.__name__
        assert (
            func_name in self._class_instance._original_funcs
        ), "Not Found function '{}' in class '{}'.".format(
594
            func_name, self._class_instance.__class__
X
xiongkun 已提交
595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714
        )
        func = self._class_instance._original_funcs[func_name]
        setattr(
            self._class_instance, func_name, func.__get__(self._class_instance)
        )

        for sublayer in self._class_instance.sublayers(include_self=False):
            rollback_impl(sublayer)

        return getattr(self._class_instance, func_name)

    def __deepcopy__(self, memo):
        """
        Customized behavior for copy.deepcopy: returns the original decorated function instead
        of a new StaticFunction object. StaticFunction itself is not copyable because it's
        associated with class_instance.

        We add __deepcopy__ here only for the following usage:

        Example::
            .. code-block:: python

                >>> import copy
                >>> import paddle

                >>> class Net(paddle.nn.Layer):
                ...     def __init__(self):
                ...         super().__init__()
                ...
                ...     def forward(self, x, flag=True):
                ...         if flag:
                ...             out = x + 1
                ...         else:
                ...             out = x - 1
                ...         return out
                ...
                >>> x = paddle.randn([10, 1], 'float32')
                >>> net = paddle.jit.to_static(Net())  # convert into static graph mode

                >>> copy_net = copy.deepcopy(net)      # deepcopy a new net without @to_static

        Please note that the original 'net' will unwrap @to_static and roll back into a simple Layer.
        """
        if self._class_instance is not None:
            net_name = type(self._class_instance).__name__
            logging_utils.log(
                level=-1,
                msg="Not recommend to deepcopy '{}' decorated with @to_static, it has side effect that will"
                " rollback into original state before @to_static. Please deepcopy '{}' before applying @to_static.".format(
                    net_name, net_name
                ),
            )
            self.rollback()
            return self._dygraph_function.__get__(
                memo[id(self._class_instance)]
            )
        else:
            return self._dygraph_function

    @property
    def inputs(self):
        raise NotImplementedError("Not implemented yet.")

    @property
    def outputs(self):
        raise NotImplementedError("Not implemented yet.")

    @property
    def main_program(self):
        raise NotImplementedError("Not implemented yet.")

    @property
    def program_cache(self):
        raise NotImplementedError("Not implemented yet.")

    @property
    def function_spec(self):
        raise NotImplementedError("Not implemented yet.")


def raise_error_template(func_str):
    def _raise_error(*args, **kwargs):
        error_template = (
            "Can't call {func} when enable_fallback=True. "
            "Use paddle.jit.to_static(enable_fallback=False) instead."
        )
        raise RuntimeError(error_template.format(func=func_str))

    return _raise_error

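# Illustrative sketch (not part of the original module): each callable produced
# by `raise_error_template` raises a RuntimeError naming the blocked API.
def _example_raise_error_template():
    blocked = raise_error_template("code")
    try:
        blocked()
    except RuntimeError as e:
        assert "Can't call code" in str(e)
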

class SymbolicStaticFunction(StaticFunction):
    def __init__(self, function, input_spec=None, **kwargs):
        if input_spec is not None:
            warnings.warn(
                "\nSymbolic Trace doesn't support input_spec arguments. It will not take effect.\n"
                "1. You can disable fallback mode by `paddle.jit.to_static(enable_fallback=False)` to switch to AST-based to_static, then you can assign an input spec.\n"
            )
        super().__init__(function, input_spec, **kwargs)
        self.last_call_input_spec = None

    def _perform_call(self, *args, **kwargs):
        args, kwargs = self._function_spec.unified_args_and_kwargs(args, kwargs)
        (
            input_args_with_spec,
            input_kwargs_with_spec,
        ) = self._function_spec.args_to_input_spec(args, kwargs)
        self.last_call_input_spec = input_args_with_spec

        try:
            from sot import symbolic_translate
        except ImportError:
            os.system(
                "pip install git+https://github.com/PaddlePaddle/PaddleSOT@develop"
            )
            from sot import symbolic_translate

        build_strategy = self._kwargs.get("build_strategy", None)
        backend = self._kwargs.get("backend", None)
        traced_fun = symbolic_translate(
            self._dygraph_function,
            build_strategy=build_strategy,
            backend=backend,
        )
        if self._class_instance is not None:
            args = (self._class_instance,) + args
        return traced_fun(*args, **kwargs)

    @property
    def code(self):
        raise_error_template("code")()

    @property
    def concrete_program(self):
        raise_error_template("concrete_program")()

    concrete_program_specify_input_spec = raise_error_template(
        "concrete_program_specify_input_spec"
    )
    get_concrete_program = raise_error_template("get_concrete_program")
    get_concrete_program_with_cache_key = raise_error_template(
        "get_concrete_program_with_cache_key"
    )
    get_traced_count = raise_error_template("get_traced_count")

    @property
    def inputs(self):
        raise_error_template("inputs")()

    @property
    def outputs(self):
        raise_error_template("outputs")()

    @property
    def main_program(self):
        raise_error_template("main_program")()

    @property
    def program_cache(self):
        raise_error_template("program_cache")()

    @property
    def function_spec(self):
        raise_error_template("function_spec ")()


class ASTStaticFunction(StaticFunction):
    """
    Wrapper class to manage program conversion of the decorated function.

    """

    def __init__(self, function, input_spec=None, **kwargs):
        super().__init__(function, input_spec, **kwargs)

    def _perform_call(self, *args, **kwargs):
        # 1. trace ops from dygraph layers and cache the generated program.
        args, kwargs = self._function_spec.unified_args_and_kwargs(args, kwargs)

        try:
            concrete_program, partial_program_layer = self.get_concrete_program(
                *args, **kwargs, is_train=self._is_train_mode()
            )
            # 2. synchronize self.training attribute.
            if isinstance(self._class_instance, layers.Layer):
                partial_program_layer.training = self._class_instance.training
            else:
                partial_program_layer.training = self._training

            partial_program_layer._cuda_graph_capture_mode = (
                self._cuda_graph_capture_mode
            )
            partial_program_layer._cuda_graph_pool_id = self._cuda_graph_pool_id

            # 3. return outputs.
            try:
                return partial_program_layer(args)
            except Exception as e:
                if not hasattr(e, error.ERROR_DATA):
                    # runtime error
                    error.attach_error_data(e, in_runtime=True)
                    raise
        except Exception as e:
            error_data = getattr(e, error.ERROR_DATA, None)
            if error_data:
                error_data.raise_new_exception()
            else:
                logging_utils.warn(
                    "Please file an issue at 'https://github.com/PaddlePaddle/Paddle/issues'"
                    " if you can't handle this {} yourself.".format(type(e))
                )
                raise e

    def get_concrete_program(self, *args, **kwargs):
        """
        Returns traced concrete program and inner executable partial layer.

        Args:
            *args(tuple): input arguments values or InputSpec
            **kwargs(dict) : input kwargs values.

        Returns:
            Traced ConcreteProgram and executable translated Layer.
        """
        self._raise_when_property()

        with_hook = kwargs.get("with_hook", False)
        is_train = kwargs.get("is_train", True)
        is_prim_infer = kwargs.get("is_prim_infer", False)
        if "is_train" in kwargs:
            kwargs.pop("is_train")
        if "with_hook" in kwargs:
            kwargs.pop("with_hook")
        if "is_prim_infer" in kwargs:
            kwargs.pop("is_prim_infer")
        # 1. unify args/kwargs and replace Tensor with InputSpec
        if len(args) != len(self._function_spec.args_name):
            args, kwargs = self._function_spec.unified_args_and_kwargs(
                args, kwargs
            )
        (
            input_args_with_spec,
            input_kwargs_with_spec,
        ) = self._function_spec.args_to_input_spec(args, kwargs)

        # 2. generate cache key
        cache_key = CacheKey(
            self._function_spec,
            input_args_with_spec,
            input_kwargs_with_spec,
            self._class_instance,
            **self._kwargs,
            with_hook=with_hook,
            is_train=is_train,
        )
        if is_prim_infer:
            (
                concrete_program,
                partial_program_layer,
            ) = self._program_cache.get_program_without_cache(cache_key)
        else:
            # 3. check whether hit the cache or build a new program for the input arguments
            concrete_program, partial_program_layer = self._program_cache[
                cache_key
            ]
        return concrete_program, partial_program_layer

    def get_concrete_program_with_cache_key(self, cached_key):
        """
        Returns traced concrete program and inner executable partial layer by cached key.

        Args:
            cached_key(CacheKey): The cached key use to get concrete program.

        Returns:
            Traced ConcreteProgram and executable translated Layer.
        """
        self._raise_when_property()
        (
            concrete_program,
            partial_program_layer,
        ) = self._program_cache.get_program_without_cache(cached_key)
        return concrete_program, partial_program_layer

    def get_traced_count(self):
        """
        Returns the number of traced programs for the decorated function.
        """
        return len(self._program_cache)

    @property
    def code(self):
        """
        Returns the source code of transformed static function for debugging.
        """
        static_func = convert_to_static(self.dygraph_function)
        source_code = func_to_source_code(static_func)
        return source_code

    @property
    def concrete_program(self):
        """
        Returns recent ConcreteProgram instance of decorated function.

        Examples:
            .. code-block:: python

                >>> # doctest: +SKIP
                >>> import paddle
                >>> from paddle.jit import to_static
                >>> from paddle.static import InputSpec

                >>> paddle.disable_static()

                >>> def foo(x, y):
                ...     z = x + y
                ...     return z
                ...
                >>> # usage 1:
                >>> decorated_foo = to_static(foo, input_spec=[InputSpec([10], name='x'), InputSpec([10], name='y')])
                >>> print(decorated_foo.concrete_program)

                >>> # usage 2:
                >>> decorated_foo = to_static(foo)
                >>> out_foo = decorated_foo(paddle.rand([10]), paddle.rand([10]))
                >>> print(decorated_foo.concrete_program)
        """
        return self.concrete_program_specify_input_spec(input_spec=None)

    def concrete_program_specify_input_spec(
        self, input_spec=None, with_hook=False, is_prim_infer=False
    ):
        """
        Returns recent ConcreteProgram instance of decorated function while
        specifying input_spec. If the self._function_spec already has
932
        input_spec, it will check the compatibility of input input_spec and
933 934 935 936 937 938 939
        the self._function_spec.input_spec. If input input_spec=None, then
        this method uses self._function_spec.input_spec

        args:
            input_spec (list[InputSpec], optional): Describes the input of
                the translate function.
        """
940
        self._raise_when_property()
941 942 943 944
        # If `input_spec` is specified, the length of program_cache will always be 1;
        # otherwise, return the last one.
        cached_program_len = len(self._program_cache)
        # If `input_spec` is specified, apply conversion from dygraph layers into a static Program.
        # NOTE(jiabin): is_prim_infer indicates this method is called by paddle.jit.save and works in prim mode.

        desired_input_spec = input_spec
        if self._function_spec.input_spec is not None:
            if input_spec is not None and not input_specs_compatible(
                flatten(input_spec), flatten(self._function_spec.input_spec)
            ):
                raise ValueError(
                    "The `input_spec`: {} used to construct concrete_program is conflict with the `input_spec`: {} in `@paddle.jit.to_static`".format(
                        input_spec, self._function_spec.input_spec
                    )
                )
            # NOTE(chenweihang): we should always translated program based on the `input_spec`
            # decorated on forward if it is valid
            desired_input_spec = self._function_spec.input_spec
            if input_spec is not None:
                logging_utils.warn(
                    "\n\nYou have specified `input_spec` both in function definition (higher priority) and `paddle.jit.save` (will be ignored.)\n\n\t Using: {}\n\n\t Ignore: {}\n".format(
                        desired_input_spec, input_spec
                    )
                )

        has_input_spec = desired_input_spec is not None
        if has_input_spec:
            concrete_program, _ = self.get_concrete_program(
                *desired_input_spec,
                with_hook=with_hook,
                is_train=self._is_train_mode(),
                is_prim_infer=is_prim_infer,
            )
            return concrete_program
        else:
            if cached_program_len != 0:
                logging_utils.warn(
                    "No input_spec is found, save cached program instead"
                )
                if cached_program_len > 1:
                    logging_utils.warn(
                        "Current {} has more than one cached program: {}, the last traced program will be returned by default.".format(
                            self._function_spec, cached_program_len
                        )
                    )
                cache_key = self._program_cache._recent_cache_key

                if with_hook:
                    cache_key.kwargs["with_hook"] = True

                if is_prim_infer:
                    (
                        concrete_program,
                        _,
                    ) = self.get_concrete_program_with_cache_key(cache_key)
                    return concrete_program
                else:
                    concrete_program, _ = self._program_cache[cache_key]
                    return concrete_program

            else:
                raise ValueError(
                    "No valid transformed program for {}.\n\t    Please specify `input_spec` in `@paddle.jit.to_static` or feed input tensors to call the decorated function at once.\n".format(
                        self._function_spec
                    )
                )

    @property
    def inputs(self):
        """
        Returns input tensors of recent converted static program.
        """
        self._raise_when_property()
        concrete_program = self.concrete_program
        inputs = [
            var
            for var in flatten(concrete_program.inputs)
            if isinstance(var, framework.Variable)
        ]
        return inputs

    @property
    def outputs(self):
        """
        Returns output tensors of recent converted static program.
        """
        self._raise_when_property()
        concrete_program = self.concrete_program
        outputs = [
            var
            for var in flatten(concrete_program.outputs)
            if isinstance(var, framework.Variable)
        ]

        return outputs

    @property
    def main_program(self):
        """
        Returns recent converted static main program.
        """
        self._raise_when_property()
        concrete_program = self.concrete_program
        main_program = concrete_program.main_program
        return main_program

    @property
    def program_cache(self):
        return self._program_cache

    @property
    def function_spec(self):
        return self._function_spec


def _verify_init_in_dynamic_mode(class_instance):
    """
    Verifies the instance is initialized in dynamic mode.
    """
    if isinstance(class_instance, layers.Layer):
        if not class_instance._init_in_dynamic_mode:
            raise RuntimeError(
                " `paddle.jit.to_static` is only available in dynamic mode. Please call `paddle.disable_static()` before "
                "initializing your Layer class `{}` . Because parameters of Layer class should be initialized firstly "
                "in dynamic mode while applying transformation.".format(
                    class_instance
                )
            )


class HookHelper:
    """
    Only for converting pre/post hook operations in the outermost layer while jit.save,
    because hooks in sublayers have been processed automatically.
    """

    def __init__(self, func, class_instance, with_hook=False):
        self.func = func
        self.class_instance = class_instance
        self.with_hook = with_hook
        self.need_apply_hook = (
            with_hook
            and isinstance(self.class_instance, layers.Layer)
            and func.__name__ == "forward"
        )

    def apply_pre_hooks(self, inputs):
        """
        Apply _forward_pre_hooks from outermost layer
        """
        if not self.need_apply_hook:
            return inputs

        inputs = inputs[1:]
        for forward_pre_hook in self.class_instance._forward_pre_hooks.values():
            hook_result = forward_pre_hook(self.class_instance, inputs)
            if hook_result is not None:
                if not isinstance(hook_result, tuple):
                    hook_result = (hook_result,)
                inputs = hook_result

        return [self.class_instance] + list(inputs)

    def apply_post_hooks(self, inputs, outputs):
        """
        Apply _forward_post_hooks from outermost layer
        """
        if not self.need_apply_hook:
            return outputs

        inputs = inputs[1:]
        for (
            forward_post_hook
        ) in self.class_instance._forward_post_hooks.values():
            hook_result = forward_post_hook(
                self.class_instance, inputs, outputs
            )
            if hook_result is not None:
                outputs = hook_result

        inputs.insert(0, self.class_instance)
        return outputs


class ConcreteProgram:
    __slots__ = [
        'inputs',
        'outputs',
        'main_program',
        "startup_program",
        "parameters",
        "function",
        "name_generator",
        'kwargs',
    ]

    def __init__(
        self,
        inputs,
        outputs,
        parameters,
        function,
        name_generator,
        main_program,
        startup_program=None,
        **kwargs,
    ):
        self.inputs = inputs
        self.outputs = outputs
        self.main_program = main_program
        self.startup_program = startup_program
        self.parameters = parameters
        self.function = function
        self.name_generator = name_generator
        self.kwargs = kwargs

    @staticmethod
    @switch_to_static_graph
    def from_func_spec(
        func_spec, input_spec, input_kwargs_spec, class_instance, **kwargs
    ):
        """
        Builds the main_program with specialized inputs and returns the outputs
        of the program as the fetch_list.

        Args:
            func_spec(FunctionSpec): A FunctionSpec instance for the decorated function.
            input_spec(list[InputSpec]):
        """
        # verify the instance is initialized in imperative mode.
        _verify_init_in_dynamic_mode(class_instance)

        # Transforms dygraph function into static function and caches it.
        dygraph_function = func_spec.dygraph_function
        static_func = convert_to_static(dygraph_function)
        # apply pre/post hooks for the outermost layer
        hook_helper = HookHelper(
            dygraph_function, class_instance, kwargs.get("with_hook", False)
        )

        main_program, startup_program = framework.Program(), framework.Program()
        # Note: The random seed should be synchronized into the cached program
        # if set in `fluid.dygraph_guard` because some ops rely on it, such as
        # `fluid.layers.dropout`.
        main_program.random_seed = framework.default_main_program().random_seed
        startup_program.random_seed = (
            framework.default_startup_program().random_seed
        )

        new_name_generator = UniqueNameGenerator()

        with framework.program_guard(main_program, startup_program):
            with _switch_declarative_mode_guard_(
                is_declarative=True
            ), UniqueNameGuard(new_name_generator):
                # 1. Adds `paddle.static.data` layers for input if needed
                static_inputs = func_spec.to_static_inputs_with_spec(
                    input_spec, main_program
                )
                _kwargs = func_spec.to_static_inputs_with_spec(
                    input_kwargs_spec, main_program
                )
                if class_instance:
                    static_inputs = tuple(
                        [class_instance] + list(static_inputs)
                    )

                # 2. Builds program only once and returns the output Variables.
                with param_guard(
                    get_parameters(class_instance, False)
                ), param_guard(get_buffers(class_instance, False)):
                    try:
                        # only for jit.save, do nothing during train and eval
                        inputs = hook_helper.apply_pre_hooks(static_inputs)
                        if _kwargs:
                            outputs = static_func(*inputs, **_kwargs)
                        else:
                            outputs = static_func(*inputs)
                        outputs = hook_helper.apply_post_hooks(inputs, outputs)
                    except BaseException as e:
                        # NOTE: If e is raised in compile time, e should be attached to ERROR_DATA here.
                        error.attach_error_data(e)
                        error_data = getattr(e, error.ERROR_DATA, None)
                        if error_data:
                            error_data.raise_new_exception()
                        raise

                # 3. Gets all ParamBases and buffered VarBases in the function
                all_parameters_and_buffers = (
                    ProgramTranslator.get_instance()._params_recorder.pop(
                        main_program
                    )
                )

                if outputs is not None:
                    need_wrap_into_list = (
                        not isinstance(outputs, (tuple, list))
                        or len(outputs) == 1
                    )
                    if need_wrap_into_list:
                        outputs = [outputs]

        main_program = update_op_callstack_with_origin_info(main_program)

        return ConcreteProgram(
            inputs=static_inputs,
            outputs=outputs,
            parameters=all_parameters_and_buffers,
            function=dygraph_function,
            name_generator=new_name_generator,
            main_program=main_program,
            startup_program=startup_program,
            **kwargs,
        )


class ParametersRecorder:
    def __init__(self):
        self.params_dict = {}

    @synchronized
    def add(self, program, param):
        """use the default_program as key, append param the parameter list."""
        key = self._program_hash(program)
        if key not in self.params_dict:
            self.params_dict[key] = set()
        params = self.params_dict[key]
        params.add(param)

    def pop(self, program):
        params = self.params_dict.get(self._program_hash(program))
        if params is None:
            return []
        del self.params_dict[self._program_hash(program)]
        return list(params)

    def _program_hash(self, program):
        """
        because program is not deleted while calling from_func_spec.
        so it's ok to use id(program)
        """
        return id(program)

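# Illustrative sketch (not part of the original module): `ParametersRecorder`
# groups parameters per program, keyed by `id(program)`; `pop` drains the set
# recorded for that program. Plain objects stand in for real Programs/params.
def _example_parameters_recorder():
    recorder = ParametersRecorder()
    program = object()          # stands in for a framework.Program
    recorder.add(program, "w")  # strings stand in for parameters
    recorder.add(program, "b")
    assert sorted(recorder.pop(program)) == ["b", "w"]
    assert recorder.pop(program) == []  # drained; later pops are empty
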

class ParametersMap:
    def __init__(self):
        self.params_dict = {}

    @synchronized
    def add(self, program, id, param):
        """use the default_program as key, append param the parameter list."""
        key = self._program_hash(program)
        if key not in self.params_dict:
            self.params_dict[key] = {}

        params = self.params_dict[key]
        params[id] = param

    def get(self, program, id):
        params = self.params_dict.get(self._program_hash(program))
        if params is None:
            return None
        if id not in params:
            return None
        root_var = params[id]
        saved = []
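        # Follow the alias chain to its root, then compress the path: every
        # variable visited on the way is re-pointed directly at the root so
        # later lookups resolve in one step.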
        while root_var.desc.id() in params.keys():
            saved.append(root_var)
            root_var = params[root_var.desc.id()]
        for var in saved:
            params[var.desc.id()] = root_var
        return root_var

    def _program_hash(self, program):
        """
        because program is not deleted while calling from_func_spec.
        so it's ok to use id(program)
        """
        return id(program)


class FallbackProgramLayer:
    __slots__ = [
        '_instance',
        '_dy_func',
        'training',
        '_cuda_graph_capture_mode',
        '_cuda_graph_pool_id',
    ]

    def __init__(self, instance, dy_func):
        self._instance = instance
        self._dy_func = dy_func

    def __call__(self, inputs):
        return self._dy_func(*inputs)

    def __getattr__(self, key):
        if key not in self.__slots__:
            raise RuntimeError(
                "An exception was raised while applying `@paddle.jit.to_static()`, and the program has switched into fallback mode. \n"
                "You can't get an attribute of a fallback program layer. Please check the `to_static.error` file for details."
            )
        elif key in ['training']:
            if self._instance is not None:
                return getattr(self._instance, key)
            return

        return super().__getattr__(key)

    def __setattr__(self, key, value):
        if key not in self.__slots__:
            raise RuntimeError(
                "An exception was raised while applying `@paddle.jit.to_static()`, and the program has switched into fallback mode. \n"
                "You can't set an attribute of a fallback program layer. Please check the `to_static.error` file for details."
            )
        elif key in ['training']:
            if self._instance is not None:
                return setattr(self._instance, key, value)
            return

        return super().__setattr__(key, value)

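# Illustrative sketch (not part of the original module): a fallback layer simply
# unpacks the input tuple and forwards it to the original dygraph function.
def _example_fallback_program_layer():
    layer = FallbackProgramLayer(instance=None, dy_func=lambda x, y: x + y)
    return layer((1, 2))  # -> 3
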

class ProgramCache:
    """
    Wrapper class for the programs built from a dygraph function.
    """

    dy2static_error_file = "to_static.error"

    def __init__(self):
        # {hash_id : (concrete_program, partial_layer)}
        self._caches = collections.OrderedDict()
        # track the most recently used program
        self._recent_key = None
        self._recent_cache_key = None

    def _build_once(self, cache_key):
        # TODO(Aurelius84): Need a global FLAGS to enable/disable to_prim
        enable_prim = cache_key.kwargs['build_strategy'].build_cinn_pass

        # NOTE(xiongkun): Need a global FLAGS to enable/disable fallback
        enable_fallback = enable_prim
        try:
            concrete_program = ConcreteProgram.from_func_spec(
                func_spec=cache_key.function_spec,
                input_spec=cache_key.input_args_with_spec,
                input_kwargs_spec=cache_key.input_kwargs_with_spec,
                class_instance=cache_key.class_instance,
                **cache_key.kwargs,
            )
        except Exception as e:
            if enable_fallback:
                warnings.warn(
                    "Exception is thrown while applying @paddle.jit.to_static. It will fallback into dygraph mode for training.\n"
                    "1. You can check `to_static.error` file in current workspace directory for detail.\n"
                    "2. In fallback mode, you can only do training, can't call paddle.jit.save(). Please modify model code according `to_static.error` firstly"
                )
                # TODO(xiongkun) change different file name to avoid overwrite.
                with open(self.dy2static_error_file, "w") as fp:
                    fp.write(str(e))
                fallback_layer = FallbackProgramLayer(
                    cache_key.class_instance,
                    cache_key.function_spec.dygraph_function,
                )
                return fallback_layer, fallback_layer
            else:
                raise
        backend = cache_key.kwargs['backend']
        if prim_or_cinn_is_enabled(cache_key.kwargs['build_strategy'], backend):
            for var in concrete_program.main_program.list_vars():
                if var.type not in NO_SHAPE_VAR_TYPE and -1 in var.shape:
                    warnings.warn(
                        "Now prim and cinn do not support -1 shape, but the shape of var {} is {}".format(
                            var.name, var.shape
                        )
                    )
1423

1424 1425 1426
        partial_program = partial_program_from(
            concrete_program, cache_key.class_instance is not None
        )
1427 1428 1429 1430 1431
        with backend_guard(backend):
            if core._is_fwd_prim_enabled():
                partial_program.set_hooker(
                    PrimHooker(concrete_program.main_program, backend)
                )
1432 1433
        return concrete_program, partial_program

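    # Return contract of `_build_once` (sketch, inferred from the code above):
    # on success the pair is (ConcreteProgram, PartialProgramLayer); in
    # fallback mode both elements are the same FallbackProgramLayer, so
    # callers can unpack the result uniformly:
    #
    #     concrete_program, partial_layer = self._build_once(cache_key)
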
    def __getitem__(self, item):
        if not isinstance(item, CacheKey):
            raise ValueError(
                'type(item) should be CacheKey, but received %s'
                % type_name(item)
            )
        item_id = hash(item)
        self._recent_cache_key = item
        self._recent_key = item_id
        if item_id not in self._caches:
            self._caches[item_id] = self._build_once(item)
            # Note: raise a warning if the number of traced programs exceeds `MAX_TRACED_PROGRAM_COUNT`.
            current_tracing_count = len(self._caches)
            if current_tracing_count > MAX_TRACED_PROGRAM_COUNT:
                logging_utils.warn(
                    "Current traced program number: {} > `max_tracing_count`:{}. Too many cached programs will bring expensive overhead. "
                    "The reason may be: (1) passing tensors with different shapes, (2) passing python objects instead of tensors.".format(
                        current_tracing_count, MAX_TRACED_PROGRAM_COUNT
                    )
                )

        return self._caches[item_id]

    def get_program_without_cache(self, cache_key):
        return self._build_once(cache_key=cache_key)

    def get_program(self, item):
        if not isinstance(item, CacheKey):
            raise ValueError(
                "Input item's type should be CacheKey, but received %s"
                % type_name(item)
            )
        item_id = hash(item)
        if item_id not in self._caches:
            raise RuntimeError(
                "Failed to find program for input item, please decorate input function by `@paddle.jit.to_static`."
            )
        return self._caches[item_id]

    def last(self):
        assert (
            len(self._caches) >= 1
        ), "No valid cached program in ProgramCache."
        assert self._recent_key is not None
        return self._recent_key, self._caches[self._recent_key]

    def __len__(self):
        return len(self._caches)

    def concrete_programs(self):
        return [cp for key, (cp, _) in self._caches.items()]

    def clear(self):
        self._caches = collections.OrderedDict()

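
# A minimal usage sketch of ProgramCache (hedged; `func` is a hypothetical
# dygraph function, and the CacheKey construction mirrors
# ProgramTranslator.get_output below):
#
#     cache = ProgramCache()
#     cache_key = CacheKey.from_func_and_args(
#         FunctionSpec(func), (paddle.ones([1, 2]),), {}, None
#     )
#     concrete_program, partial_layer = cache[cache_key]  # built on first access
#     assert len(cache) == 1  # later lookups with the same key hit the cache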


class PrimHooker(PartialProgramLayerHook):
    def __init__(self, original_program, backend):
        if len(original_program.blocks) > 1:
            raise ValueError(
                'The primitive mode only supports one block currently.'
            )
        self.backend = backend
        self.custom_vjps = set()
        with backend_guard(self.backend):
            if core._is_all_prim_enabled():
                self.custom_vjps = {
                    op.type
                    for op in original_program.block(0).ops
                    if core.has_comp_grad_op_maker(op.type)
                }

    def before_append_backward(self, forward_program):
        with backend_guard(self.backend):
            if core._is_fwd_prim_enabled():
                _to_prim(forward_program.blocks, blacklist=self.custom_vjps)
            return forward_program

    def after_append_backward(self, whole_program, backward_start_idx):
        with backend_guard(self.backend):
            backward_length = (
                len(whole_program.block(0).ops) - backward_start_idx
            )
            if core._is_fwd_prim_enabled() and len(self.custom_vjps) != 0:
                # only process backward part of block
                _to_prim(whole_program.blocks, backward_length=backward_length)
            new_start_index = len(whole_program.block(0).ops) - backward_length
            if backward_length > 0:
                # only process forward part of block
                _to_prim(whole_program.blocks, start_idx=new_start_index)
            return whole_program, new_start_index

    def after_infer(self, infer_program):
        with backend_guard(self.backend):
            if core._is_fwd_prim_enabled():
                _to_prim(infer_program.block(0))
            return infer_program

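# Hook-order sketch (hedged, based on the methods above): the owning
# PartialProgramLayer invokes the hooks roughly as
#
#     hooker.before_append_backward(forward_program)      # lower forward ops,
#                                                         # skip custom-VJP ops
#     hooker.after_append_backward(whole_program, start)  # lower the rest once
#                                                         # gradients are appended
#     hooker.after_infer(infer_program)                   # lower the inference program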

class ProgramTranslator:
    """
    Class to translate dygraph function into static graph function. The object
    of this class is a singleton.

    Args:
        None.

    Returns:
        ProgramTranslator: the singleton object.

    Examples:
        .. code-block:: python

            >>> import paddle

            >>> # Two methods get the same object because ProgramTranslator is a singleton
            >>> paddle.jit.dy2static.program_translator.ProgramTranslator()
            >>> paddle.jit.dy2static.program_translator.ProgramTranslator.get_instance()

    """

    _singleton_lock = threading.Lock()
    _instance = None

    @synchronized
    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = object.__new__(cls)
            cls._instance._initialized = False
        return cls._instance

    @classmethod
    def get_instance(cls):
        if cls._instance is None:
            with cls._singleton_lock:
                cls._instance = cls()
        return cls._instance

    @classmethod
    def reset(cls):
        if cls._instance is not None:
            cls._instance._initialized = False
            cls._instance.__init__()
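
    # Singleton sketch: both construction paths below return the same object,
    # and `reset()` re-initializes it in place:
    #
    #     t1 = ProgramTranslator()
    #     t2 = ProgramTranslator.get_instance()
    #     assert t1 is t2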

    def __init__(self):
        # Ensure that __init__ runs only once for the singleton instance.
        if self._initialized:
            return
        self._initialized = True
        self._program_cache = ProgramCache()
        self._params_recorder = ParametersRecorder()
        self._params_map = ParametersMap()
        self.enable_to_static = True

    def enable(self, enable_to_static):
        """
        Enable or disable the converting from imperative to static graph by
        ProgramTranslator globally.

        Args:
            enable_to_static (bool): True or False to enable or disable converting to static.

        Returns:
            None.

        Examples:
            .. code-block:: python

                >>> # doctest: +SKIP
                >>> import paddle
                >>> def func(x):
                ...     if paddle.mean(x) > 0:
                ...         x_v = x - 1
                ...     else:
                ...         x_v = x + 1
                ...     return x_v
                ...
                ...
                >>> prog_trans = paddle.jit.dy2static.program_translator.ProgramTranslator()
                >>> prog_trans.enable(False)

                >>> x = paddle.ones([1, 2])
                >>> # to_static is disabled, so func runs in dygraph mode
                >>> x_v = prog_trans.get_output(func, x)
                >>> print(x_v)
                Tensor(shape=[1, 2], dtype=float32, place=Place(cpu), stop_gradient=True,
                [[0., 0.]])
        """
        check_type(
            enable_to_static,
            "enable_to_static",
            bool,
            "ProgramTranslator.enable",
        )
        self.enable_to_static = enable_to_static

    def get_output(self, dygraph_func, *args, **kwargs):
        """
        Returns the output dygraph Tensor for dygraph function. The dygraph
        function will be translated into static graph function so the
        underlying numerical result will be calculated in static graph mode.

        Args:
            dygraph_func (callable): the dygraph function.
            *args (tuple): the input argument of dygraph_func.
            **kwargs (dict): the input argument of dygraph_func.

        Returns:
            Tensor or tuple of Tensors: the dygraph Tensor containing the numerical result.

        Examples:
            .. code-block:: python

                >>> # doctest: +SKIP
                >>> import paddle
                >>> def func(x):
                ...     if paddle.mean(x) > 0:
                ...         x_v = x - 1
                ...     else:
                ...         x_v = x + 1
                ...     return x_v
                ...
                ...
                >>> prog_trans = paddle.jit.dy2static.program_translator.ProgramTranslator()

                >>> x = paddle.ones([1, 2])
                >>> x_v = prog_trans.get_output(func, x)
                >>> print(x_v)
                Tensor(shape=[1, 2], dtype=float32, place=Place(cpu), stop_gradient=True,
                [[0., 0.]])
        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_output"

        if not self.enable_to_static:
            # Warn that to_static is disabled and return the dygraph result directly.
            logging_utils.warn(
                "The ProgramTranslator.get_output doesn't work when setting ProgramTranslator.enable to False. "
                "We will just return dygraph output. "
                "Please call ProgramTranslator.enable(True) if you would like to get static output."
            )
            return dygraph_func(*args, **kwargs)
        try:
            function_spec = FunctionSpec(dygraph_func)
            cache_key = CacheKey.from_func_and_args(
                function_spec,
                args,
                kwargs,
                getattr(dygraph_func, '__self__', None),
            )
            _, partial_program_layer = self._program_cache[cache_key]

            if args and isinstance(args[0], layers.Layer):
                # Synchronize self.training attribute.
                partial_program_layer.training = args[0].training
                args = args[1:]
            try:
                return partial_program_layer(args)
            except BaseException as e:
                # NOTE:
                # 1. If e is raised at compile time, e should have been attached to ERROR_DATA before;
                # 2. If e is raised at runtime, e should be attached to ERROR_DATA here.
                if not hasattr(e, error.ERROR_DATA):
                    # runtime error
                    error.attach_error_data(e, in_runtime=True)
                raise
        except BaseException as e:
            error_data = getattr(e, error.ERROR_DATA, None)
            if error_data:
                error_data.raise_new_exception()
            else:
                logging_utils.warn(
                    "Please file an issue at 'https://github.com/PaddlePaddle/Paddle/issues'"
                    " if you can't handle this {} yourself.".format(type(e))
                )
                raise e

    def get_func(self, dygraph_func):
        """
        Returns a callable function which converts imperative dygraph APIs of
        the input dygraph_func into declarative net-building APIs, which means
        it doesn't return an immediate numerical result as get_output does.
        Users should handle Program and Executor by themselves.

        Args:
            dygraph_func (callable): the dygraph function.

        Returns:
            callable: converting imperative dygraph APIs into declarative
            net-building APIs.

        Examples:
            .. code-block:: python

                >>> # doctest: +SKIP
                >>> import paddle
                >>> def func(x):
                ...     if paddle.mean(x) > 0:
                ...         x_v = x - 1
                ...     else:
                ...         x_v = x + 1
                ...     return x_v
                ...
                >>> prog_trans = paddle.jit.dy2static.program_translator.ProgramTranslator()
                >>> static_func = prog_trans.get_func(func)
                >>> print(callable(static_func))
                True
        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_func"

        if not self.enable_to_static:
            logging_utils.warn(
                "The ProgramTranslator.get_func doesn't work when setting ProgramTranslator.enable to False. We will "
                "just return dygraph output. Please call ProgramTranslator.enable(True) if you would like to get static output."
            )
            return dygraph_func

        static_func = convert_to_static(dygraph_func)
        return static_func
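
    # Usage sketch for get_func (hedged): the returned callable only builds
    # graph ops, so trace it inside a static Program context and run it with
    # an Executor yourself:
    #
    #     static_func = prog_trans.get_func(func)
    #     main_prog = paddle.static.Program()
    #     startup_prog = paddle.static.Program()
    #     with paddle.static.program_guard(main_prog, startup_prog):
    #         x = paddle.static.data(name='x', shape=[1, 2], dtype='float32')
    #         out = static_func(x)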

    def get_program(self, dygraph_func, *args, **kwargs):
        """
        Returns the translated static program and input/output Tensors from
        dygraph function. Users can then run the program with an executor.

        Args:
            dygraph_func (callable): the dygraph function.
            *args (tuple): the input argument of dygraph_func.
            **kwargs (dict): the input argument of dygraph_func.

        Returns:
            tuple of (main_program, startup_program, inputs, outputs) whose
            types are (Program, Program, list of Tensors, list of Tensors).
            main_program: the converted main program.
            startup_program: the converted startup program.
            inputs: list of input Tensors which need to be fed.
            outputs: list of output Tensors which users can fetch.

        Examples:
            .. code-block:: python

                >>> # doctest: +SKIP
                >>> import paddle
                >>> def func(x):
                ...     if paddle.mean(x) > 0:
                ...         x_v = x - 1
                ...     else:
                ...         x_v = x + 1
                ...     return x_v
                ...
                >>> prog_trans = paddle.jit.dy2static.program_translator.ProgramTranslator()
                >>> x = paddle.ones([1, 2])
                >>> main_prog, start_prog, inputs, outputs = prog_trans.get_program(func, x)
                >>> print([i.name for i in inputs])
                >>> # [u'generated_tensor_0'] the feed input Tensor name representing x
                >>> print([o.name for o in outputs])
                >>> # [u'_generated_var_4'] the fetch output Tensor name representing x_v
        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_program"

        if not self.enable_to_static:
            logging_utils.warn(
                "The ProgramTranslator.get_program doesn't work when setting ProgramTranslator.enable to False. "
                "We will just return dygraph output. "
                "Please call ProgramTranslator.enable(True) if you would like to get static output."
            )
            return dygraph_func(*args, **kwargs)

        function_spec = FunctionSpec(dygraph_func)
        cache_key = CacheKey.from_func_and_args(
            function_spec, args, kwargs, getattr(dygraph_func, '__self__', None)
        )
        concrete_program, partial_program_layer = self._program_cache[cache_key]

        # Note: concrete_program holds all input/output info, including
        # non-Variable objects.
        input_vars = [
            var
            for var in concrete_program.inputs
            if isinstance(var, framework.Variable)
        ]
        output_vars = [
            var
            for var in concrete_program.outputs
            if isinstance(var, framework.Variable)
        ]

        return (
            concrete_program.main_program,
            concrete_program.startup_program,
            input_vars,
            output_vars,
        )
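
    # Executor sketch for get_program (hedged; the feed/fetch names depend on
    # the traced function):
    #
    #     main_prog, startup_prog, inputs, outputs = prog_trans.get_program(func, x)
    #     exe = paddle.static.Executor(paddle.CPUPlace())
    #     exe.run(startup_prog)
    #     out = exe.run(main_prog,
    #                   feed={inputs[0].name: x.numpy()},
    #                   fetch_list=outputs)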

    def get_code(self, dygraph_func):
        """
        Returns the translated static function string code from dygraph function.

        Args:
            dygraph_func (callable): the dygraph function.

        Returns:
            str: the string code of translated static function.

        Examples:
            .. code-block:: python

                >>> # doctest: +SKIP
                >>> import paddle
                >>> def func(x):
                ...     if paddle.mean(x) > 0:
                ...         x_v = x - 1
                ...     else:
                ...         x_v = x + 1
                ...     return x_v
                ...
                >>> prog_trans = paddle.jit.dy2static.program_translator.ProgramTranslator()

                >>> code = prog_trans.get_code(func)
                >>> print(type(code))
                <class 'str'>
        """
        assert callable(
            dygraph_func
        ), "Input dygraph_func is not a callable in ProgramTranslator.get_code"
        # Get the AST of the dygraph function.
        unwrap_func = unwrap(dygraph_func)
        raw_code = inspect.getsource(unwrap_func)
        code = textwrap.dedent(raw_code)
        root = gast.parse(code)

        # Transform the AST.
        dygraph_to_static = DygraphToStaticAst()
        root = dygraph_to_static.get_static_ast(root)

        # Generate the source code.
        source_code = ast_to_source_code(root)
        return source_code

    def get_program_cache(self):
        """
        Returns the ProgramCache instance. This method is used by PaddlePaddle
        developers to manage program cache in ProgramTranslator. Normal users
        don't have to call this method.

        Returns:
            ProgramCache: ProgramCache instance of ProgramTranslator.

        Examples:
            .. code-block:: python

                >>> import paddle

                >>> prog_trans = paddle.jit.dy2static.program_translator.ProgramTranslator()
                >>> prog_cache = prog_trans.get_program_cache()
        """
        return self._program_cache


def enable_to_static(enable_to_static_bool):
    """
    Enable or disable the converting from imperative to static graph by
    ProgramTranslator globally.

    Args:
        enable_to_static_bool (bool): True or False to enable or disable converting to static.

    Returns:
        None.

    Examples:
        .. code-block:: python

            >>> import paddle
            >>> @paddle.jit.to_static
            ... def func(x):
            ...     if paddle.mean(x) > 0:
            ...         x_v = x - 1
            ...     else:
            ...         x_v = x + 1
            ...     return x_v
            ...
            >>> paddle.jit.enable_to_static(False)

            >>> x = paddle.ones([1, 2])
            >>> # ProgramTranslator is disabled so the func is run in dygraph
            >>> print(func(x))
            Tensor(shape=[1, 2], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[0., 0.]])

    """
    check_type(
        enable_to_static_bool,
        "enable_to_static_bool",
        bool,
        "paddle.jit.enable_to_static",
    )
    _program_trans = ProgramTranslator()
    _program_trans.enable(enable_to_static_bool)


@switch_to_static_graph
def _to_prim(
    blocks,
    blacklist=frozenset(),
    whitelist=frozenset(),
    start_idx=-1,
    backward_length=-1,
):
    """Switch to static graph and call to_prim."""
    # TODO(Aurelius84): Fix this cyclic import problem
    from paddle.incubate.autograd import primapi

    primapi.to_prim(
        blocks,
        blacklist=blacklist,
        whitelist=whitelist,
        start_idx=start_idx,
        backward_length=backward_length,
    )
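

# Parameter sketch for `_to_prim` (hedged): `blacklist`/`whitelist` filter
# which ops get lowered into primitive ops, while `start_idx` and
# `backward_length` restrict lowering to a slice of the block's ops, as
# PrimHooker uses above to treat the forward and backward parts separately.
# For example, skipping ops that have custom-VJP rules:
#
#     _to_prim(program.blocks, blacklist={'softmax'})  # op name illustrative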