diff --git a/python/paddle/fluid/contrib/decoder/beam_search_decoder.py b/python/paddle/fluid/contrib/decoder/beam_search_decoder.py
index f2b7ac8375af25beed562b8279b6044f11c09d44..d0ca4fd485e66d07f7ced62d8e14f5fc257c08a8 100644
--- a/python/paddle/fluid/contrib/decoder/beam_search_decoder.py
+++ b/python/paddle/fluid/contrib/decoder/beam_search_decoder.py
@@ -22,7 +22,7 @@ This API is still under active development and may change drastically.

 from __future__ import print_function

-import contextlib
+from ...wrapped_decorator import contextmanager
 import numpy as np
 import six
@@ -419,7 +419,7 @@ class TrainingDecoder(object):
         self._state_cell = state_cell
         self._state_cell._enter_decoder(self)

-    @contextlib.contextmanager
+    @contextmanager
     def block(self):
         """
         Define the behavior of the decoder for each RNN time step.
@@ -613,7 +613,7 @@ class BeamSearchDecoder(object):
         self._word_dim = word_dim
         self._input_var_dict = input_var_dict

-    @contextlib.contextmanager
+    @contextmanager
     def block(self):
         """
         Define the behavior of the decoder for each RNN time step.
diff --git a/python/paddle/fluid/contrib/inferencer.py b/python/paddle/fluid/contrib/inferencer.py
index b8d5f4ffeadca0a7b103682f175d50dc46fa258a..41a0d55b57959c39ab2ab54ce25d996f6c6e563b 100644
--- a/python/paddle/fluid/contrib/inferencer.py
+++ b/python/paddle/fluid/contrib/inferencer.py
@@ -14,7 +14,7 @@

 from __future__ import print_function

-import contextlib
+from ..wrapped_decorator import contextmanager

 from .. import core
@@ -105,7 +105,7 @@ class Inferencer(object):
         return results

-    @contextlib.contextmanager
+    @contextmanager
     def _prog_and_scope_guard(self):
         with framework.program_guard(main_program=self.inference_program):
             with executor.scope_guard(self.scope):
diff --git a/python/paddle/fluid/contrib/trainer.py b/python/paddle/fluid/contrib/trainer.py
index 8569e486f91786b5562e84dcdccf6d91da0612cc..798014cb1eb454e3c896cc36be9989e924780e0d 100644
--- a/python/paddle/fluid/contrib/trainer.py
+++ b/python/paddle/fluid/contrib/trainer.py
@@ -14,7 +14,7 @@

 from __future__ import print_function

-import contextlib
+from ..wrapped_decorator import contextmanager
 import os
 import errno
 import shutil
@@ -453,7 +453,7 @@ class Trainer(object):
             io.save_inference_model(param_path, feeded_var_names, target_vars,
                                     exe)

-    @contextlib.contextmanager
+    @contextmanager
     def _prog_and_scope_guard(self):
         with framework.program_guard(
                 main_program=self.train_program,
diff --git a/python/paddle/fluid/executor.py b/python/paddle/fluid/executor.py
index d3ff14a17955990bff851e95bd61fbc370ea7aa5..6c49c56408f3033afd31082fd6f5aafb88a98f1e 100644
--- a/python/paddle/fluid/executor.py
+++ b/python/paddle/fluid/executor.py
@@ -17,7 +17,7 @@ from __future__ import print_function
 import os
 import multiprocessing
 import numpy as np
-import contextlib
+from .wrapped_decorator import contextmanager
 import six
 from .framework import Program, default_main_program, Variable
 from . import core
@@ -49,7 +49,7 @@ def _switch_scope(scope):
     return ex


-@contextlib.contextmanager
+@contextmanager
 def scope_guard(scope):
     """
     Change the global/default scope instance by Python `with` statement. All
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index c0b0ad8a202b82183de9ec1edd43cb10db10fb5c..f94c8136cae0111fdf050dc6bcbc4d429c333f3a 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -16,7 +16,7 @@ from __future__ import print_function

 import collections
 from collections import defaultdict
-import contextlib
+from .wrapped_decorator import contextmanager
 import os
 import re
 import traceback
@@ -111,7 +111,7 @@ class NameScope(object):
 _name_scope = NameScope()


-@contextlib.contextmanager
+@contextmanager
 def name_scope(prefix=None):
     """
     Generate hierarchical name prefix for the operators.
@@ -1775,7 +1775,7 @@
     def set_op_role_var(self, var_name):
         self._op_role_var = [var_name]

-    @contextlib.contextmanager
+    @contextmanager
     def _optimized_guard(self, param_and_grads):
         """
         A with guard to set :code:`Optimization` :code:`OpRole` and
@@ -1805,7 +1805,7 @@
         self._op_role_var = tmp_var
         self._current_role = tmp_role

-    @contextlib.contextmanager
+    @contextmanager
     def _lr_schedule_guard(self, is_with_opt=False):
         """
         A with guard to set :code:`LRSched` :code:`OpRole` and
@@ -2459,7 +2459,7 @@ def switch_startup_program(program):
     return prev_program


-@contextlib.contextmanager
+@contextmanager
 def program_guard(main_program, startup_program=None):
     """
     Change the global main program and startup program with `with` statement.
@@ -2524,7 +2524,7 @@ def _get_var(name, program=None):
     return program.global_block().var(name)


-@contextlib.contextmanager
+@contextmanager
 def _imperative_guard(tracer):
     global _imperative_tracer_
     tmp_trace = _imperative_tracer_
@@ -2535,7 +2535,7 @@
     _imperative_tracer_ = tmp_trace


-@contextlib.contextmanager
+@contextmanager
 def _imperative_place_guard(place):
     global _imperative_current_expected_place_
     tmp_place = _imperative_current_expected_place_
diff --git a/python/paddle/fluid/imperative/base.py b/python/paddle/fluid/imperative/base.py
index ff3984b11f42cf9e6ff49c8654c600c065effe1d..2f8b3534aad565e4ac1c43f5629ea5d2d9eb75c4 100644
--- a/python/paddle/fluid/imperative/base.py
+++ b/python/paddle/fluid/imperative/base.py
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import contextlib
+from ..wrapped_decorator import contextmanager
 import numpy as np

 from paddle.fluid import core
@@ -24,7 +24,7 @@ def enabled():
     return framework._in_imperative_mode()


-@contextlib.contextmanager
+@contextmanager
 def guard(place=None):
     train = framework.Program()
     startup = framework.Program()
diff --git a/python/paddle/fluid/initializer.py b/python/paddle/fluid/initializer.py
index 5be21ff7f7270f6ce950c069f61418c922bcedc5..8f3f03cb1a91ee6635c33a82562a0eadf31390cc 100644
--- a/python/paddle/fluid/initializer.py
+++ b/python/paddle/fluid/initializer.py
@@ -16,7 +16,7 @@ from __future__ import print_function

 from . import framework
 import numpy as np
-import contextlib
+from .wrapped_decorator import contextmanager
 from .core import VarDesc
 from . import unique_name
@@ -49,7 +49,7 @@ def force_init_on_cpu():
     return _force_init_on_cpu_


-@contextlib.contextmanager
+@contextmanager
 def init_on_cpu():
     """
     Force the variable to be inited on CPU.
diff --git a/python/paddle/fluid/layers/control_flow.py b/python/paddle/fluid/layers/control_flow.py
index a7494aaceab42332cb4362ab1df43d9e0b139f4f..1d639144e2f3421f5074b9f3d8d5b0356fdb0f44 100644
--- a/python/paddle/fluid/layers/control_flow.py
+++ b/python/paddle/fluid/layers/control_flow.py
@@ -13,7 +13,7 @@
 # limitations under the License.

 from __future__ import print_function
-import contextlib
+from ..wrapped_decorator import contextmanager

 from .layer_function_generator import autodoc, templatedoc
 from .tensor import assign, fill_constant
@@ -1532,7 +1532,7 @@ class DynamicRNN(object):
             outputs={'Out': [x_reordered]})
         return shrink_memory(x_reordered, self.step_idx, self.lod_rank_table)

-    @contextlib.contextmanager
+    @contextmanager
     def block(self):
         """
         The block for user to define operators in RNN. See the class docstring
diff --git a/python/paddle/fluid/layers/io.py b/python/paddle/fluid/layers/io.py
index 1762bd3e343e8af6768dd23f8fbc58cd0182d3c9..58c892315f0fcef005a996079e297ac78cee6451 100644
--- a/python/paddle/fluid/layers/io.py
+++ b/python/paddle/fluid/layers/io.py
@@ -13,7 +13,7 @@
 # limitations under the License.

 from __future__ import print_function
-import contextlib
+from ..wrapped_decorator import contextmanager
 import multiprocessing
 import os
 import six
@@ -1116,7 +1116,7 @@ class Preprocessor(object):
     def _is_completed(self):
         return self.sub_block and self.source_var_names and self.sink_var_names

-    @contextlib.contextmanager
+    @contextmanager
     def block(self):
         self.status = Preprocessor.IN_SUB_BLOCK
         self.sub_block = self.main_prog._create_block()
diff --git a/python/paddle/fluid/optimizer.py b/python/paddle/fluid/optimizer.py
index e0e781a322b3eb68e3f54a66252a8d8b11a9a56f..e89103f18d2a2e368586c33f1504ec13982e590e 100644
--- a/python/paddle/fluid/optimizer.py
+++ b/python/paddle/fluid/optimizer.py
@@ -15,7 +15,7 @@
 from __future__ import print_function

 from collections import defaultdict
-from contextlib import contextmanager
+from .wrapped_decorator import contextmanager

 from paddle.fluid.framework import Program, Variable, name_scope, default_main_program
 from paddle.fluid.distribute_lookup_table import find_distributed_lookup_table
diff --git a/python/paddle/fluid/profiler.py b/python/paddle/fluid/profiler.py
index e05885f5f5bfc169828c1c6e723dffff098c3c2e..08f5b383102bcc008d78b5a15563582dce52708e 100644
--- a/python/paddle/fluid/profiler.py
+++ b/python/paddle/fluid/profiler.py
@@ -15,7 +15,7 @@
 from __future__ import print_function

 from . import core
-from contextlib import contextmanager
+from .wrapped_decorator import contextmanager
 import os
 import six
diff --git a/python/paddle/fluid/recordio_writer.py b/python/paddle/fluid/recordio_writer.py
index 076a942cdde5623faa570bf98f889e8145b60f8b..5302dbb3560f8f66e6ba3c4247a9514877ab489a 100644
--- a/python/paddle/fluid/recordio_writer.py
+++ b/python/paddle/fluid/recordio_writer.py
@@ -15,14 +15,14 @@
 from __future__ import print_function

 import os
-import contextlib
+from .wrapped_decorator import contextmanager
 from . import core

 __all__ = [
     'convert_reader_to_recordio_file', 'convert_reader_to_recordio_files'
 ]

-@contextlib.contextmanager
+@contextmanager
 def create_recordio_writer(filename,
                            compressor=core.RecordIOWriter.Compressor.Snappy,
                            max_num_records=1000):
diff --git a/python/paddle/fluid/unique_name.py b/python/paddle/fluid/unique_name.py
index b9957a699e597898bee75ce0e7283f7224293f0c..e1ec726ec492e96de29faef64a85bcdde88da8e0 100644
--- a/python/paddle/fluid/unique_name.py
+++ b/python/paddle/fluid/unique_name.py
@@ -15,7 +15,7 @@
 from __future__ import print_function

 import collections
-import contextlib
+from .wrapped_decorator import contextmanager
 import six
 import sys
@@ -68,7 +68,7 @@ def switch(new_generator=None):
     return old


-@contextlib.contextmanager
+@contextmanager
 def guard(new_generator=None):
     if isinstance(new_generator, six.string_types):
         new_generator = UniqueNameGenerator(new_generator)
diff --git a/python/paddle/fluid/wrapped_decorator.py b/python/paddle/fluid/wrapped_decorator.py
new file mode 100644
index 0000000000000000000000000000000000000000..224afcca5a51759fe0905b6003aa87ba90b899d1
--- /dev/null
+++ b/python/paddle/fluid/wrapped_decorator.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import decorator
+import contextlib
+
+__all__ = ['wrap_decorator', 'contextmanager']
+
+
+def wrap_decorator(decorator_func):
+    @decorator.decorator
+    def __impl__(func, *args, **kwargs):
+        wrapped_func = decorator_func(func)
+        return wrapped_func(*args, **kwargs)
+
+    return __impl__
+
+
+contextmanager = wrap_decorator(contextlib.contextmanager)
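
For reference, a minimal sketch (separate from the patch above) of what the new wrapped_decorator.contextmanager buys: the third-party decorator package generates a wrapper whose argument list matches the decorated function, whereas contextlib.contextmanager exposes only a generic (*args, **kwds) wrapper to introspection tools such as inspect.getfullargspec. The snippet inlines wrap_decorator so it runs without PaddlePaddle installed and assumes `pip install decorator`; the guard names are illustrative only.

import contextlib
import inspect

import decorator


def wrap_decorator(decorator_func):
    # Same helper as in wrapped_decorator.py above: `decorator.decorator`
    # builds a wrapper that keeps the decorated function's signature.
    @decorator.decorator
    def __impl__(func, *args, **kwargs):
        wrapped_func = decorator_func(func)
        return wrapped_func(*args, **kwargs)

    return __impl__


contextmanager = wrap_decorator(contextlib.contextmanager)


@contextlib.contextmanager
def plain_guard(scope, new_value=0):
    yield scope


@contextmanager
def wrapped_guard(scope, new_value=0):
    yield scope


# The stock decorator hides the parameters behind the generic wrapper ...
print(inspect.getfullargspec(plain_guard))    # args=[], varargs='args', varkw='kwds'
# ... while the wrapped version still reports the original signature.
print(inspect.getfullargspec(wrapped_guard))  # args=['scope', 'new_value'], defaults=(0,)

Both functions still work as context managers, e.g. `with wrapped_guard(some_scope) as s: ...`, so call sites in the patched files need no changes beyond the import.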