Commit f85245b4 authored by sneaxiy

test=develop

Parent ac4d08b3
@@ -22,7 +22,7 @@ This API is still under active development and may change drastically.
from __future__ import print_function
-import contextlib
+from ...wrapped_decorator import contextmanager
import numpy as np
import six
@@ -419,7 +419,7 @@ class TrainingDecoder(object):
self._state_cell = state_cell
self._state_cell._enter_decoder(self)
-    @contextlib.contextmanager
+    @contextmanager
def block(self):
"""
Define the behavior of the decoder for each RNN time step.
@@ -613,7 +613,7 @@ class BeamSearchDecoder(object):
self._word_dim = word_dim
self._input_var_dict = input_var_dict
-    @contextlib.contextmanager
+    @contextmanager
def block(self):
"""
Define the behavior of the decoder for each RNN time step.
@@ -14,7 +14,7 @@
from __future__ import print_function
-import contextlib
+from ..wrapped_decorator import contextmanager
from .. import core
@@ -105,7 +105,7 @@ class Inferencer(object):
return results
-    @contextlib.contextmanager
+    @contextmanager
def _prog_and_scope_guard(self):
with framework.program_guard(main_program=self.inference_program):
with executor.scope_guard(self.scope):
@@ -14,7 +14,7 @@
from __future__ import print_function
-import contextlib
+from .wrapped_decorator import contextmanager
import os
import errno
import shutil
@@ -453,7 +453,7 @@ class Trainer(object):
io.save_inference_model(param_path, feeded_var_names, target_vars,
exe)
-    @contextlib.contextmanager
+    @contextmanager
def _prog_and_scope_guard(self):
with framework.program_guard(
main_program=self.train_program,
@@ -17,7 +17,7 @@ from __future__ import print_function
import os
import multiprocessing
import numpy as np
-import contextlib
+from .wrapped_decorator import contextmanager
import six
from .framework import Program, default_main_program, Variable
from . import core
@@ -49,7 +49,7 @@ def _switch_scope(scope):
return ex
-@contextlib.contextmanager
+@contextmanager
def scope_guard(scope):
"""
Change the global/default scope instance by Python `with` statement. All
@@ -16,7 +16,7 @@ from __future__ import print_function
import collections
from collections import defaultdict
-import contextlib
+from .wrapped_decorator import contextmanager
import os
import re
import traceback
@@ -111,7 +111,7 @@ class NameScope(object):
_name_scope = NameScope()
-@contextlib.contextmanager
+@contextmanager
def name_scope(prefix=None):
"""
Generate hierarchical name prefix for the operators.
@@ -1775,7 +1775,7 @@ class Program(object):
def set_op_role_var(self, var_name):
self._op_role_var = [var_name]
-    @contextlib.contextmanager
+    @contextmanager
def _optimized_guard(self, param_and_grads):
"""
A with guard to set :code:`Optimization` :code:`OpRole` and
@@ -1805,7 +1805,7 @@ class Program(object):
self._op_role_var = tmp_var
self._current_role = tmp_role
-    @contextlib.contextmanager
+    @contextmanager
def _lr_schedule_guard(self, is_with_opt=False):
"""
A with guard to set :code:`LRSched` :code:`OpRole` and
@@ -2459,7 +2459,7 @@ def switch_startup_program(program):
return prev_program
-@contextlib.contextmanager
+@contextmanager
def program_guard(main_program, startup_program=None):
"""
Change the global main program and startup program with `with` statement.
@@ -2524,7 +2524,7 @@ def _get_var(name, program=None):
return program.global_block().var(name)
-@contextlib.contextmanager
+@contextmanager
def _imperative_guard(tracer):
global _imperative_tracer_
tmp_trace = _imperative_tracer_
@@ -2535,7 +2535,7 @@ def _imperative_guard(tracer):
_imperative_tracer_ = tmp_trace
-@contextlib.contextmanager
+@contextmanager
def _imperative_place_guard(place):
global _imperative_current_expected_place_
tmp_place = _imperative_current_expected_place_
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-import contextlib
+from ..wrapped_decorator import contextmanager
import numpy as np
from paddle.fluid import core
@@ -24,7 +24,7 @@ def enabled():
return framework._in_imperative_mode()
-@contextlib.contextmanager
+@contextmanager
def guard(place=None):
train = framework.Program()
startup = framework.Program()
@@ -16,7 +16,7 @@ from __future__ import print_function
from . import framework
import numpy as np
-import contextlib
+from .wrapped_decorator import contextmanager
from .core import VarDesc
from . import unique_name
@@ -49,7 +49,7 @@ def force_init_on_cpu():
return _force_init_on_cpu_
-@contextlib.contextmanager
+@contextmanager
def init_on_cpu():
"""
Force the variable to be inited on CPU.
@@ -13,7 +13,7 @@
# limitations under the License.
from __future__ import print_function
-import contextlib
+from ..wrapped_decorator import contextmanager
from .layer_function_generator import autodoc, templatedoc
from .tensor import assign, fill_constant
@@ -1532,7 +1532,7 @@ class DynamicRNN(object):
outputs={'Out': [x_reordered]})
return shrink_memory(x_reordered, self.step_idx, self.lod_rank_table)
-    @contextlib.contextmanager
+    @contextmanager
def block(self):
"""
The block for user to define operators in RNN. See the class docstring
@@ -13,7 +13,7 @@
# limitations under the License.
from __future__ import print_function
-import contextlib
+from ..wrapped_decorator import contextmanager
import multiprocessing
import os
import six
@@ -1116,7 +1116,7 @@ class Preprocessor(object):
def _is_completed(self):
return self.sub_block and self.source_var_names and self.sink_var_names
-    @contextlib.contextmanager
+    @contextmanager
def block(self):
self.status = Preprocessor.IN_SUB_BLOCK
self.sub_block = self.main_prog._create_block()
@@ -15,7 +15,7 @@
from __future__ import print_function
from collections import defaultdict
-from contextlib import contextmanager
+from .wrapped_decorator import contextmanager
from paddle.fluid.framework import Program, Variable, name_scope, default_main_program
from paddle.fluid.distribute_lookup_table import find_distributed_lookup_table
@@ -15,7 +15,7 @@
from __future__ import print_function
from . import core
-from contextlib import contextmanager
+from .wrapped_decorator import contextmanager
import os
import six
@@ -15,14 +15,14 @@
from __future__ import print_function
import os
-import contextlib
+from .wrapped_decorator import contextmanager
from . import core
__all__ = [
'convert_reader_to_recordio_file', 'convert_reader_to_recordio_files'
]
-@contextlib.contextmanager
+@contextmanager
def create_recordio_writer(filename,
compressor=core.RecordIOWriter.Compressor.Snappy,
max_num_records=1000):
@@ -15,7 +15,7 @@
from __future__ import print_function
import collections
-import contextlib
+from .wrapped_decorator import contextmanager
import six
import sys
@@ -68,7 +68,7 @@ def switch(new_generator=None):
return old
-@contextlib.contextmanager
+@contextmanager
def guard(new_generator=None):
if isinstance(new_generator, six.string_types):
new_generator = UniqueNameGenerator(new_generator)
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import decorator
import contextlib

__all__ = ['wrap_decorator', 'contextmanager']


def wrap_decorator(decorator_func):
    # decorator.decorator compiles a wrapper whose argument list matches the
    # decorated function, so its real signature stays visible to introspection.
    @decorator.decorator
    def __impl__(func, *args, **kwargs):
        wrapped_func = decorator_func(func)
        return wrapped_func(*args, **kwargs)

    return __impl__


# Signature-preserving replacement for contextlib.contextmanager.
contextmanager = wrap_decorator(contextlib.contextmanager)
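
For reference, a minimal standalone sketch of how the new wrapper is meant to be used; the context manager tag and its printed output below are illustrative only and not part of this commit. Because decorator.decorator (from the third-party decorator package) builds a wrapper with the same argument list as the function it wraps, a generator decorated with this contextmanager still exposes its real signature to introspection while behaving like an ordinary contextlib.contextmanager result.

import contextlib
import inspect

import decorator  # third-party "decorator" package, same dependency as above


def wrap_decorator(decorator_func):
    @decorator.decorator
    def __impl__(func, *args, **kwargs):
        wrapped_func = decorator_func(func)
        return wrapped_func(*args, **kwargs)

    return __impl__


contextmanager = wrap_decorator(contextlib.contextmanager)


@contextmanager
def tag(name):
    # Hypothetical context manager used only for this demonstration.
    print('<%s>' % name)
    yield
    print('</%s>' % name)


with tag('div'):
    print('hello')
# Output:
# <div>
# hello
# </div>

# The generated wrapper keeps the original argument list visible:
print(inspect.getfullargspec(tag).args)  # ['name']

A plain functools.wraps-style wrapper, such as the one produced by contextlib.contextmanager itself, would instead report (*args, **kwds) to inspect.getfullargspec, which is presumably why the modules in this diff switch to the wrapped version.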