diff --git a/python/paddle/fluid/layers/io.py b/python/paddle/fluid/layers/io.py
index 34cdac52d33000cfb87a97a1486abe7a4a583bbd..0665c09bfb52c932219be68ca801cfa951d672d3 100644
--- a/python/paddle/fluid/layers/io.py
+++ b/python/paddle/fluid/layers/io.py
@@ -22,8 +22,7 @@ from ..executor import global_scope
 from layer_function_generator import generate_layer_fn, templatedoc
 
 __all__ = [
-    'data', 'BlockGuardServ', 'ListenAndServ', 'Send', 'Recv',
-    'open_recordio_file', 'open_files', 'read_file', 'shuffle', 'batch',
+    'data', 'open_recordio_file', 'open_files', 'read_file', 'shuffle', 'batch',
     'double_buffer', 'random_data_generator', 'py_reader', 'Preprocessor', 'load'
 ]
 
@@ -802,7 +801,7 @@ class Preprocessor(object):
         self.sink_var_names = None
         self.status = Preprocessor.BEFORE_SUB_BLOCK
 
-    def is_completed(self):
+    def _is_completed(self):
         return self.sub_block and self.source_var_names and self.sink_var_names
 
     @contextlib.contextmanager
@@ -812,7 +811,7 @@ class Preprocessor(object):
         yield
         self.main_prog.rollback()
         self.status = Preprocessor.AFTER_SUB_BLOCK
-        if not self.is_completed():
+        if not self._is_completed():
             raise RuntimeError(
                 "The definition of preprocessor is incompleted! "
                 "Please make sure that you have set input and output "
diff --git a/python/paddle/fluid/tests/unittests/test_dist_train.py b/python/paddle/fluid/tests/unittests/test_dist_train.py
index 562e66b0625083fe840d64967249f0215cfda1f9..aab8969a96ff69d1a306506337a0e009f14758b9 100644
--- a/python/paddle/fluid/tests/unittests/test_dist_train.py
+++ b/python/paddle/fluid/tests/unittests/test_dist_train.py
@@ -22,6 +22,9 @@ import numpy
 
 import paddle.fluid as fluid
 import paddle.fluid.layers as layers
+from paddle.fluid.layers.io import ListenAndServ
+from paddle.fluid.layers.io import Recv
+from paddle.fluid.layers.io import Send
 
 
 class TestSendOp(unittest.TestCase):
@@ -65,8 +68,7 @@ class TestSendOp(unittest.TestCase):
         main = fluid.Program()
 
         with fluid.program_guard(main):
-            serv = layers.ListenAndServ(
-                "127.0.0.1:0", ["X"], optimizer_mode=False)
+            serv = ListenAndServ("127.0.0.1:0", ["X"], optimizer_mode=False)
             with serv.do():
                 out_var = main.global_block().create_var(
                     name="scale_0.tmp_0",
@@ -99,8 +101,8 @@ class TestSendOp(unittest.TestCase):
                 persistable=False,
                 shape=[32, 32])
             fluid.initializer.Constant(value=2.3)(get_var, main.global_block())
-            layers.Send("127.0.0.1:%d" % port, [x])
-            o = layers.Recv("127.0.0.1:%d" % port, [get_var])
+            Send("127.0.0.1:%d" % port, [x])
+            o = Recv("127.0.0.1:%d" % port, [get_var])
 
         exe = fluid.Executor(place)
         self.dist_out = exe.run(main, fetch_list=o)  # o is a list
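
Not part of the diff itself, but for readers of the change: a minimal sketch of the new import path for callers, mirroring the updated test_dist_train.py; it assumes a legacy Fluid build in which the distributed ops are still compiled in.

    import paddle.fluid as fluid
    from paddle.fluid.layers.io import ListenAndServ, Recv, Send

    main = fluid.Program()
    with fluid.program_guard(main):
        # Previously spelled layers.ListenAndServ(...); the op is unchanged,
        # only the re-export through the public fluid.layers namespace was removed.
        serv = ListenAndServ("127.0.0.1:0", ["X"], optimizer_mode=False)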