diff --git a/python/paddle/fluid/dygraph/layers.py b/python/paddle/fluid/dygraph/layers.py
index 6b78e2abb32ab3f134fc7bae6fbd203a2730cf66..c772e50899b8705e38585ad5c1493a878c7ef2ba 100644
--- a/python/paddle/fluid/dygraph/layers.py
+++ b/python/paddle/fluid/dygraph/layers.py
@@ -49,10 +49,10 @@ class Layer(core.Layer):
         self._helper = LayerObjectHelper(self._full_name)
 
     def train(self):
-        framework._dygraph_tracer()._train_mode()
+        framework._dygraph_tracer().train_mode()
 
     def eval(self):
-        framework._dygraph_tracer()._eval_mode()
+        framework._dygraph_tracer().eval_mode()
 
     def full_name(self):
         """Full name for this layers.
@@ -261,10 +261,10 @@ class PyLayer(core.PyLayer):
         super(PyLayer, self).__init__()
 
     def train(self):
-        framework._dygraph_tracer()._train_mode()
+        framework._dygraph_tracer().train_mode()
 
     def eval(self):
-        framework._dygraph_tracer()._eval_mode()
+        framework._dygraph_tracer().eval_mode()
 
     @classmethod
     def _do_forward(cls, inputs):
diff --git a/python/paddle/fluid/dygraph/tracer.py b/python/paddle/fluid/dygraph/tracer.py
index ee37ffab2cb7521b83108a40febcfe88cab28633..9d2cbb4f03fdc807e1609f46eac44a0bb92af785 100644
--- a/python/paddle/fluid/dygraph/tracer.py
+++ b/python/paddle/fluid/dygraph/tracer.py
@@ -118,8 +118,8 @@ class Tracer(core.Tracer):
                 if k in backward_refs:
                     op.backward_refs[k] = outputs[k]
 
-    def _train_mode(self):
+    def train_mode(self):
         self._train_mode = True
 
-    def _eval_mode(self):
+    def eval_mode(self):
         self._train_mode = False
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_mnist.py b/python/paddle/fluid/tests/unittests/test_imperative_mnist.py
index 76b8d3aa3943e44a17ab822618d8d1cb85aaa551..908237b88736da112b7001708bbca19b534baef1 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_mnist.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_mnist.py
@@ -117,6 +117,7 @@ class TestImperativeMnist(unittest.TestCase):
             train_reader = paddle.batch(
                 paddle.dataset.mnist.train(), batch_size=128, drop_last=True)
 
+            mnist.train()
             dy_param_init_value = {}
            for epoch in range(epoch_num):
                 for batch_id, data in enumerate(train_reader()):