From 3e8974895bdffd0284ddba96f48e7142f43d723a Mon Sep 17 00:00:00 2001
From: 0x45f <23097963+0x45f@users.noreply.github.com>
Date: Thu, 16 Sep 2021 15:26:59 +0800
Subject: [PATCH] [Dy2stat]fix no_grad context error in dy2stat (#35725)

* fix no_grad context error in dy2stat

* remove useless comments

* fix error by drop_kids in python

* add test and fix review
---
 paddle/fluid/pybind/pybind.cc                      | 11 +++++++++++
 .../dygraph/dygraph_to_static/partial_program.py   |  7 ++++++-
 .../dygraph_to_static/test_partial_program.py      | 15 +++++++++++++++
 3 files changed, 32 insertions(+), 1 deletion(-)

diff --git a/paddle/fluid/pybind/pybind.cc b/paddle/fluid/pybind/pybind.cc
index f56968d5c28..599e694d660 100644
--- a/paddle/fluid/pybind/pybind.cc
+++ b/paddle/fluid/pybind/pybind.cc
@@ -1241,6 +1241,17 @@ All parameter, weight, gradient are variables in Paddle.
              return self.GetMutable<framework::ReaderHolder>();
            },
            py::return_value_policy::reference)
+      .def("get_scope",
+           [](Variable &self) -> Scope * {
+             auto scope_vec =
+                 self.GetMutable<std::vector<framework::Scope *>>();
+             PADDLE_ENFORCE_GT(
+                 scope_vec->size(), 0,
+                 platform::errors::InvalidArgument(
+                     "The size of scope_vec should be greater than 0"));
+             return scope_vec->front();
+           },
+           py::return_value_policy::reference)
       .def("set_scope", [](Variable &self, Scope &scope) {
         auto scope_vec = self.GetMutable<std::vector<framework::Scope *>>();
         scope_vec->emplace_back(&scope);
diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py b/python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py
index 663f95af89c..9ccd2321b63 100644
--- a/python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py
+++ b/python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py
@@ -290,10 +290,15 @@ class PartialProgramLayer:
             self._valid_vars(self._params),
             self._valid_vars(out_vars), self._tmp_scope_vec,
             self._double_grads, *attrs)
-
+        self.drop_scope_if_no_grad()
         restored_nest_out = self._restore_out(out_vars)
         return self._remove_no_value(restored_nest_out)
 
+    def drop_scope_if_no_grad(self):
+        tracer = framework._dygraph_tracer()
+        if self.training and not tracer._has_grad:
+            self._tmp_scope_vec.value().get_scope().drop_kids()
+
     @property
     def program(self):
         if self.training:
diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_partial_program.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_partial_program.py
index 91067f36099..220347909f9 100644
--- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_partial_program.py
+++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_partial_program.py
@@ -152,6 +152,21 @@ class TestWithTrainAndEval(unittest.TestCase):
                              partial_layer._train_program)
 
 
+class TestWithNoGrad(unittest.TestCase):
+    def test_with_no_grad(self):
+        with fluid.dygraph.guard():
+            linear_net = Linear()
+            x_data = np.random.random((5, 10)).astype('float32')
+            x = fluid.dygraph.to_variable(x_data)
+
+            with paddle.no_grad():
+                linear_net.train()
+                linear_net(x)
+                _, partial_layer = linear_net.forward.program_cache.last()[-1]
+                self.assertEqual(partial_layer.program,
+                                 partial_layer._train_program)
+
+
 class GPT2LMHeadModel(fluid.dygraph.Layer):
     def __init__(self):
         super(GPT2LMHeadModel, self).__init__()
-- 
GitLab
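
Usage note: a minimal repro sketch for the scenario this patch covers, i.e. running a to_static layer in train mode under paddle.no_grad(). The Net layer, sizes, and data below are illustrative assumptions, not part of the patch; the unit test above exercises the same path through the suite's Linear layer.

import numpy as np
import paddle

class Net(paddle.nn.Layer):
    def __init__(self):
        super(Net, self).__init__()
        self.fc = paddle.nn.Linear(10, 3)  # illustrative sizes

    @paddle.jit.to_static
    def forward(self, x):
        return self.fc(x)

net = Net()
net.train()  # train mode, so PartialProgramLayer selects the train program
x = paddle.to_tensor(np.random.random((5, 10)).astype('float32'))

# With gradients disabled, drop_scope_if_no_grad() now drops the temporary
# scope's child scopes after each forward instead of letting them accumulate.
with paddle.no_grad():
    out = net(x)
print(out.shape)  # [5, 3]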