From 797b2dfda8decc54b71fe856cf901ce1308a08c1 Mon Sep 17 00:00:00 2001
From: WeiXin
Date: Tue, 27 Apr 2021 16:30:32 +0800
Subject: [PATCH] clear 'BasicEngine' when an exception occurs in the
 backward. (#32546)

* clear 'BasicEngine' when an exception occurs in the backward.

* deal with conflict.

* deal with conflict.
---
 paddle/fluid/imperative/basic_engine.cc          | 20 +++++++----
 .../fluid/tests/unittests/test_pylayer_op.py     | 33 ++++++++-----------
 2 files changed, 28 insertions(+), 25 deletions(-)

diff --git a/paddle/fluid/imperative/basic_engine.cc b/paddle/fluid/imperative/basic_engine.cc
index d5350744e4c..023a148763d 100644
--- a/paddle/fluid/imperative/basic_engine.cc
+++ b/paddle/fluid/imperative/basic_engine.cc
@@ -470,12 +470,20 @@ void BasicEngine::Execute() {
       {
         VLOG(3) << "Start to execute grad op " << cur_op.Type();
-        if (tmp_ins_ptr == nullptr) {
-          OpBase::Run(cur_op.InnerOp(), bwd_ins, tmp_outs, cur_op.Attrs(),
-                      cur_op.place());
-        } else {
-          OpBase::Run(cur_op.InnerOp(), *tmp_ins_ptr, tmp_outs, cur_op.Attrs(),
-                      cur_op.place());
+        try {
+          if (tmp_ins_ptr == nullptr) {
+            OpBase::Run(cur_op.InnerOp(), bwd_ins, tmp_outs, cur_op.Attrs(),
+                        cur_op.place());
+          } else {
+            OpBase::Run(cur_op.InnerOp(), *tmp_ins_ptr, tmp_outs,
+                        cur_op.Attrs(), cur_op.place());
+          }
+        } catch (platform::EnforceNotMet& exception) {
+          Clear();
+          throw std::move(exception);
+        } catch (std::exception& ex) {
+          Clear();
+          PADDLE_THROW(platform::errors::External("%s", ex.what()));
         }
       }

diff --git a/python/paddle/fluid/tests/unittests/test_pylayer_op.py b/python/paddle/fluid/tests/unittests/test_pylayer_op.py
index d329bf570a5..e3374c15a0a 100644
--- a/python/paddle/fluid/tests/unittests/test_pylayer_op.py
+++ b/python/paddle/fluid/tests/unittests/test_pylayer_op.py
@@ -234,8 +234,7 @@ class TestPyLayer(unittest.TestCase):
         z = Layer_bk_none1.apply(input2)
 
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.sum().backward()
+            z.sum().backward()
 
         class Layer_bk_none2(PyLayer):
             @staticmethod
@@ -249,9 +248,9 @@ class TestPyLayer(unittest.TestCase):
         input1 = paddle.randn([2, 3]).astype("float64")
         input1.stop_gradient = False
         z = Layer_bk_none2.apply(input1, input1)
+
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.mean().backward()
+            z.mean().backward()
 
         class Layer_bk_one1(PyLayer):
             @staticmethod
@@ -265,9 +264,9 @@ class TestPyLayer(unittest.TestCase):
         input1 = paddle.randn([2, 3]).astype("float64")
         input1.stop_gradient = False
         z = Layer_bk_one1.apply(input1)
+
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.mean().backward()
+            z.mean().backward()
 
         class Layer_bk_one2(PyLayer):
             @staticmethod
@@ -280,11 +279,11 @@ class TestPyLayer(unittest.TestCase):
 
         input1 = paddle.randn([2, 3]).astype("float64")
         input1.stop_gradient = False
+
         y = Layer_bk_one2.apply(input1, input1)
         z = y[0] + y[1]
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.mean().backward()
+            z.mean().backward()
 
         class Layer_no_bk(PyLayer):
             @staticmethod
@@ -295,10 +294,9 @@ class TestPyLayer(unittest.TestCase):
         input1.stop_gradient = False
         z = Layer_no_bk.apply(input1)
 
-        with self.assertRaises(NotImplementedError):
-            with paddle.fluid.dygraph.guard():
-                z = z[0] + z[1]
-                z.mean().backward()
+        with self.assertRaises(OSError):
+            z = z[0] + z[1]
+            z.mean().backward()
 
         class Layer_bk_match(PyLayer):
             @staticmethod
@@ -313,9 +311,8 @@ class TestPyLayer(unittest.TestCase):
         input1.stop_gradient = False
         z = Layer_bk_match.apply(input1)
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z = z[0] + z[1]
-                z.mean().backward()
+            z = z[0] + z[1]
+            z.mean().backward()
 
     def test_pylayer_bk_return_none(self):
         class Layer_bk_none1(PyLayer):
             @staticmethod
@@ -334,8 +331,7 @@ class TestPyLayer(unittest.TestCase):
         z = Layer_bk_none1.apply(input1, input2)
 
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.mean().backward()
+            z.mean().backward()
 
         class Layer_bk_none2(PyLayer):
             @staticmethod
@@ -353,8 +349,7 @@ class TestPyLayer(unittest.TestCase):
         z = Layer_bk_none2.apply(input1, input2)
         z = z[0] + z[1]
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.mean().backward()
+            z.mean().backward()
 
     def test_pylayer_inplace(self):
         class cus_tanh(PyLayer):
-- 
GitLab
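
For context, the engine-side change wraps the grad-op run in a try/catch: on any exception it calls Clear() to drop the partially-built backward state, then re-throws (Paddle's EnforceNotMet as-is, any other std::exception wrapped as an External error). The sketch below is a minimal, hypothetical Python reproduction of the behaviour the updated tests rely on; it assumes Paddle 2.x dygraph with paddle.autograd.PyLayer, and the BadBackward layer and the tensor names are made up for illustration rather than taken from the test suite.

import paddle
from paddle.autograd import PyLayer


class BadBackward(PyLayer):  # hypothetical layer, for illustration only
    @staticmethod
    def forward(ctx, x):
        return x * 2

    @staticmethod
    def backward(ctx, dy):
        return None  # wrong on purpose: a gradient tensor is expected here


x = paddle.randn([2, 3])
x.stop_gradient = False
y = BadBackward.apply(x)

# Per the updated tests, the faulty backward surfaces as a ValueError,
# with no paddle.fluid.dygraph.guard() wrapper needed in dygraph mode.
try:
    y.mean().backward()
except ValueError as err:
    print("backward raised:", err)

# Because the engine clears its state before re-throwing, an unrelated
# graph can still run backward afterwards.
w = paddle.randn([2, 3])
w.stop_gradient = False
(w * 3).mean().backward()
print(w.grad)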