diff --git a/paddle/fluid/imperative/basic_engine.cc b/paddle/fluid/imperative/basic_engine.cc
index d5350744e4c55384e14e4ff5f06bc90abed87ce2..023a148763d89f6f27f4fcaa098cd42399c4e148 100644
--- a/paddle/fluid/imperative/basic_engine.cc
+++ b/paddle/fluid/imperative/basic_engine.cc
@@ -470,12 +470,20 @@ void BasicEngine::Execute() {
 
       {
         VLOG(3) << "Start to execute grad op " << cur_op.Type();
-        if (tmp_ins_ptr == nullptr) {
-          OpBase::Run(cur_op.InnerOp(), bwd_ins, tmp_outs, cur_op.Attrs(),
-                      cur_op.place());
-        } else {
-          OpBase::Run(cur_op.InnerOp(), *tmp_ins_ptr, tmp_outs, cur_op.Attrs(),
-                      cur_op.place());
+        try {
+          if (tmp_ins_ptr == nullptr) {
+            OpBase::Run(cur_op.InnerOp(), bwd_ins, tmp_outs, cur_op.Attrs(),
+                        cur_op.place());
+          } else {
+            OpBase::Run(cur_op.InnerOp(), *tmp_ins_ptr, tmp_outs,
+                        cur_op.Attrs(), cur_op.place());
+          }
+        } catch (platform::EnforceNotMet& exception) {
+          Clear();
+          throw std::move(exception);
+        } catch (std::exception& ex) {
+          Clear();
+          PADDLE_THROW(platform::errors::External("%s", ex.what()));
         }
       }
 
diff --git a/python/paddle/fluid/tests/unittests/test_pylayer_op.py b/python/paddle/fluid/tests/unittests/test_pylayer_op.py
index d329bf570a5845c7c261e53e9bd0c064a908ae09..e3374c15a0abe200d8154c6a5c2c98d89bea0413 100644
--- a/python/paddle/fluid/tests/unittests/test_pylayer_op.py
+++ b/python/paddle/fluid/tests/unittests/test_pylayer_op.py
@@ -234,8 +234,7 @@ class TestPyLayer(unittest.TestCase):
 
         z = Layer_bk_none1.apply(input2)
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.sum().backward()
+            z.sum().backward()
 
         class Layer_bk_none2(PyLayer):
             @staticmethod
@@ -249,9 +248,9 @@ class TestPyLayer(unittest.TestCase):
         input1 = paddle.randn([2, 3]).astype("float64")
         input1.stop_gradient = False
         z = Layer_bk_none2.apply(input1, input1)
+
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.mean().backward()
+            z.mean().backward()
 
         class Layer_bk_one1(PyLayer):
             @staticmethod
@@ -265,9 +264,9 @@ class TestPyLayer(unittest.TestCase):
         input1 = paddle.randn([2, 3]).astype("float64")
         input1.stop_gradient = False
         z = Layer_bk_one1.apply(input1)
+
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.mean().backward()
+            z.mean().backward()
 
         class Layer_bk_one2(PyLayer):
             @staticmethod
@@ -280,11 +279,11 @@ class TestPyLayer(unittest.TestCase):
 
         input1 = paddle.randn([2, 3]).astype("float64")
         input1.stop_gradient = False
+
         y = Layer_bk_one2.apply(input1, input1)
         z = y[0] + y[1]
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.mean().backward()
+            z.mean().backward()
 
         class Layer_no_bk(PyLayer):
             @staticmethod
@@ -295,10 +294,9 @@ class TestPyLayer(unittest.TestCase):
         input1.stop_gradient = False
         z = Layer_no_bk.apply(input1)
 
-        with self.assertRaises(NotImplementedError):
-            with paddle.fluid.dygraph.guard():
-                z = z[0] + z[1]
-                z.mean().backward()
+        with self.assertRaises(OSError):
+            z = z[0] + z[1]
+            z.mean().backward()
 
         class Layer_bk_match(PyLayer):
             @staticmethod
@@ -313,9 +311,8 @@ class TestPyLayer(unittest.TestCase):
         input1.stop_gradient = False
         z = Layer_bk_match.apply(input1)
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z = z[0] + z[1]
-                z.mean().backward()
+            z = z[0] + z[1]
+            z.mean().backward()
 
     def test_pylayer_bk_return_none(self):
         class Layer_bk_none1(PyLayer):
@@ -334,8 +331,7 @@ class TestPyLayer(unittest.TestCase):
 
         z = Layer_bk_none1.apply(input1, input2)
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.mean().backward()
+            z.mean().backward()
 
         class Layer_bk_none2(PyLayer):
             @staticmethod
@@ -353,8 +349,7 @@ class TestPyLayer(unittest.TestCase):
         z = Layer_bk_none2.apply(input1, input2)
         z = z[0] + z[1]
         with self.assertRaises(ValueError):
-            with paddle.fluid.dygraph.guard():
-                z.mean().backward()
+            z.mean().backward()
 
     def test_pylayer_inplace(self):
         class cus_tanh(PyLayer):
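
Note (not part of the patch): with this change, an exception thrown while running a grad op in BasicEngine::Execute() first triggers Clear() and is then re-raised; exceptions other than platform::EnforceNotMet are wrapped in platform::errors::External, which surfaces in Python as OSError, matching the updated expectation in the Layer_no_bk test. The following is a minimal, illustrative Python sketch of the user-visible behavior, assuming Paddle dygraph mode and the paddle.autograd.PyLayer API used by the test; the layer name below is hypothetical.

import paddle
from paddle.autograd import PyLayer


class DoubleNoBackward(PyLayer):  # hypothetical layer, mirrors Layer_no_bk
    @staticmethod
    def forward(ctx, x):
        return x * 2, x * 2
    # backward is deliberately not implemented


x = paddle.randn([2, 3]).astype("float64")
x.stop_gradient = False
y = DoubleNoBackward.apply(x)
z = (y[0] + y[1]).mean()

try:
    # The missing backward raises inside the grad op; after this patch the
    # engine calls Clear() and the error reaches Python as an OSError
    # (platform::errors::External), as asserted in the updated test.
    z.backward()
except OSError as err:
    print("backward failed:", err)

# The Clear() calls added above are intended to leave the engine in a clean
# state, so an unrelated backward pass afterwards should still run normally.
w = paddle.randn([2, 3])
w.stop_gradient = False
(w * w).sum().backward()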