From 76103c8846eedc9e4fadebe9fc01e8a09cb88188 Mon Sep 17 00:00:00 2001
From: Zhanlue Yang
Date: Fri, 28 Jan 2022 17:24:22 +0800
Subject: [PATCH] Added Eager Dygraph support for user_defined_grads (#39309)

---
 .../paddle/fluid/tests/unittests/op_test.py   | 21 ++++++++++++++++-----
 .../fluid/tests/unittests/test_diag_v2.py     |  6 ++++++
 .../fluid/tests/unittests/test_diagonal_op.py | 12 +++++++++---
 .../fluid/tests/unittests/test_digamma_op.py  | 12 ++++++++++++
 .../fluid/tests/unittests/test_trunc_op.py    |  5 +++++
 5 files changed, 48 insertions(+), 8 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/op_test.py b/python/paddle/fluid/tests/unittests/op_test.py
index e05acdd6b42..754d7bd54b9 100644
--- a/python/paddle/fluid/tests/unittests/op_test.py
+++ b/python/paddle/fluid/tests/unittests/op_test.py
@@ -30,6 +30,7 @@ from copy import copy
 import paddle
 import paddle.fluid as fluid
 import paddle.fluid.core as core
+from paddle.fluid.framework import _in_eager_mode
 from paddle.fluid.framework import _test_eager_guard
 from paddle.fluid.backward import append_backward
 from paddle.fluid.op import Operator
@@ -1831,11 +1832,21 @@ class OpTest(unittest.TestCase):
             for no_grad_val in no_grad_set:
                 del (inputs[no_grad_val])
 
-            grad_inputs = paddle.grad(
-                outputs=fluid.layers.utils.flatten(outputs),
-                inputs=fluid.layers.utils.flatten(inputs),
-                grad_outputs=grad_outputs)
-            return [grad.numpy() for grad in grad_inputs]
+            if _in_eager_mode():
+                core.eager.run_backward(
+                    fluid.layers.utils.flatten(outputs), grad_outputs,
+                    False)
+                grad_inputs = []
+                for inputs_list in inputs.values():
+                    for inp in inputs_list:
+                        grad_inputs.append(inp.grad.numpy())
+                return grad_inputs
+            else:
+                grad_inputs = paddle.grad(
+                    outputs=fluid.layers.utils.flatten(outputs),
+                    inputs=fluid.layers.utils.flatten(inputs),
+                    grad_outputs=grad_outputs)
+                return [grad.numpy() for grad in grad_inputs]
 
     @staticmethod
     def _numpy_to_lod_tensor(np_value, lod, place):
diff --git a/python/paddle/fluid/tests/unittests/test_diag_v2.py b/python/paddle/fluid/tests/unittests/test_diag_v2.py
index 1478cd888c4..0371fa05428 100644
--- a/python/paddle/fluid/tests/unittests/test_diag_v2.py
+++ b/python/paddle/fluid/tests/unittests/test_diag_v2.py
@@ -21,6 +21,7 @@ import paddle
 import paddle.fluid as fluid
 from paddle.fluid import core
 from paddle.fluid import Program, program_guard
+from paddle.fluid.framework import _test_eager_guard
 
 
 class TestDiagV2Op(OpTest):
@@ -239,6 +240,9 @@ class TestDiagV2API(unittest.TestCase):
     def test_cpu(self):
         paddle.disable_static(place=paddle.fluid.CPUPlace())
         self.run_imperative()
+        with _test_eager_guard():
+            self.run_imperative()
+
         paddle.enable_static()
 
         with fluid.program_guard(fluid.Program()):
@@ -250,6 +254,8 @@
 
         paddle.disable_static(place=paddle.fluid.CUDAPlace(0))
         self.run_imperative()
+        with _test_eager_guard():
+            self.run_imperative()
         paddle.enable_static()
 
         with fluid.program_guard(fluid.Program()):
diff --git a/python/paddle/fluid/tests/unittests/test_diagonal_op.py b/python/paddle/fluid/tests/unittests/test_diagonal_op.py
index 5617716ecb6..4dab7c0df40 100644
--- a/python/paddle/fluid/tests/unittests/test_diagonal_op.py
+++ b/python/paddle/fluid/tests/unittests/test_diagonal_op.py
@@ -22,6 +22,7 @@ import paddle.nn.functional as F
 import paddle.fluid as fluid
 import paddle.fluid.core as core
 import paddle.tensor as tensor
+from paddle.fluid.framework import _test_eager_guard
 
 paddle.enable_static()
 
@@ -33,10 +34,10 @@ class TestDiagonalOp(OpTest):
         self.outputs = {'Out': self.target}
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_eager=True)
 
     def test_check_grad(self):
-        self.check_grad(['Input'], 'Out')
+        self.check_grad(['Input'], 'Out', check_eager=True)
 
     def init_config(self):
         self.case = np.random.randn(10, 5, 2).astype('float64')
@@ -79,7 +80,8 @@ class TestDiagonalOpCase2(TestDiagonalOp):
             ['Input'],
             'Out',
             user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out])
+            user_defined_grad_outputs=[self.grad_out],
+            check_eager=True)
 
 
 class TestDiagonalOpCase3(TestDiagonalOp):
@@ -122,6 +124,10 @@ class TestDiagonalAPI(unittest.TestCase):
         self.assertEqual(np.allclose(out.numpy(), out_ref, rtol=1e-08), True)
         paddle.enable_static()
 
+    def test_api_eager_dygraph(self):
+        with _test_eager_guard():
+            self.test_api_dygraph()
+
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/test_digamma_op.py b/python/paddle/fluid/tests/unittests/test_digamma_op.py
index 503094779a3..3cb31b888f4 100644
--- a/python/paddle/fluid/tests/unittests/test_digamma_op.py
+++ b/python/paddle/fluid/tests/unittests/test_digamma_op.py
@@ -20,6 +20,7 @@ import paddle
 import paddle.fluid as fluid
 import paddle.static as static
 from op_test import OpTest
+from paddle.fluid.framework import _test_eager_guard
 
 
 class TestDigammaOp(OpTest):
@@ -94,6 +95,10 @@ class TestDigammaAPI(unittest.TestCase):
                     res = paddle.digamma(input_t).numpy()
                     self.assertEqual(np.allclose(res, sc_res, rtol=1e-05), True)
 
+    def test_in_eager_dynamic_mode(self):
+        with _test_eager_guard():
+            self.test_in_dynamic_mode()
+
     def test_name_argument(self):
         with static.program_guard(static.Program()):
             x = static.data(name="x", shape=self._shape, dtype=self.dtypes[0])
@@ -114,6 +119,13 @@ class TestDigammaAPI(unittest.TestCase):
                 input_t = paddle.to_tensor(input)
                 res = paddle.digamma(input_t)
 
+        with self.assertRaises(RuntimeError):
+            with fluid.dygraph.guard():
+                with _test_eager_guard():
+                    input = np.random.random(self._shape).astype("int32")
+                    input_t = paddle.to_tensor(input)
+                    res = paddle.digamma(input_t)
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/test_trunc_op.py b/python/paddle/fluid/tests/unittests/test_trunc_op.py
index b4482b402ea..08a35db3ac4 100644
--- a/python/paddle/fluid/tests/unittests/test_trunc_op.py
+++ b/python/paddle/fluid/tests/unittests/test_trunc_op.py
@@ -21,6 +21,7 @@ import paddle
 import paddle.fluid.core as core
 import paddle.fluid as fluid
 from paddle.fluid import Program, program_guard
+from paddle.fluid.framework import _test_eager_guard
 
 paddle.enable_static()
 
@@ -78,6 +79,10 @@ class TestTruncAPI(unittest.TestCase):
         self.assertEqual(np.allclose(out.numpy(), out_ref, rtol=1e-08), True)
         paddle.enable_static()
 
+    def test_api_eager_dygraph(self):
+        with _test_eager_guard():
+            self.test_api_dygraph()
+
     def test_errors(self):
         with paddle.static.program_guard(paddle.static.Program()):
             x = paddle.fluid.data('X', [20, 20], 'bool')
--
GitLab
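
Note on the pattern this patch applies: each touched test re-runs its existing imperative (dygraph) assertions under `_test_eager_guard()` and, where an `OpTest` subclass is involved, passes `check_eager=True` to `check_output`/`check_grad` so the eager code path is exercised as well. The sketch below is illustrative only and not part of the patch; the class and method names are hypothetical, and `paddle.trunc` is used merely as a convenient example API.

# Illustrative sketch (not from the patch): run the same imperative check
# once in regular dygraph mode and once under _test_eager_guard().
import unittest

import numpy as np
import paddle
from paddle.fluid.framework import _test_eager_guard


class TestEagerDygraphSketch(unittest.TestCase):  # hypothetical name
    def _run_imperative(self):
        # Shared assertions, executed on whichever dygraph path is active.
        x_np = np.random.random((4, 4)).astype("float64")
        x = paddle.to_tensor(x_np)
        out = paddle.trunc(x)
        np.testing.assert_allclose(out.numpy(), np.trunc(x_np), rtol=1e-08)

    def test_api_dygraph(self):
        paddle.disable_static()
        self._run_imperative()
        paddle.enable_static()

    def test_api_eager_dygraph(self):
        # Same assertions, but executed on the eager dygraph code path,
        # mirroring the test_api_eager_dygraph methods added by this patch.
        paddle.disable_static()
        with _test_eager_guard():
            self._run_imperative()
        paddle.enable_static()


if __name__ == "__main__":
    unittest.main()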