From 3b8bcd5a0cc44b82ef676c2ed1bf7dca391ea3c0 Mon Sep 17 00:00:00 2001
From: Weilong Wu
Date: Tue, 22 Mar 2022 16:57:23 +0800
Subject: [PATCH] Update unit tests by using _test_eager_guard (#40760)

---
 .../test_imperative_partitial_backward.py     |  8 ++++++-
 .../unittests/test_tensor_register_hook.py    | 21 ++++++++++++++++-----
 2 files changed, 23 insertions(+), 6 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/test_imperative_partitial_backward.py b/python/paddle/fluid/tests/unittests/test_imperative_partitial_backward.py
index 5e3d3c81188..cd31b13083d 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_partitial_backward.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_partitial_backward.py
@@ -17,10 +17,11 @@ from __future__ import print_function
 import unittest
 import paddle.fluid as fluid
 import numpy as np
+from paddle.fluid.framework import _test_eager_guard
 
 
 class TestImperativePartitialBackward(unittest.TestCase):
-    def test_partitial_backward(self):
+    def func_partitial_backward(self):
         with fluid.dygraph.guard():
             x = np.random.randn(2, 4, 5).astype("float32")
             x = fluid.dygraph.to_variable(x)
@@ -49,6 +50,11 @@ class TestImperativePartitialBackward(unittest.TestCase):
             linear1.clear_gradients()
             linear2.clear_gradients()
 
+    def test_partitial_backward(self):
+        with _test_eager_guard():
+            self.func_partitial_backward()
+        self.func_partitial_backward()
+
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/test_tensor_register_hook.py b/python/paddle/fluid/tests/unittests/test_tensor_register_hook.py
index aac8b6a99b6..086527ab554 100644
--- a/python/paddle/fluid/tests/unittests/test_tensor_register_hook.py
+++ b/python/paddle/fluid/tests/unittests/test_tensor_register_hook.py
@@ -20,6 +20,8 @@ import numpy as np
 import paddle
 import paddle.nn as nn
 from paddle.fluid.framework import _test_eager_guard, _in_eager_mode
+import paddle.fluid as fluid
+import paddle.fluid.core as core
 
 
 class SimpleNet(nn.Layer):
@@ -445,8 +447,7 @@ class TestTensorRegisterHook(unittest.TestCase):
             self.func_multiple_hooks_for_interior_var()
         self.func_multiple_hooks_for_interior_var()
 
-    # TODO(wuweilong): enable this case when DoubleGrad in eager mode is ready
-    def test_hook_in_double_grad(self):
+    def func_hook_in_double_grad(self):
         def double_print_hook(grad):
             grad = grad * 2
             print(grad)
@@ -461,10 +462,11 @@
         x.register_hook(double_print_hook)
 
         y = x * x
-
+        fluid.set_flags({'FLAGS_retain_grad_for_all_tensor': False})
         # Since y = x * x, dx = 2 * x
         dx = paddle.grad(
             outputs=[y], inputs=[x], create_graph=True, retain_graph=True)[0]
+        fluid.set_flags({'FLAGS_retain_grad_for_all_tensor': True})
 
         z = y + dx
         self.assertTrue(x.grad is None)
@@ -475,8 +477,17 @@
 
         # x.gradient() = 2 * x + 2 = 4.0
         # after changed by hook: 8.0
-        z.backward()
-        self.assertTrue(np.array_equal(x.grad.numpy(), np.array([8.])))
+        # TODO(wuweilong): enable this case when DoubleGrad in eager mode is ready
+        if core._in_eager_mode():
+            pass
+        else:
+            z.backward()
+            self.assertTrue(np.array_equal(x.grad.numpy(), np.array([8.])))
+
+    def test_hook_in_double_grad(self):
+        with _test_eager_guard():
+            self.func_hook_in_double_grad()
+        self.func_hook_in_double_grad()
 
     def func_remove_one_hook_multiple_times(self):
         for device in self.devices:
--
GitLab
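
Note on the pattern: this patch applies Paddle's usual eager-mode test
migration. Each original `test_*` body is renamed to `func_*`, and a new
`test_*` wrapper runs it twice: once inside `_test_eager_guard()` (eager
mode) and once outside (legacy dygraph mode), so both execution paths stay
covered by a single test entry point. The sketch below illustrates that
run-twice pattern in isolation; `fake_eager_guard` and `_EAGER_MODE` are
hypothetical stand-ins so the example runs without Paddle installed (the
real guard is paddle.fluid.framework._test_eager_guard):

import contextlib
import unittest

# Module-level toggle standing in for Paddle's global eager-mode switch.
_EAGER_MODE = {"enabled": False}


@contextlib.contextmanager
def fake_eager_guard():
    """Hypothetical stand-in for paddle.fluid.framework._test_eager_guard:
    turns 'eager mode' on for the enclosed block and restores the previous
    state on exit."""
    old = _EAGER_MODE["enabled"]
    _EAGER_MODE["enabled"] = True
    try:
        yield
    finally:
        _EAGER_MODE["enabled"] = old


class ExampleTest(unittest.TestCase):
    def func_example(self):
        # The shared test body; it runs identically in both modes and may
        # branch on the mode flag where behavior legitimately differs.
        self.assertIn(_EAGER_MODE["enabled"], (True, False))

    def test_example(self):
        # Run the body once under eager mode, then once under legacy mode.
        with fake_eager_guard():
            self.func_example()
        self.func_example()


if __name__ == '__main__':
    unittest.main()

The same structure explains the `core._in_eager_mode()` branch added to
func_hook_in_double_grad above: the body still executes in both modes, but
the backward assertions are skipped in eager mode until DoubleGrad support
is ready, per the TODO retained in the patch.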