From 5c9feef146d1b90c6868cd5143a9aae0d4615352 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=A7=9C=E6=B0=B8=E4=B9=85?= <34344716+yjjiang11@users.noreply.github.com> Date: Mon, 19 Dec 2022 17:02:39 +0800 Subject: [PATCH] rm unittest eager guard part1 (#48797) * rm unittest eager guard part1 * review * reset spwan runner base * reset op_test * rm test_type_core --- .../unittests/check_nan_inf_base_dygraph.py | 3 - .../tests/unittests/dygraph_fleet_api.py | 3 - .../fluid/tests/unittests/gradient_checker.py | 11 +-- ...el_dygraph_gradient_check_in_eager_mode.py | 92 +++++++++---------- .../fluid/tests/unittests/test_Tensor_type.py | 34 +------ 5 files changed, 49 insertions(+), 94 deletions(-) diff --git a/python/paddle/fluid/tests/unittests/check_nan_inf_base_dygraph.py b/python/paddle/fluid/tests/unittests/check_nan_inf_base_dygraph.py index 35d1c843b8..e46b861c71 100644 --- a/python/paddle/fluid/tests/unittests/check_nan_inf_base_dygraph.py +++ b/python/paddle/fluid/tests/unittests/check_nan_inf_base_dygraph.py @@ -21,7 +21,6 @@ os.environ[str("GLOG_vmodule")] = str("nan_inf_utils_detail=10") import paddle import paddle.nn as nn -from paddle.fluid.framework import _test_eager_guard np.random.seed(0) @@ -114,6 +113,4 @@ def run_check(): if __name__ == '__main__': - with _test_eager_guard(): - run_check() run_check() diff --git a/python/paddle/fluid/tests/unittests/dygraph_fleet_api.py b/python/paddle/fluid/tests/unittests/dygraph_fleet_api.py index 4900b063d2..bbc3f4ed98 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_fleet_api.py +++ b/python/paddle/fluid/tests/unittests/dygraph_fleet_api.py @@ -18,7 +18,6 @@ import unittest import numpy as np import paddle -from paddle.fluid.framework import _test_eager_guard class TestDygraphFleetAPI(unittest.TestCase): @@ -50,6 +49,4 @@ class TestDygraphFleetAPI(unittest.TestCase): if __name__ == "__main__": - with _test_eager_guard(): - pass unittest.main() diff --git a/python/paddle/fluid/tests/unittests/gradient_checker.py b/python/paddle/fluid/tests/unittests/gradient_checker.py index dbfb40bd5c..146771d1ed 100644 --- a/python/paddle/fluid/tests/unittests/gradient_checker.py +++ b/python/paddle/fluid/tests/unittests/gradient_checker.py @@ -22,7 +22,6 @@ import paddle import paddle.fluid as fluid import paddle.fluid.core as core from paddle.fluid.backward import _append_grad_suffix_, _as_list -from paddle.fluid.framework import _test_eager_guard def _product(t): @@ -769,10 +768,7 @@ def double_grad_check_for_dygraph( x_init = _as_list(x_init) paddle.disable_static() - with _test_eager_guard(): - eager_double_grad = get_eager_double_grad( - func, x_init, y_grads_init, place - ) + eager_double_grad = get_eager_double_grad(func, x_init, y_grads_init, place) paddle.enable_static() static_double_grad = get_static_double_grad( @@ -935,10 +931,7 @@ def triple_grad_check_for_dygraph( x_init = _as_list(x_init) paddle.disable_static() - with _test_eager_guard(): - eager_triple_grad = get_eager_triple_grad( - func, x_init, y_grads_init, place - ) + eager_triple_grad = get_eager_triple_grad(func, x_init, y_grads_init, place) paddle.enable_static() static_triple_grad = get_static_triple_grad( diff --git a/python/paddle/fluid/tests/unittests/parallel_dygraph_gradient_check_in_eager_mode.py b/python/paddle/fluid/tests/unittests/parallel_dygraph_gradient_check_in_eager_mode.py index 5680f7a40e..533b9c04cd 100644 --- a/python/paddle/fluid/tests/unittests/parallel_dygraph_gradient_check_in_eager_mode.py +++ 
b/python/paddle/fluid/tests/unittests/parallel_dygraph_gradient_check_in_eager_mode.py @@ -19,7 +19,6 @@ import numpy as np import paddle import paddle.distributed as dist import paddle.fluid as fluid -from paddle.fluid.framework import _test_eager_guard from paddle.nn import Linear paddle.seed(1024) @@ -69,58 +68,57 @@ class SimpleNet(fluid.Layer): class TestDistTraning(unittest.TestCase): def test_multiple_gpus(self): self.trainer_id = dist.get_rank() - with _test_eager_guard(): - self.pg = dist.init_parallel_env() + self.pg = dist.init_parallel_env() - model_a = SimpleNet(self.trainer_id) - model_b = SimpleNet(self.trainer_id) + model_a = SimpleNet(self.trainer_id) + model_b = SimpleNet(self.trainer_id) - state_dict = model_a.state_dict() - model_b.set_state_dict(state_dict) + state_dict = model_a.state_dict() + model_b.set_state_dict(state_dict) - model_a = paddle.DataParallel( - model_a, find_unused_parameters=True, group=self.pg + model_a = paddle.DataParallel( + model_a, find_unused_parameters=True, group=self.pg + ) + model_b = paddle.DataParallel( + model_b, find_unused_parameters=True, group=self.pg + ) + + ones_input = paddle.ones(shape=(batch, in_dim)) + ones_input.stop_gradient = True + + w1_grad_sum = np.zeros((in_dim, out_dim), dtype='float32') + w2_grad_sum = np.zeros((in_dim, out_dim), dtype='float32') + + for step_id in range(5): + random_input = paddle.rand(shape=(batch, in_dim)) + random_input.stop_gradient = True + + if step_id % 2 == 0: + out_a = model_a(random_input) + out_b = model_b(random_input) + else: + out_a = model_a(ones_input) + out_b = model_b(ones_input) + + out_a.sum().backward() + out_b.sum().backward() + + self.check_gradient(model_a.parameters()) + self.check_gradient(model_b.parameters()) + + # test acc gradient + w1_grad_sum = self.check_acc( + model_a._layers.w1.grad, + w1_grad_sum, + model_b._layers.w1.grad, ) - model_b = paddle.DataParallel( - model_b, find_unused_parameters=True, group=self.pg + w2_grad_sum = self.check_acc( + model_a._layers.w2.grad, + w2_grad_sum, + model_b._layers.w2.grad, ) - ones_input = paddle.ones(shape=(batch, in_dim)) - ones_input.stop_gradient = True - - w1_grad_sum = np.zeros((in_dim, out_dim), dtype='float32') - w2_grad_sum = np.zeros((in_dim, out_dim), dtype='float32') - - for step_id in range(5): - random_input = paddle.rand(shape=(batch, in_dim)) - random_input.stop_gradient = True - - if step_id % 2 == 0: - out_a = model_a(random_input) - out_b = model_b(random_input) - else: - out_a = model_a(ones_input) - out_b = model_b(ones_input) - - out_a.sum().backward() - out_b.sum().backward() - - self.check_gradient(model_a.parameters()) - self.check_gradient(model_b.parameters()) - - # test acc gradient - w1_grad_sum = self.check_acc( - model_a._layers.w1.grad, - w1_grad_sum, - model_b._layers.w1.grad, - ) - w2_grad_sum = self.check_acc( - model_a._layers.w2.grad, - w2_grad_sum, - model_b._layers.w2.grad, - ) - - model_a.clear_gradients() + model_a.clear_gradients() def check_acc(self, grad, grad_sum, acc_grad): if grad is not None: diff --git a/python/paddle/fluid/tests/unittests/test_Tensor_type.py b/python/paddle/fluid/tests/unittests/test_Tensor_type.py index 35efdf7ed4..9a059cb9ae 100644 --- a/python/paddle/fluid/tests/unittests/test_Tensor_type.py +++ b/python/paddle/fluid/tests/unittests/test_Tensor_type.py @@ -17,12 +17,10 @@ import unittest import numpy as np import paddle -import paddle.fluid.core as core -from paddle.fluid.framework import _test_eager_guard class TensorTypeTest(unittest.TestCase): - def 
func_type_totensor(self):
+    def test_type_totensor(self):
         paddle.disable_static()
         inx = np.array([1, 2])
         tensorx = paddle.to_tensor(inx)
@@ -30,12 +28,7 @@ class TensorTypeTest(unittest.TestCase):
         expectx = "<class 'paddle.Tensor'>"
         self.assertEqual((typex_str == expectx), True)
 
-    def test_type_totensor(self):
-        with _test_eager_guard():
-            self.func_type_totensor()
-        self.func_type_totensor()
-
-    def func_type_Tensor(self):
+    def test_type_Tensor(self):
         paddle.disable_static()
         inx = np.array([1, 2])
         tensorx = paddle.Tensor(inx)
@@ -49,29 +42,6 @@ class TensorTypeTest(unittest.TestCase):
         expectx = "<class 'paddle.Tensor'>"
         self.assertEqual((typex_str == expectx), True)
 
-    def test_type_Tensor(self):
-        with _test_eager_guard():
-            self.func_type_Tensor()
-        self.func_type_Tensor()
-
-    def func_type_core(self):
-        paddle.disable_static()
-        inx = np.array([1, 2])
-        tensorx = core.VarBase(inx)
-        typex_str = str(type(tensorx))
-        expectx = "<class 'paddle.fluid.core_avx.VarBase'>"
-        self.assertEqual((typex_str == expectx), True)
-
-        tensorx = paddle.framework.VarBase(inx)
-        typex_str = str(type(tensorx))
-        expectx = "<class 'paddle.fluid.core_avx.VarBase'>"
-        self.assertEqual((typex_str == expectx), True)
-
-    def test_type_core(self):
-        with _test_eager_guard():
-            pass
-        self.func_type_core()
-
 if __name__ == '__main__':
     unittest.main()
-- 
GitLab