From 2a3ddce0ed0881d422b2132e88262b62c83b2df1 Mon Sep 17 00:00:00 2001
From: Weilong Wu
Date: Mon, 5 Dec 2022 11:14:18 +0800
Subject: [PATCH] rm _enable_legacy_dygraph (#48677)

* rm _enable_legacy

* recover original code
---
 .../tests/unittests/test_linalg_lstsq_op.py   | 21 -----
 .../tests/unittests/test_pairwise_distance.py | 86 +------------------
 .../fluid/tests/unittests/test_slice_op.py    | 49 +++++------
 .../test_tensor_fill_diagonal_tensor.py       |  5 --
 .../test_uniform_random_inplace_op.py         |  9 --
 5 files changed, 24 insertions(+), 146 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/test_linalg_lstsq_op.py b/python/paddle/fluid/tests/unittests/test_linalg_lstsq_op.py
index b82fb8ed09..bae9094a7f 100644
--- a/python/paddle/fluid/tests/unittests/test_linalg_lstsq_op.py
+++ b/python/paddle/fluid/tests/unittests/test_linalg_lstsq_op.py
@@ -92,27 +92,6 @@ class LinalgLstsqTestCase(unittest.TestCase):
             self._result_sg_values = results[3].numpy()
             self.assert_np_close()
 
-    def test_legacy_dygraph(self):
-        paddle.disable_static()
-        paddle.fluid.framework._enable_legacy_dygraph()
-        for dev in self.devices:
-            paddle.set_device(dev)
-            place = paddle.CPUPlace() if dev == "cpu" else paddle.CUDAPlace(0)
-            x = paddle.to_tensor(
-                self._input_data_1, place=place, dtype=self.dtype
-            )
-            y = paddle.to_tensor(
-                self._input_data_2, place=place, dtype=self.dtype
-            )
-            results = paddle.linalg.lstsq(
-                x, y, rcond=self.rcond, driver=self.driver
-            )
-            self._result_solution = results[0].numpy()
-            self._result_residuals = results[1].numpy()
-            self._result_rank = results[2].numpy()
-            self._result_sg_values = results[3].numpy()
-            self.assert_np_close()
-
     def test_static(self):
         paddle.enable_static()
         for dev in self.devices:
diff --git a/python/paddle/fluid/tests/unittests/test_pairwise_distance.py b/python/paddle/fluid/tests/unittests/test_pairwise_distance.py
index 8e7463abd9..a764612cd9 100644
--- a/python/paddle/fluid/tests/unittests/test_pairwise_distance.py
+++ b/python/paddle/fluid/tests/unittests/test_pairwise_distance.py
@@ -95,25 +95,6 @@ def test_dygraph(
     return dygraph_ret
 
 
-def test_legacy_dygraph(
-    place, x_np, y_np, p=2.0, epsilon=1e-6, keepdim=False, functional=False
-):
-    paddle.fluid.framework._enable_legacy_dygraph()
-    x = paddle.to_tensor(x_np)
-    y = paddle.to_tensor(y_np)
-    if functional:
-        legacy_distance = call_pairwise_distance_functional(
-            x=x, y=y, p=p, epsilon=epsilon, keepdim=keepdim
-        )
-    else:
-        legacy_distance = call_pairwise_distance_layer(
-            x=x, y=y, p=p, epsilon=epsilon, keepdim=keepdim
-        )
-    legacy_ret = legacy_distance.numpy()
-    paddle.fluid.framework._disable_legacy_dygraph()
-    return legacy_ret
-
-
 class TestPairwiseDistance(unittest.TestCase):
     def test_pairwise_distance(self):
         epsilon = 1e-6
@@ -148,14 +129,6 @@ class TestPairwiseDistance(unittest.TestCase):
                                 epsilon=epsilon,
                                 keepdim=keepdim,
                             )
-                            legacy_ret = test_legacy_dygraph(
-                                place,
-                                x_np,
-                                y_np,
-                                p,
-                                epsilon=epsilon,
-                                keepdim=keepdim,
-                            )
                             excepted_value = np_pairwise_distance(
                                 x_np, y_np, p, epsilon=epsilon, keepdim=keepdim
                             )
@@ -166,9 +139,6 @@ class TestPairwiseDistance(unittest.TestCase):
                             self.assertEqual(
                                 dygraph_ret.shape, excepted_value.shape
                             )
-                            self.assertEqual(
-                                legacy_ret.shape, excepted_value.shape
-                            )
                             np.testing.assert_allclose(
                                 static_ret, excepted_value, rtol=1e-05
                             )
@@ -176,10 +146,6 @@ class TestPairwiseDistance(unittest.TestCase):
                             np.testing.assert_allclose(
                                 dygraph_ret, excepted_value, rtol=1e-05
                             )
-                            np.testing.assert_allclose(
-                                legacy_ret, excepted_value, rtol=1e-05
-                            )
-
                             static_functional_ret = test_static(
                                 place,
                                 x_np,
@@ -196,14 +162,6 @@ class TestPairwiseDistance(unittest.TestCase):
                                 epsilon=epsilon,
                                 keepdim=keepdim,
                             )
-                            legacy_functional_ret = test_legacy_dygraph(
-                                place,
-                                x_np,
-                                y_np,
-                                p,
-                                epsilon=epsilon,
-                                keepdim=keepdim,
-                            )
 
                             self.assertEqual(
                                 static_functional_ret.shape,
@@ -213,10 +171,6 @@ class TestPairwiseDistance(unittest.TestCase):
                                 dygraph_functional_ret.shape,
                                 excepted_value.shape,
                             )
-                            self.assertEqual(
-                                legacy_functional_ret.shape,
-                                excepted_value.shape,
-                            )
 
                             np.testing.assert_allclose(
                                 static_functional_ret,
@@ -228,11 +182,6 @@ class TestPairwiseDistance(unittest.TestCase):
                                 excepted_value,
                                 rtol=1e-05,
                             )
-                            np.testing.assert_allclose(
-                                legacy_functional_ret,
-                                excepted_value,
-                                rtol=1e-05,
-                            )
 
     def test_pairwise_distance_broadcast_1(self):
         shape_x = [100, 100]
@@ -248,20 +197,15 @@ class TestPairwiseDistance(unittest.TestCase):
         dygraph_ret = test_dygraph(
             place=place, x_np=x_np, y_np=y_np, epsilon=epsilon, keepdim=keepdim
         )
-        legacy_ret = test_legacy_dygraph(
-            place=place, x_np=x_np, y_np=y_np, epsilon=epsilon, keepdim=keepdim
-        )
         excepted_value = np_pairwise_distance(
             x_np, y_np, epsilon=epsilon, keepdim=keepdim
         )
 
         self.assertEqual(static_ret.shape, excepted_value.shape)
         self.assertEqual(dygraph_ret.shape, excepted_value.shape)
-        self.assertEqual(legacy_ret.shape, excepted_value.shape)
 
         np.testing.assert_allclose(static_ret, excepted_value, rtol=1e-05)
         np.testing.assert_allclose(dygraph_ret, excepted_value, rtol=1e-05)
-        np.testing.assert_allclose(legacy_ret, excepted_value, rtol=1e-05)
 
         static_functional_ret = test_static(
             place=place,
@@ -279,18 +223,9 @@ class TestPairwiseDistance(unittest.TestCase):
             keepdim=keepdim,
             functional=True,
         )
-        legacy_functional_ret = test_legacy_dygraph(
-            place=place,
-            x_np=x_np,
-            y_np=y_np,
-            epsilon=epsilon,
-            keepdim=keepdim,
-            functional=True,
-        )
 
         self.assertEqual(static_functional_ret.shape, excepted_value.shape)
         self.assertEqual(dygraph_functional_ret.shape, excepted_value.shape)
-        self.assertEqual(legacy_functional_ret.shape, excepted_value.shape)
 
         np.testing.assert_allclose(
             static_functional_ret, excepted_value, rtol=1e-05
@@ -298,9 +233,6 @@ class TestPairwiseDistance(unittest.TestCase):
         np.testing.assert_allclose(
             dygraph_functional_ret, excepted_value, rtol=1e-05
         )
-        np.testing.assert_allclose(
-            legacy_functional_ret, excepted_value, rtol=1e-05
-        )
 
     def test_pairwise_distance_broadcast_2(self):
         shape_x = [100, 100]
@@ -316,20 +248,16 @@ class TestPairwiseDistance(unittest.TestCase):
         dygraph_ret = test_dygraph(
             place=place, x_np=x_np, y_np=y_np, epsilon=epsilon, keepdim=keepdim
         )
-        legacy_ret = test_legacy_dygraph(
-            place=place, x_np=x_np, y_np=y_np, epsilon=epsilon, keepdim=keepdim
-        )
+
         excepted_value = np_pairwise_distance(
             x_np, y_np, epsilon=epsilon, keepdim=keepdim
         )
 
         self.assertEqual(static_ret.shape, excepted_value.shape)
         self.assertEqual(dygraph_ret.shape, excepted_value.shape)
-        self.assertEqual(legacy_ret.shape, excepted_value.shape)
 
         np.testing.assert_allclose(static_ret, excepted_value, rtol=1e-05)
         np.testing.assert_allclose(dygraph_ret, excepted_value, rtol=1e-05)
-        np.testing.assert_allclose(legacy_ret, excepted_value, rtol=1e-05)
 
         static_functional_ret = test_static(
             place=place,
@@ -347,18 +275,9 @@ class TestPairwiseDistance(unittest.TestCase):
             keepdim=keepdim,
             functional=True,
         )
-        legacy_functional_ret = test_legacy_dygraph(
-            place=place,
-            x_np=x_np,
-            y_np=y_np,
-            epsilon=epsilon,
-            keepdim=keepdim,
-            functional=True,
-        )
 
         self.assertEqual(static_functional_ret.shape, excepted_value.shape)
         self.assertEqual(dygraph_functional_ret.shape, excepted_value.shape)
-        self.assertEqual(legacy_functional_ret.shape, excepted_value.shape)
 
         np.testing.assert_allclose(
             static_functional_ret, excepted_value, rtol=1e-05
@@ -366,9 +285,6 @@ class TestPairwiseDistance(unittest.TestCase):
         np.testing.assert_allclose(
             dygraph_functional_ret, excepted_value, rtol=1e-05
         )
-        np.testing.assert_allclose(
-            legacy_functional_ret, excepted_value, rtol=1e-05
-        )
 
 
 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/test_slice_op.py b/python/paddle/fluid/tests/unittests/test_slice_op.py
index 371db6edd7..4538ef65c1 100644
--- a/python/paddle/fluid/tests/unittests/test_slice_op.py
+++ b/python/paddle/fluid/tests/unittests/test_slice_op.py
@@ -23,7 +23,6 @@ import paddle
 import paddle.fluid as fluid
 import paddle.fluid.core as core
 import paddle.fluid.layers as layers
-from paddle.fluid.framework import _enable_legacy_dygraph, _test_eager_guard
 
 paddle.enable_static()
 
@@ -640,29 +639,28 @@ class TestSliceApiWithTensor(unittest.TestCase):
 class TestSliceApiEager(unittest.TestCase):
     def test_slice_api(self):
         with paddle.fluid.dygraph.guard():
-            with _test_eager_guard():
-                a = paddle.rand(shape=[4, 5, 6], dtype='float32')
-                a.stop_gradient = False
-                axes = [0, 1, 2]
-                starts = [-3, 0, 2]
-                ends = [3, 2, 4]
-                a_1 = paddle.slice(a, axes=axes, starts=starts, ends=ends)
-
-                a_2 = paddle.slice(
-                    a,
-                    axes=axes,
-                    starts=paddle.to_tensor(starts),
-                    ends=paddle.to_tensor(ends),
-                )
-                np.testing.assert_array_equal(a_1.numpy(), a_2.numpy())
-                a_1.backward()
-                grad_truth = paddle.zeros_like(a)
-                grad_truth[-3:3, 0:2, 2:4] = 1
-                np.testing.assert_array_equal(grad_truth, a.gradient())
-
-                np.testing.assert_allclose(
-                    a_1.numpy(), a[-3:3, 0:2, 2:4], rtol=1e-05
-                )
+            a = paddle.rand(shape=[4, 5, 6], dtype='float32')
+            a.stop_gradient = False
+            axes = [0, 1, 2]
+            starts = [-3, 0, 2]
+            ends = [3, 2, 4]
+            a_1 = paddle.slice(a, axes=axes, starts=starts, ends=ends)
+
+            a_2 = paddle.slice(
+                a,
+                axes=axes,
+                starts=paddle.to_tensor(starts),
+                ends=paddle.to_tensor(ends),
+            )
+            np.testing.assert_array_equal(a_1.numpy(), a_2.numpy())
+            a_1.backward()
+            grad_truth = paddle.zeros_like(a)
+            grad_truth[-3:3, 0:2, 2:4] = 1
+            np.testing.assert_array_equal(grad_truth, a.gradient())
+
+            np.testing.assert_allclose(
+                a_1.numpy(), a[-3:3, 0:2, 2:4], rtol=1e-05
+            )
 
 
 class TestSliceApiWithLoDTensorArray(unittest.TestCase):
@@ -861,10 +859,9 @@ class TestInferShape(unittest.TestCase):
 )
 class TestImperativeCUDAPinnedInput(unittest.TestCase):
     def test_input_cuda_pinned_var(self):
-        _enable_legacy_dygraph()
         with fluid.dygraph.guard():
             data = np.random.random((2, 80, 16128)).astype('float32')
-            var = core.VarBase(
+            var = core.eager.Tensor(
                 value=data,
                 name='',
                 persistable=False,
diff --git a/python/paddle/fluid/tests/unittests/test_tensor_fill_diagonal_tensor.py b/python/paddle/fluid/tests/unittests/test_tensor_fill_diagonal_tensor.py
index f5902fadf4..0f375cc0ae 100644
--- a/python/paddle/fluid/tests/unittests/test_tensor_fill_diagonal_tensor.py
+++ b/python/paddle/fluid/tests/unittests/test_tensor_fill_diagonal_tensor.py
@@ -19,7 +19,6 @@ import numpy as np
 import paddle
 import paddle.fluid as fluid
 import paddle.nn.functional as F
-from paddle.fluid.framework import _enable_legacy_dygraph
 
 
 class TensorFillDiagTensor_Test(unittest.TestCase):
@@ -216,9 +215,5 @@ class TensorFillDiagTensor_Test(unittest.TestCase):
         fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})
 
 
-class TensorFillDiagTensor_Test_legacy(TensorFillDiagTensor_Test):
-    _enable_legacy_dygraph()
-
-
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/test_uniform_random_inplace_op.py b/python/paddle/fluid/tests/unittests/test_uniform_random_inplace_op.py
index 446df7cd9f..f420209dda 100644
--- a/python/paddle/fluid/tests/unittests/test_uniform_random_inplace_op.py
+++ b/python/paddle/fluid/tests/unittests/test_uniform_random_inplace_op.py
@@ -18,10 +18,6 @@ import numpy as np
 
 import paddle
 import paddle.fluid as fluid
-from paddle.fluid.framework import (
-    _disable_legacy_dygraph,
-    _enable_legacy_dygraph,
-)
 
 
 class TestUniformRandomInplaceOpDtype(unittest.TestCase):
@@ -188,11 +184,6 @@ class TestUniformRandomInplaceGrad(unittest.TestCase):
     def test_uniform_random_inplace_grad(self):
         self.run_()
 
-    def test_uniform_random_inplace_grad_old_dygraph(self):
-        _enable_legacy_dygraph()
-        self.run_()
-        _disable_legacy_dygraph()
-
 
 if __name__ == '__main__':
     unittest.main()
--
GitLab