From a6dcaf64017f6b11825e15e4f0988eb1b835e7a2 Mon Sep 17 00:00:00 2001
From: Wen Sun <35923278+HermitSun@users.noreply.github.com>
Date: Wed, 21 Dec 2022 10:31:38 +0800
Subject: [PATCH] Replace `assert np.allclose` with `np.testing.assert_allclose`
 in collective communication unittests (#49195)

* refactor: replace `assert` with `assert_allclose`

* chore: add coverage conf

* revert: remove incorrect coverage conf
---
 ...munication_stream_allgather_api_dygraph.py |  8 ++++---
 ...munication_stream_allreduce_api_dygraph.py |  2 +-
 ...mmunication_stream_alltoall_api_dygraph.py | 16 ++++++++-----
 ...tion_stream_alltoall_single_api_dygraph.py |  8 +++++--
 ...munication_stream_broadcast_api_dygraph.py |  2 +-
 ...communication_stream_reduce_api_dygraph.py |  4 ++--
 ...ation_stream_reduce_scatter_api_dygraph.py | 24 ++++++++++++++-----
 ...ommunication_stream_scatter_api_dygraph.py |  8 +++----
 ...mmunication_stream_sendrecv_api_dygraph.py |  2 +-
 .../unittests/test_collective_api_base.py     |  2 +-
 10 files changed, 49 insertions(+), 27 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_allgather_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_allgather_api_dygraph.py
index b570063578..5be1a4efa4 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_allgather_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_allgather_api_dygraph.py
@@ -59,7 +59,7 @@ class StreamAllgatherTestCase:
         )
         if not self._sync_op:
             task.wait()
-        assert np.allclose(
+        np.testing.assert_allclose(
             empty_tensor_list, test_data_list, rtol=1e-05, atol=1e-05
         )

@@ -73,7 +73,7 @@ class StreamAllgatherTestCase:
         )
         if not self._sync_op:
             task.wait()
-        assert np.allclose(
+        np.testing.assert_allclose(
             full_tensor_list, test_data_list, rtol=1e-05, atol=1e-05
         )

@@ -90,7 +90,9 @@ class StreamAllgatherTestCase:
         )
         if not self._sync_op:
             task.wait()
-        assert np.allclose(out_tensor, result_tensor, rtol=1e-05, atol=1e-05)
+        np.testing.assert_allclose(
+            out_tensor, result_tensor, rtol=1e-05, atol=1e-05
+        )


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_allreduce_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_allreduce_api_dygraph.py
index faea563179..2aa73ce0b4 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_allreduce_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_allreduce_api_dygraph.py
@@ -58,7 +58,7 @@ class StreamAllReduceTestCase:
         for i in range(1, len(test_data_list)):
             result += test_data_list[i]

-        assert np.allclose(tensor, result, rtol=1e-05, atol=1e-05)
+        np.testing.assert_allclose(tensor, result, rtol=1e-05, atol=1e-05)


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_api_dygraph.py
index d5e03662f3..ac95db06d4 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_api_dygraph.py
@@ -75,11 +75,11 @@ class StreamAllToAllTestCase:
             task.wait()
         result_tensor_list = np.vstack(empty_tensor_list)
         if rank == 0:
-            assert np.allclose(
+            np.testing.assert_allclose(
                 result_tensor_list, result1, rtol=1e-05, atol=1e-05
             )
         else:
-            assert np.allclose(
+            np.testing.assert_allclose(
                 result_tensor_list, result2, rtol=1e-05, atol=1e-05
             )

@@ -95,11 +95,11 @@ class StreamAllToAllTestCase:
             task.wait()
         result_tensor_list = np.vstack(full_tensor_list)
         if rank == 0:
-            assert np.allclose(
+            np.testing.assert_allclose(
                 result_tensor_list, result1, rtol=1e-05, atol=1e-05
             )
         else:
-            assert np.allclose(
+            np.testing.assert_allclose(
                 result_tensor_list, result2, rtol=1e-05, atol=1e-05
             )

@@ -114,9 +114,13 @@ class StreamAllToAllTestCase:
         if not self._sync_op:
             task.wait()
         if rank == 0:
-            assert np.allclose(out_tensor, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                out_tensor, result1, rtol=1e-05, atol=1e-05
+            )
         else:
-            assert np.allclose(out_tensor, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                out_tensor, result2, rtol=1e-05, atol=1e-05
+            )


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_single_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_single_api_dygraph.py
index 2ccf0d0146..aa225451b2 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_single_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_single_api_dygraph.py
@@ -72,9 +72,13 @@ class StreamAllToAllSingleTestCase:
         if not self._sync_op:
             task.wait()
         if rank == 0:
-            assert np.allclose(out_tensor, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                out_tensor, result1, rtol=1e-05, atol=1e-05
+            )
         else:
-            assert np.allclose(out_tensor, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                out_tensor, result2, rtol=1e-05, atol=1e-05
+            )


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_broadcast_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_broadcast_api_dygraph.py
index fdc5bbd3af..9e129e3a4d 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_broadcast_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_broadcast_api_dygraph.py
@@ -52,7 +52,7 @@ class StreamBroadcastTestCase:
         if not self._sync_op:
             task.wait()

-        assert np.allclose(tensor, result, rtol=1e-05, atol=1e-05)
+        np.testing.assert_allclose(tensor, result, rtol=1e-05, atol=1e-05)


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_api_dygraph.py
index 8145a93cfe..e030b1d0e4 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_api_dygraph.py
@@ -59,9 +59,9 @@ class StreamReduceTestCase:

         result = sum(test_data_list)
         if rank == 1:
-            assert np.allclose(tensor, result, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(tensor, result, rtol=1e-05, atol=1e-05)
         else:
-            assert np.allclose(
+            np.testing.assert_allclose(
                 tensor, test_data_list[rank], rtol=1e-05, atol=1e-05
             )

diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_scatter_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_scatter_api_dygraph.py
index b204dad74e..7a2d50938c 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_scatter_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_scatter_api_dygraph.py
@@ -67,9 +67,13 @@ class StreamReduceScatterTestCase:
         if not self._sync_op:
             task.wait()
         if rank == 0:
-            assert np.allclose(result_tensor, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result1, rtol=1e-05, atol=1e-05
+            )
         else:
-            assert np.allclose(result_tensor, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result2, rtol=1e-05, atol=1e-05
+            )

         # case 2: pass a pre-sized tensor
         result_tensor = paddle.empty_like(t1)
@@ -82,9 +86,13 @@ class StreamReduceScatterTestCase:
         if not self._sync_op:
             task.wait()
         if rank == 0:
-            assert np.allclose(result_tensor, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result1, rtol=1e-05, atol=1e-05
+            )
         else:
-            assert np.allclose(result_tensor, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result2, rtol=1e-05, atol=1e-05
+            )

         # case 3: test the legacy API
         result_tensor = paddle.empty_like(t1)
@@ -97,9 +105,13 @@ class StreamReduceScatterTestCase:
         if not self._sync_op:
             task.wait()
         if rank == 0:
-            assert np.allclose(result_tensor, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result1, rtol=1e-05, atol=1e-05
+            )
         else:
-            assert np.allclose(result_tensor, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result2, rtol=1e-05, atol=1e-05
+            )


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_scatter_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_scatter_api_dygraph.py
index 155a977a6d..89f816c939 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_scatter_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_scatter_api_dygraph.py
@@ -66,9 +66,9 @@ class StreamScatterTestCase:
         if not self._sync_op:
             task.wait()
         if rank == src_rank:
-            assert np.allclose(t1, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(t1, result2, rtol=1e-05, atol=1e-05)
         else:
-            assert np.allclose(t1, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(t1, result1, rtol=1e-05, atol=1e-05)

         # case 2: pass a pre-sized tensor
         tensor = paddle.to_tensor(src_data)
@@ -83,9 +83,9 @@ class StreamScatterTestCase:
         if not self._sync_op:
             task.wait()
         if rank == src_rank:
-            assert np.allclose(t1, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(t1, result2, rtol=1e-05, atol=1e-05)
         else:
-            assert np.allclose(t1, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(t1, result1, rtol=1e-05, atol=1e-05)


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_sendrecv_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_sendrecv_api_dygraph.py
index a64fe1e7c2..f419e79bf8 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_sendrecv_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_sendrecv_api_dygraph.py
@@ -69,7 +69,7 @@ class StreamSendRecvTestCase:
             task.wait()

         result = test_data_list[src_rank]
-        assert np.allclose(tensor, result, rtol=1e-05, atol=1e-05)
+        np.testing.assert_allclose(tensor, result, rtol=1e-05, atol=1e-05)


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/test_collective_api_base.py b/python/paddle/fluid/tests/unittests/test_collective_api_base.py
index 89fed82b87..d18469c903 100644
--- a/python/paddle/fluid/tests/unittests/test_collective_api_base.py
+++ b/python/paddle/fluid/tests/unittests/test_collective_api_base.py
@@ -396,7 +396,7 @@ class TestDistBase(unittest.TestCase):
             for i in range(result_data.shape[0]):
                 for j in range(result_data.shape[1]):
                     data = result_data[i][j]
-                    assert np.allclose(
+                    np.testing.assert_allclose(
                         tr0_out[1][i][j], need_result[data], atol=1e-08
                     )
         elif col_type == "row_parallel_linear":
--
GitLab
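
For context, the sketch below (not part of the patch; the array values are made up for illustration) shows the behavioural difference this replacement targets: a bare `assert np.allclose(...)` is stripped entirely when Python runs with the -O flag and, on failure, reports nothing beyond "AssertionError", whereas `np.testing.assert_allclose(...)` always executes and, on failure, raises an AssertionError listing the mismatching elements and the maximum absolute and relative differences.

    import numpy as np

    # Hypothetical test data: the two arrays agree within rtol/atol.
    actual = np.array([1.0, 2.0, 3.000001])
    expected = np.array([1.0, 2.0, 3.0])

    # Old style: skipped under `python -O`; a failure gives no diagnostic
    # beyond "AssertionError".
    assert np.allclose(actual, expected, rtol=1e-05, atol=1e-05)

    # New style: always runs; a failure prints a per-element mismatch report
    # with the max absolute and relative differences.
    np.testing.assert_allclose(actual, expected, rtol=1e-05, atol=1e-05)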