diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_allgather_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_allgather_api_dygraph.py
index b5700635787f49beb353ecd36ae6ba964b94eb94..5be1a4efa4522d0177a7b1d0d7e38df4e97e71f6 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_allgather_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_allgather_api_dygraph.py
@@ -59,7 +59,7 @@ class StreamAllgatherTestCase:
         )
         if not self._sync_op:
             task.wait()
-        assert np.allclose(
+        np.testing.assert_allclose(
             empty_tensor_list, test_data_list, rtol=1e-05, atol=1e-05
         )

@@ -73,7 +73,7 @@ class StreamAllgatherTestCase:
         )
         if not self._sync_op:
             task.wait()
-        assert np.allclose(
+        np.testing.assert_allclose(
             full_tensor_list, test_data_list, rtol=1e-05, atol=1e-05
         )

@@ -90,7 +90,9 @@ class StreamAllgatherTestCase:
         )
         if not self._sync_op:
             task.wait()
-        assert np.allclose(out_tensor, result_tensor, rtol=1e-05, atol=1e-05)
+        np.testing.assert_allclose(
+            out_tensor, result_tensor, rtol=1e-05, atol=1e-05
+        )


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_allreduce_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_allreduce_api_dygraph.py
index faea563179201f25290929a287cf7951724e6126..2aa73ce0b4d742c624477991e63fdad41df61265 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_allreduce_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_allreduce_api_dygraph.py
@@ -58,7 +58,7 @@ class StreamAllReduceTestCase:
         for i in range(1, len(test_data_list)):
             result += test_data_list[i]

-        assert np.allclose(tensor, result, rtol=1e-05, atol=1e-05)
+        np.testing.assert_allclose(tensor, result, rtol=1e-05, atol=1e-05)


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_api_dygraph.py
index d5e03662f3622739d20757a3159e87545d33e1a9..ac95db06d472663e8939282c0655e2cabc0fa6eb 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_api_dygraph.py
@@ -75,11 +75,11 @@ class StreamAllToAllTestCase:
             task.wait()
         result_tensor_list = np.vstack(empty_tensor_list)
         if rank == 0:
-            assert np.allclose(
+            np.testing.assert_allclose(
                 result_tensor_list, result1, rtol=1e-05, atol=1e-05
             )
         else:
-            assert np.allclose(
+            np.testing.assert_allclose(
                 result_tensor_list, result2, rtol=1e-05, atol=1e-05
             )

@@ -95,11 +95,11 @@ class StreamAllToAllTestCase:
             task.wait()
         result_tensor_list = np.vstack(full_tensor_list)
         if rank == 0:
-            assert np.allclose(
+            np.testing.assert_allclose(
                 result_tensor_list, result1, rtol=1e-05, atol=1e-05
             )
         else:
-            assert np.allclose(
+            np.testing.assert_allclose(
                 result_tensor_list, result2, rtol=1e-05, atol=1e-05
             )

@@ -114,9 +114,13 @@ class StreamAllToAllTestCase:
         if not self._sync_op:
             task.wait()
         if rank == 0:
-            assert np.allclose(out_tensor, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                out_tensor, result1, rtol=1e-05, atol=1e-05
+            )
         else:
-            assert np.allclose(out_tensor, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                out_tensor, result2, rtol=1e-05, atol=1e-05
+            )


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_single_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_single_api_dygraph.py
index 2ccf0d0146a12463fa3a04cb26d9e886bd16113c..aa225451b24081f6426c11910705b5bfe8106634 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_single_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_alltoall_single_api_dygraph.py
@@ -72,9 +72,13 @@ class StreamAllToAllSingleTestCase:
         if not self._sync_op:
             task.wait()
         if rank == 0:
-            assert np.allclose(out_tensor, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                out_tensor, result1, rtol=1e-05, atol=1e-05
+            )
         else:
-            assert np.allclose(out_tensor, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                out_tensor, result2, rtol=1e-05, atol=1e-05
+            )


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_broadcast_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_broadcast_api_dygraph.py
index fdc5bbd3af5f908232682ee3e0289ee4cba0cc7a..9e129e3a4da846ae31917751b915b6f48345e18d 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_broadcast_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_broadcast_api_dygraph.py
@@ -52,7 +52,7 @@ class StreamBroadcastTestCase:
         if not self._sync_op:
             task.wait()

-        assert np.allclose(tensor, result, rtol=1e-05, atol=1e-05)
+        np.testing.assert_allclose(tensor, result, rtol=1e-05, atol=1e-05)


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_api_dygraph.py
index 8145a93cfea11240c687627e6fedbea3cb82f46d..e030b1d0e4ed0803f4de19a1bcc37e60582f5768 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_api_dygraph.py
@@ -59,9 +59,9 @@ class StreamReduceTestCase:

         result = sum(test_data_list)
         if rank == 1:
-            assert np.allclose(tensor, result, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(tensor, result, rtol=1e-05, atol=1e-05)
         else:
-            assert np.allclose(
+            np.testing.assert_allclose(
                 tensor, test_data_list[rank], rtol=1e-05, atol=1e-05
             )

diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_scatter_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_scatter_api_dygraph.py
index b204dad74e754f6418ff178ebd2ef4127b69d489..7a2d50938c238fa8ab36a1c51656583d1cf6f7f7 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_scatter_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_reduce_scatter_api_dygraph.py
@@ -67,9 +67,13 @@ class StreamReduceScatterTestCase:
         if not self._sync_op:
             task.wait()
         if rank == 0:
-            assert np.allclose(result_tensor, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result1, rtol=1e-05, atol=1e-05
+            )
         else:
-            assert np.allclose(result_tensor, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result2, rtol=1e-05, atol=1e-05
+            )

         # case 2: pass a pre-sized tensor
         result_tensor = paddle.empty_like(t1)
@@ -82,9 +86,13 @@ class StreamReduceScatterTestCase:
         if not self._sync_op:
             task.wait()
         if rank == 0:
-            assert np.allclose(result_tensor, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result1, rtol=1e-05, atol=1e-05
+            )
         else:
-            assert np.allclose(result_tensor, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result2, rtol=1e-05, atol=1e-05
+            )

         # case 3: test the legacy API
         result_tensor = paddle.empty_like(t1)
@@ -97,9 +105,13 @@ class StreamReduceScatterTestCase:
         if not self._sync_op:
             task.wait()
         if rank == 0:
-            assert np.allclose(result_tensor, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result1, rtol=1e-05, atol=1e-05
+            )
         else:
-            assert np.allclose(result_tensor, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(
+                result_tensor, result2, rtol=1e-05, atol=1e-05
+            )


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_scatter_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_scatter_api_dygraph.py
index 155a977a6d95e10bde222fa3a4d7b9272cf8664a..89f816c939a01406d9f3203b86ea25d9fe35251a 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_scatter_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_scatter_api_dygraph.py
@@ -66,9 +66,9 @@ class StreamScatterTestCase:
         if not self._sync_op:
             task.wait()
         if rank == src_rank:
-            assert np.allclose(t1, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(t1, result2, rtol=1e-05, atol=1e-05)
         else:
-            assert np.allclose(t1, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(t1, result1, rtol=1e-05, atol=1e-05)

         # case 2: pass a pre-sized tensor
         tensor = paddle.to_tensor(src_data)
@@ -83,9 +83,9 @@ class StreamScatterTestCase:
         if not self._sync_op:
             task.wait()
         if rank == src_rank:
-            assert np.allclose(t1, result2, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(t1, result2, rtol=1e-05, atol=1e-05)
         else:
-            assert np.allclose(t1, result1, rtol=1e-05, atol=1e-05)
+            np.testing.assert_allclose(t1, result1, rtol=1e-05, atol=1e-05)


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/collective/communication_stream_sendrecv_api_dygraph.py b/python/paddle/fluid/tests/unittests/collective/communication_stream_sendrecv_api_dygraph.py
index a64fe1e7c2a2d304f13da54e71075c2e00ff34a0..f419e79bf86bbee0afb2dcd25881868e9a882501 100644
--- a/python/paddle/fluid/tests/unittests/collective/communication_stream_sendrecv_api_dygraph.py
+++ b/python/paddle/fluid/tests/unittests/collective/communication_stream_sendrecv_api_dygraph.py
@@ -69,7 +69,7 @@ class StreamSendRecvTestCase:
             task.wait()

         result = test_data_list[src_rank]
-        assert np.allclose(tensor, result, rtol=1e-05, atol=1e-05)
+        np.testing.assert_allclose(tensor, result, rtol=1e-05, atol=1e-05)


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/test_collective_api_base.py b/python/paddle/fluid/tests/unittests/test_collective_api_base.py
index 89fed82b87cc32153ce81c4451a9a7a5e779e911..d18469c90393c4988f5aa73ed124ae690f0b192e 100644
--- a/python/paddle/fluid/tests/unittests/test_collective_api_base.py
+++ b/python/paddle/fluid/tests/unittests/test_collective_api_base.py
@@ -396,7 +396,7 @@ class TestDistBase(unittest.TestCase):
             for i in range(result_data.shape[0]):
                 for j in range(result_data.shape[1]):
                     data = result_data[i][j]
-                    assert np.allclose(
+                    np.testing.assert_allclose(
                         tr0_out[1][i][j], need_result[data], atol=1e-08
                     )
         elif col_type == "row_parallel_linear":