From 17fec4e9aae895c513d9b07af55adf56059c5e42 Mon Sep 17 00:00:00 2001
From: cyberslack_lee
Date: Tue, 11 Apr 2023 11:19:51 +0800
Subject: [PATCH] 【Hackathon4 No58】empty_like fp16&bf16 API test (#52668)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../tests/unittests/test_empty_like_op.py    | 90 +++++++++++++++++--
 python/paddle/tensor/attribute.py            |  1 +
 python/paddle/tensor/creation.py             | 20 ++++-
 3 files changed, 102 insertions(+), 9 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/test_empty_like_op.py b/python/paddle/fluid/tests/unittests/test_empty_like_op.py
index 8ccaabd7c2c..164275b1a7d 100644
--- a/python/paddle/fluid/tests/unittests/test_empty_like_op.py
+++ b/python/paddle/fluid/tests/unittests/test_empty_like_op.py
@@ -15,6 +15,7 @@
 import unittest
 
 import numpy as np
+from eager_op_test import convert_uint16_to_float
 
 import paddle
 from paddle.fluid import core
@@ -38,7 +39,7 @@ class TestEmptyLikeAPICommon(unittest.TestCase):
             f'shape should be {self.dst_shape}, but get {shape}',
         )
 
-        if data_type in ['float32', 'float64', 'int32', 'int64']:
+        if data_type in ['float16', 'float32', 'float64', 'int32', 'int64']:
             max_value = np.nanmax(out)
             min_value = np.nanmin(out)
             always_non_full_zero = max_value >= min_value
@@ -47,6 +48,16 @@ class TestEmptyLikeAPICommon(unittest.TestCase):
                 always_full_zero or always_non_full_zero,
                 'always_full_zero or always_non_full_zero.',
             )
+        elif data_type in ['uint16']:
+            uout = convert_uint16_to_float(out)
+            max_value = np.nanmax(uout)
+            min_value = np.nanmin(uout)
+            always_non_full_zero = max_value >= min_value
+            always_full_zero = max_value == 0.0 and min_value == 0.0
+            self.assertTrue(
+                always_full_zero or always_non_full_zero,
+                'always_full_zero or always_non_full_zero.',
+            )
         elif data_type in ['bool']:
             total_num = out.size
             true_num = np.sum(out)
@@ -154,16 +165,13 @@ class TestEmptyLikeAPI_Static(TestEmptyLikeAPICommon):
 
     def test_static_graph(self):
         paddle.enable_static()
-
-        dtype = 'float32'
-
         train_program = Program()
         startup_program = Program()
 
         with program_guard(train_program, startup_program):
-            x = np.random.random(self.x_shape).astype(dtype)
+            x = np.random.random(self.x_shape).astype(self.dtype)
             data_x = paddle.static.data(
-                'x', shape=self.data_x_shape, dtype=dtype
+                'x', shape=self.data_x_shape, dtype=self.dtype
             )
 
             out = paddle.empty_like(data_x)
@@ -176,7 +184,7 @@ class TestEmptyLikeAPI_Static(TestEmptyLikeAPICommon):
             exe = paddle.static.Executor(place)
             res = exe.run(train_program, feed={'x': x}, fetch_list=[out])
 
-        self.dst_dtype = dtype
+        self.dst_dtype = self.dtype
         self.dst_shape = x.shape
 
         self.__check_out__(res[0])
@@ -185,12 +193,80 @@ class TestEmptyLikeAPI_Static(TestEmptyLikeAPICommon):
     def init_config(self):
         self.x_shape = (200, 3)
         self.data_x_shape = [200, 3]
+        self.dtype = 'float32'
 
 
 class TestEmptyLikeAPI_Static2(TestEmptyLikeAPI_Static):
     def init_config(self):
         self.x_shape = (3, 200, 3)
         self.data_x_shape = [-1, 200, 3]
+        self.dtype = 'float32'
+
+
+class TestEmptyLikeAPI_StaticForFP16Op(TestEmptyLikeAPICommon):
+    def setUp(self):
+        self.init_config()
+
+    def init_config(self):
+        self.x_shape = (200, 3)
+        self.data_x_shape = [200, 3]
+        self.dtype = 'float16'
+
+    def test_static_graph(self):
+        paddle.enable_static()
+        if paddle.fluid.core.is_compiled_with_cuda():
+            place = paddle.CUDAPlace(0)
+            with paddle.static.program_guard(
+                paddle.static.Program(), paddle.static.Program()
+            ):
+                x = np.random.random([200, 3]).astype(self.dtype)
+                data_x = paddle.static.data(
+                    name="x", shape=[200, 3], dtype=self.dtype
+                )
+                out = paddle.empty_like(data_x)
+                exe = paddle.static.Executor(place)
+                res = exe.run(
+                    paddle.static.default_main_program(),
+                    feed={'x': x},
+                    fetch_list=[out],
+                )
+
+            self.dst_dtype = self.dtype
+            self.dst_shape = x.shape
+            self.__check_out__(res[0])
+
+
+class TestEmptyLikeAPI_StaticForBF16Op(TestEmptyLikeAPICommon):
+    def setUp(self):
+        self.init_config()
+
+    def init_config(self):
+        self.x_shape = (200, 3)
+        self.data_x_shape = [200, 3]
+        self.dtype = 'uint16'
+
+    def test_static_graph(self):
+        paddle.enable_static()
+        if paddle.fluid.core.is_compiled_with_cuda():
+            place = paddle.CUDAPlace(0)
+            with paddle.static.program_guard(
+                paddle.static.Program(), paddle.static.Program()
+            ):
+                x = np.random.random([200, 3]).astype(np.uint16)
+                data_x = paddle.static.data(
+                    name="x", shape=[200, 3], dtype=np.uint16
+                )
+                out = paddle.empty_like(data_x)
+                exe = paddle.static.Executor(place)
+                res = exe.run(
+                    paddle.static.default_main_program(),
+                    feed={'x': x},
+                    fetch_list=[out],
+                )
+
+            self.dst_dtype = self.dtype
+            self.dst_shape = x.shape
+            self.__check_out__(res[0])
 
 
 class TestEmptyError(unittest.TestCase):
diff --git a/python/paddle/tensor/attribute.py b/python/paddle/tensor/attribute.py
index 7a859d64d0c..63af833747b 100644
--- a/python/paddle/tensor/attribute.py
+++ b/python/paddle/tensor/attribute.py
@@ -120,6 +120,7 @@ def shape(input):
             'int64',
             'complex64',
             'complex128',
+            'uint16',
         ],
         'shape',
     )
diff --git a/python/paddle/tensor/creation.py b/python/paddle/tensor/creation.py
index 602fa7186ec..99d9ad594c1 100644
--- a/python/paddle/tensor/creation.py
+++ b/python/paddle/tensor/creation.py
@@ -1954,13 +1954,29 @@ def empty_like(x, dtype=None, name=None):
     check_variable_and_dtype(
         x,
         'x',
-        ['bool', 'float16', 'float32', 'float64', 'int32', 'int64'],
+        [
+            'bool',
+            'float16',
+            'float32',
+            'float64',
+            'int32',
+            'int64',
+            'uint16',
+        ],
         'empty_like',
     )
     check_dtype(
         dtype,
         'dtype',
-        ['bool', 'float16', 'float32', 'float64', 'int32', 'int64'],
+        [
+            'bool',
+            'float16',
+            'float32',
+            'float64',
+            'int32',
+            'int64',
+            'uint16',
+        ],
         'empty_like',
    )
     out = helper.create_variable_for_type_inference(dtype=dtype)
-- 
GitLab
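
For reference, a minimal dygraph sketch (not part of the patch) of the behavior the new
tests exercise: paddle.empty_like on a float16 input on GPU. The bfloat16 case is the
static-graph 'uint16' path covered by TestEmptyLikeAPI_StaticForBF16Op above. The sketch
assumes a CUDA build of Paddle, mirroring the is_compiled_with_cuda() guard in the tests;
the variable names are illustrative only.

    import numpy as np
    import paddle

    # empty_like allocates an uninitialized tensor with the same shape and dtype as
    # its input, so only shape and dtype are checked; the element values are arbitrary.
    if paddle.is_compiled_with_cuda():
        paddle.set_device('gpu')
        x = paddle.to_tensor(np.random.random([200, 3]).astype('float16'))
        out = paddle.empty_like(x)
        assert out.shape == [200, 3]
        assert out.dtype == paddle.float16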