From f1cdd6546985d2bc7e7edcf36658dcba94141869 Mon Sep 17 00:00:00 2001
From: Wang Xinyu
Date: Thu, 30 Mar 2023 20:23:12 +0800
Subject: [PATCH] [AMP OP&Test] Transpose OP fp16 unittest (#52315)

* transpose fp16 test

* transpose auto tune fp16 test
---
 .../tests/unittests/test_transpose_op.py     | 73 +++++++++++++++++++
 1 file changed, 73 insertions(+)

diff --git a/python/paddle/fluid/tests/unittests/test_transpose_op.py b/python/paddle/fluid/tests/unittests/test_transpose_op.py
index 30c0e917764..4530bd175a0 100644
--- a/python/paddle/fluid/tests/unittests/test_transpose_op.py
+++ b/python/paddle/fluid/tests/unittests/test_transpose_op.py
@@ -200,6 +200,43 @@ class TestAutoTuneTransposeOp(OpTest):
         self.check_grad(['X'], 'Out', check_prim=True)
 
 
+class TestAutoTuneTransposeFP16Op(OpTest):
+    def setUp(self):
+        self.init_op_type()
+        self.initTestCase()
+        self.dtype = np.float16
+        self.python_api = paddle.transpose
+        self.public_python_api = paddle.transpose
+        self.prim_op_type = "prim"
+        self.inputs = {'X': np.random.random(self.shape).astype(self.dtype)}
+        self.attrs = {
+            'axis': list(self.axis),
+            'use_mkldnn': self.use_mkldnn,
+        }
+        self.outputs = {
+            'XShape': np.random.random(self.shape).astype(self.dtype),
+            'Out': self.inputs['X'].transpose(self.axis),
+        }
+
+    def initTestCase(self):
+        fluid.core.set_autotune_range(0, 3)
+        fluid.core.update_autotune_status()
+        fluid.core.enable_autotune()
+        self.shape = (1, 12, 256, 1)
+        self.axis = (0, 3, 2, 1)
+
+    def init_op_type(self):
+        self.op_type = "transpose2"
+        self.use_mkldnn = False
+
+    def test_check_output(self):
+        self.check_output(no_check_set=['XShape'])
+        fluid.core.disable_autotune()
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Out', check_prim=True)
+
+
 class TestAutoTuneTransposeBF16Op(OpTest):
     def setUp(self):
         self.init_op_type()
@@ -241,6 +278,42 @@ class TestAutoTuneTransposeBF16Op(OpTest):
         self.check_grad(['X'], 'Out', check_prim=True)
 
 
+class TestTransposeFP16Op(OpTest):
+    def setUp(self):
+        self.init_op_type()
+        self.initTestCase()
+        self.dtype = np.float16
+        self.prim_op_type = "prim"
+        self.enable_cinn = False
+        self.python_api = paddle.transpose
+        self.public_python_api = paddle.transpose
+        x = np.random.random(self.shape).astype(self.dtype)
+
+        self.inputs = {'X': x}
+        self.attrs = {
+            'axis': list(self.axis),
+            'use_mkldnn': self.use_mkldnn,
+        }
+        self.outputs = {
+            'XShape': np.random.random(self.shape).astype(self.dtype),
+            'Out': self.inputs['X'].transpose(self.axis),
+        }
+
+    def init_op_type(self):
+        self.op_type = "transpose2"
+        self.use_mkldnn = False
+
+    def test_check_output(self):
+        self.check_output(no_check_set=['XShape'])
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Out', check_prim=True)
+
+    def initTestCase(self):
+        self.shape = (3, 40)
+        self.axis = (1, 0)
+
+
 class TestTransposeBF16Op(OpTest):
     def setUp(self):
         self.init_op_type()
-- 
GitLab