未验证 提交 f1cdd654 编写于 作者: W Wang Xinyu 提交者: GitHub

[AMP OP&Test] Transpose OP fp16 unitest (#52315)

* transpose fp16 test

* transpose auto tune fp16 test
上级 aeb8c2e2
...@@ -200,6 +200,43 @@ class TestAutoTuneTransposeOp(OpTest): ...@@ -200,6 +200,43 @@ class TestAutoTuneTransposeOp(OpTest):
self.check_grad(['X'], 'Out', check_prim=True) self.check_grad(['X'], 'Out', check_prim=True)
class TestAutoTuneTransposeFP16Op(OpTest):
    """transpose2 OpTest variant that runs the kernel auto-tuner with float16 data."""

    def setUp(self):
        self.init_op_type()
        self.initTestCase()
        self.dtype = np.float16
        self.python_api = paddle.transpose
        self.public_python_api = paddle.transpose
        self.prim_op_type = "prim"
        x = np.random.random(self.shape).astype(self.dtype)
        self.inputs = {'X': x}
        self.attrs = dict(axis=list(self.axis), use_mkldnn=self.use_mkldnn)
        # 'XShape' is excluded from comparison via no_check_set below, so any
        # array of the right shape/dtype serves as a placeholder.
        self.outputs = {
            'XShape': np.random.random(self.shape).astype(self.dtype),
            'Out': x.transpose(self.axis),
        }

    def initTestCase(self):
        # Enable the auto-tune machinery for tuning steps [0, 3) before the
        # op runs; it is switched off again in test_check_output.
        fluid.core.set_autotune_range(0, 3)
        fluid.core.update_autotune_status()
        fluid.core.enable_autotune()
        self.shape = (1, 12, 256, 1)
        self.axis = (0, 3, 2, 1)

    def init_op_type(self):
        self.op_type = "transpose2"
        self.use_mkldnn = False

    def test_check_output(self):
        self.check_output(no_check_set=['XShape'])
        fluid.core.disable_autotune()

    def test_check_grad(self):
        self.check_grad(['X'], 'Out', check_prim=True)
class TestAutoTuneTransposeBF16Op(OpTest): class TestAutoTuneTransposeBF16Op(OpTest):
def setUp(self): def setUp(self):
self.init_op_type() self.init_op_type()
...@@ -241,6 +278,42 @@ class TestAutoTuneTransposeBF16Op(OpTest): ...@@ -241,6 +278,42 @@ class TestAutoTuneTransposeBF16Op(OpTest):
self.check_grad(['X'], 'Out', check_prim=True) self.check_grad(['X'], 'Out', check_prim=True)
class TestTransposeFP16Op(OpTest):
    """Plain transpose2 OpTest exercised with float16 inputs (CINN disabled)."""

    def setUp(self):
        self.init_op_type()
        self.initTestCase()
        self.dtype = np.float16
        self.prim_op_type = "prim"
        self.enable_cinn = False
        self.python_api = paddle.transpose
        self.public_python_api = paddle.transpose
        data = np.random.random(self.shape).astype(self.dtype)
        self.inputs = {'X': data}
        self.attrs = dict(axis=list(self.axis), use_mkldnn=self.use_mkldnn)
        # 'XShape' is never compared (see no_check_set in test_check_output),
        # so random data of the right shape/dtype is sufficient.
        self.outputs = {
            'XShape': np.random.random(self.shape).astype(self.dtype),
            'Out': data.transpose(self.axis),
        }

    def initTestCase(self):
        self.shape = (3, 40)
        self.axis = (1, 0)

    def init_op_type(self):
        self.op_type = "transpose2"
        self.use_mkldnn = False

    def test_check_output(self):
        self.check_output(no_check_set=['XShape'])

    def test_check_grad(self):
        self.check_grad(['X'], 'Out', check_prim=True)
class TestTransposeBF16Op(OpTest): class TestTransposeBF16Op(OpTest):
def setUp(self): def setUp(self):
self.init_op_type() self.init_op_type()
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册