Unverified · commit 9a1fdad3 · authored by ykkk2333 · committed by GitHub

update new unittests of flatten ops and layernorm, *test=kunlun (#43895)

Parent 37f2151f
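The diff below moves each test file to the dtype-parameterized XPU test pattern: a wrapper class derived from XPUOpTestWrapper names the op and holds the concrete XPUOpTest cases, and create_test_class registers one test class per dtype returned by get_xpu_op_support_types, with each case reading its dtype from self.in_type. A rough, self-contained sketch of that idea follows; make_dtype_cases is a hypothetical stand-in, not Paddle's create_test_class.

# Simplified illustration of per-dtype test generation. make_dtype_cases is a
# hypothetical stand-in for create_test_class; the real helpers live in
# xpu.get_test_cover_info inside the Paddle test tree.
import unittest

import numpy as np


class FlattenTestTemplate(unittest.TestCase):
    in_type = None  # injected per generated class, like self.in_type in the tests below

    def test_shape(self):
        if self.in_type is None:
            self.skipTest("bare template; a dtype is injected per subclass")
        x = np.random.random((3, 2, 5, 4)).astype(self.in_type)
        self.assertEqual(x.reshape(3, -1).shape, (3, 40))


def make_dtype_cases(template, dtypes, namespace):
    # Register one concrete subclass per dtype in the module namespace so
    # unittest discovery picks each of them up separately.
    for dtype in dtypes:
        name = "%s_%s" % (template.__name__, np.dtype(dtype).name)
        namespace[name] = type(name, (template,), {"in_type": dtype})


make_dtype_cases(FlattenTestTemplate, [np.float32, np.float16], globals())

if __name__ == "__main__":
    unittest.main()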
......@@ -23,22 +23,32 @@ import paddle
import paddle.fluid as fluid
from op_test import OpTest
from op_test_xpu import XPUOpTest
from xpu.get_test_cover_info import create_test_class, get_xpu_op_support_types, XPUOpTestWrapper
paddle.enable_static()
class TestFlatten2Op(XPUOpTest):
class XPUTestFlatten2Op(XPUOpTestWrapper):
def __init__(self):
self.op_name = 'flatten2'
self.use_dynamic_create_class = False
class TestFlatten2Op(XPUOpTest):
def setUp(self):
self.set_xpu()
self.op_type = "flatten2"
self.dtype = self.in_type
self.place = paddle.XPUPlace(0)
self.init_test_case()
self.inputs = {"X": np.random.random(self.in_shape).astype("float32")}
self.inputs = {
"X": np.random.random(self.in_shape).astype(self.dtype)
}
self.init_attrs()
self.outputs = {
"Out": self.inputs["X"].reshape(self.new_shape),
"XShape": np.random.random(self.in_shape).astype("float32")
"XShape": np.random.random(self.in_shape).astype(self.dtype)
}
def set_xpu(self):
......@@ -58,16 +68,14 @@ class TestFlatten2Op(XPUOpTest):
def init_attrs(self):
self.attrs = {"axis": self.axis}
class TestFlatten2OpWithCornerAxis(TestFlatten2Op):
def init_test_case(self):
self.in_shape = (3, 2, 5, 4)
self.axis = 0
self.new_shape = (1, 120)
class TestFlatten2OpWithDefaultAxis(TestFlatten2Op):
def init_test_case(self):
self.in_shape = (10, 2, 2, 3)
......@@ -76,8 +84,7 @@ class TestFlatten2OpWithDefaultAxis(TestFlatten2Op):
def init_attrs(self):
self.attrs = {}
class TestFlatten2OpSixDims(TestFlatten2Op):
def init_test_case(self):
self.in_shape = (3, 2, 3, 2, 4, 4)
......@@ -85,5 +92,11 @@ class TestFlatten2OpSixDims(TestFlatten2Op):
self.new_shape = (36, 16)
support_types = get_xpu_op_support_types('flatten2')
support_types_for_grad = get_xpu_op_support_types('mean')
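# only generate a test class for dtypes supported by both 'flatten2' and 'mean'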
for stype in support_types:
if stype in support_types_for_grad:
create_test_class(globals(), XPUTestFlatten2Op, stype)
if __name__ == "__main__":
unittest.main()
......@@ -17,7 +17,6 @@ from __future__ import print_function
import sys
sys.path.append("..")
import numpy as np
import unittest
import sys
......@@ -27,11 +26,18 @@ from op_test import OpTest
from op_test_xpu import XPUOpTest
import paddle
import paddle.fluid as fluid
from xpu.get_test_cover_info import create_test_class, get_xpu_op_support_types, XPUOpTestWrapper
paddle.enable_static()
class TestFlattenOp(XPUOpTest):
class XPUTestFlattenOp(XPUOpTestWrapper):
def __init__(self):
self.op_name = 'flatten_contiguous_range'
self.use_dynamic_create_class = False
class TestFlattenOp(XPUOpTest):
def setUp(self):
self.set_xpu()
......@@ -42,13 +48,15 @@ class TestFlattenOp(XPUOpTest):
self.start_axis = 0
self.stop_axis = -1
self.dtype = np.float32
self.dtype = self.in_type
self.init_test_case()
self.inputs = {"X": np.random.random(self.in_shape).astype(self.dtype)}
self.inputs = {
"X": np.random.random(self.in_shape).astype(self.dtype)
}
self.init_attrs()
self.outputs = {
"Out": self.inputs["X"].reshape(self.new_shape),
"XShape": np.random.random(self.in_shape).astype("float32")
"XShape": np.random.random(self.in_shape).astype(self.dtype)
}
def set_xpu(self):
......@@ -73,8 +81,7 @@ class TestFlattenOp(XPUOpTest):
'use_xpu': True,
}
class TestFlattenOp_1(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 5, 4)
......@@ -88,8 +95,7 @@ class TestFlattenOp_1(TestFlattenOp):
"stop_axis": self.stop_axis
}
class TestFlattenOp_2(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 5, 4)
......@@ -103,8 +109,7 @@ class TestFlattenOp_2(TestFlattenOp):
"stop_axis": self.stop_axis
}
class TestFlattenOp_3(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 5, 4)
......@@ -118,8 +123,7 @@ class TestFlattenOp_3(TestFlattenOp):
"stop_axis": self.stop_axis
}
class TestFlattenOp_4(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 5, 4)
......@@ -133,8 +137,7 @@ class TestFlattenOp_4(TestFlattenOp):
"stop_axis": self.stop_axis
}
class TestFlattenOp_5(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 5, 4)
......@@ -148,8 +151,7 @@ class TestFlattenOp_5(TestFlattenOp):
"stop_axis": self.stop_axis
}
class TestFlattenOpSixDims(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 3, 2, 4, 4)
......@@ -163,8 +165,7 @@ class TestFlattenOpSixDims(TestFlattenOp):
"stop_axis": self.stop_axis
}
class TestFlattenOp_Float32(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 5, 4)
......@@ -179,8 +180,7 @@ class TestFlattenOp_Float32(TestFlattenOp):
"stop_axis": self.stop_axis
}
class TestFlattenOp_int32(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 5, 4)
......@@ -199,8 +199,7 @@ class TestFlattenOp_int32(TestFlattenOp):
def test_check_grad(self):
pass
class TestFlattenOp_int8(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 5, 4)
......@@ -218,8 +217,7 @@ class TestFlattenOp_int8(TestFlattenOp):
def test_check_grad(self):
pass
class TestFlattenOp_int64(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 5, 4)
......@@ -338,5 +336,11 @@ class TestFlattenPython(unittest.TestCase):
self.assertTrue((2, 3, 16) == res_shape)
support_types = get_xpu_op_support_types('flatten_contiguous_range')
support_types_for_grad = get_xpu_op_support_types('mean')
for stype in support_types:
if stype in support_types_for_grad:
create_test_class(globals(), XPUTestFlattenOp, stype)
if __name__ == "__main__":
unittest.main()
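The flatten_contiguous_range cases above pair (start_axis, stop_axis) with an expected new_shape. A hedged, numpy-only sketch of how that expected shape can be derived follows; flatten_range_shape is a hypothetical helper, and its negative-axis handling is an assumption rather than Paddle code.

import numpy as np


def flatten_range_shape(shape, start_axis, stop_axis):
    # Collapse dims [start_axis, stop_axis] (inclusive) into one dimension.
    stop_axis = stop_axis % len(shape)  # allow negative axes such as -1
    merged = int(np.prod(shape[start_axis:stop_axis + 1]))
    return shape[:start_axis] + (merged,) + shape[stop_axis + 1:]


assert flatten_range_shape((3, 2, 5, 4), 0, -1) == (120,)    # default case
assert flatten_range_shape((3, 2, 5, 4), 1, 2) == (3, 10, 4)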
......@@ -23,18 +23,28 @@ import paddle
import paddle.fluid as fluid
from op_test import OpTest
from op_test_xpu import XPUOpTest
from xpu.get_test_cover_info import create_test_class, get_xpu_op_support_types, XPUOpTestWrapper
paddle.enable_static()
class TestFlattenOp(XPUOpTest):
class XPUTestFlattenOp(XPUOpTestWrapper):
def __init__(self):
self.op_name = 'flatten'
self.use_dynamic_create_class = False
class TestFlattenOp(XPUOpTest):
def setUp(self):
self.op_type = "flatten"
self.use_xpu = True
self.place = paddle.XPUPlace(0)
self.init_test_case()
self.inputs = {"X": np.random.random(self.in_shape).astype("float32")}
self.dtype = self.in_type
self.inputs = {
"X": np.random.random(self.in_shape).astype(self.dtype)
}
self.init_attrs()
self.outputs = {"Out": self.inputs["X"].reshape(self.new_shape)}
......@@ -52,16 +62,14 @@ class TestFlattenOp(XPUOpTest):
def init_attrs(self):
self.attrs = {"axis": self.axis}
class TestFlattenOp1(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 2, 10)
self.axis = 0
self.new_shape = (1, 120)
class TestFlattenOpWithDefaultAxis(TestFlattenOp):
def init_test_case(self):
self.in_shape = (10, 2, 2, 3)
......@@ -70,8 +78,7 @@ class TestFlattenOpWithDefaultAxis(TestFlattenOp):
def init_attrs(self):
self.attrs = {}
class TestFlattenOpSixDims(TestFlattenOp):
def init_test_case(self):
self.in_shape = (3, 2, 3, 2, 4, 4)
......@@ -79,5 +86,11 @@ class TestFlattenOpSixDims(TestFlattenOp):
self.new_shape = (36, 16)
support_types = get_xpu_op_support_types('flatten')
support_types_for_grad = get_xpu_op_support_types('mean')
for stype in support_types:
if stype in support_types_for_grad:
create_test_class(globals(), XPUTestFlattenOp, stype)
if __name__ == "__main__":
unittest.main()
......@@ -20,7 +20,9 @@ from functools import reduce
sys.path.append("..")
from op_test import OpTest
from op_test_xpu import XPUOpTest
from operator import mul
from xpu.get_test_cover_info import create_test_class, get_xpu_op_support_types, XPUOpTestWrapper
paddle.enable_static()
......@@ -42,19 +44,24 @@ def ref_layer_norm(x, scale, bias, epsilon, begin_norm_axis=1):
return y, mean, variance
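The body of ref_layer_norm is elided by the hunk above. Judging from its signature and the three returned values, a minimal numpy reference could look like the sketch below; this is an assumed reconstruction for illustration, not the file's actual code.

import numpy as np


def layer_norm_reference(x, scale, bias, epsilon, begin_norm_axis=1):
    # Everything before begin_norm_axis indexes a sample; everything from it
    # onward is normalized together (assumed semantics).
    rows = int(np.prod(x.shape[:begin_norm_axis]))
    cols = int(np.prod(x.shape[begin_norm_axis:]))
    x2d = x.reshape(rows, cols)
    mean = x2d.mean(axis=1)
    variance = x2d.var(axis=1)
    normalized = (x2d - mean[:, None]) / np.sqrt(variance[:, None] + epsilon)
    y = (normalized * scale + bias).reshape(x.shape)
    return y, mean, variance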
@unittest.skipIf(not paddle.is_compiled_with_xpu(),
"core is not compiled with XPU")
class TestXPULayerNormOp(OpTest):
class XPUTestLayerNormOp(XPUOpTestWrapper):
def __init__(self):
self.op_name = 'layer_norm'
self.use_dynamic_create_class = False
class TestXPULayerNormOp(XPUOpTest):
def setUp(self):
self.op_type = "layer_norm"
self.dtype = np.float32
self.dtype = self.in_type
self.shape = [2, 3, 4, 5]
self.epsilon = 1e-05
self.begin_norm_axis = 1
self.set_attrs()
right = reduce(mul, self.shape[self.begin_norm_axis:len(self.shape)], 1)
right = reduce(mul,
self.shape[self.begin_norm_axis:len(self.shape)], 1)
np.random.seed(10)
x_np = np.random.uniform(0.1, 1, self.shape).astype(self.dtype)
scale_np = np.random.uniform(0.1, 1, [right]).astype(self.dtype)
......@@ -68,7 +75,10 @@ class TestXPULayerNormOp(OpTest):
'Mean': ref_mean_np,
'Variance': ref_variance_np
}
self.attrs = {'begin_norm_axis': self.begin_norm_axis, 'use_xpu': True}
self.attrs = {
'begin_norm_axis': self.begin_norm_axis,
'use_xpu': True
}
def set_attrs(self):
pass
......@@ -81,38 +91,30 @@ class TestXPULayerNormOp(OpTest):
'Y',
max_relative_error=0.02)
@unittest.skipIf(not paddle.is_compiled_with_xpu(),
"core is not compiled with XPU")
class TestXPULayerNormOpAxis2(TestXPULayerNormOp):
def set_attrs(self):
self.begin_norm_axis = 2
@unittest.skipIf(not paddle.is_compiled_with_xpu(),
"core is not compiled with XPU")
class TestXPULayerNormOpAxis3(TestXPULayerNormOp):
def set_attrs(self):
self.begin_norm_axis = 3
@unittest.skipIf(not paddle.is_compiled_with_xpu(),
"core is not compiled with XPU")
class TestXPULayerNormOp2D(TestXPULayerNormOp):
def set_attrs(self):
self.shape = [10, 12]
@unittest.skipIf(not paddle.is_compiled_with_xpu(),
"core is not compiled with XPU")
class TestXPULayerNormOp3D(TestXPULayerNormOp):
def set_attrs(self):
self.shape = [4, 5, 6]
support_types = get_xpu_op_support_types('layer_norm')
for stype in support_types:
create_test_class(globals(), XPUTestLayerNormOp, stype)
if __name__ == "__main__":
unittest.main()