diff --git a/test/legacy_test/CMakeLists.txt b/test/legacy_test/CMakeLists.txt
index 3e4984832ec0c476971ad91108b51aba63003180..d666116b6ad27907ffedb5fcddc76519ccbc1683 100644
--- a/test/legacy_test/CMakeLists.txt
+++ b/test/legacy_test/CMakeLists.txt
@@ -1189,7 +1189,13 @@ set(TEST_CINN_OPS
     test_tile_op
     test_roll_op
     test_sum_op
-    test_elementwise_min_op)
+    test_elementwise_min_op
+    test_arg_min_max_op
+    test_reverse_op
+    test_flip
+    test_triangular_solve_op
+    test_scatter_nd_op
+    test_strided_slice_op)
 
 foreach(TEST_CINN_OPS ${TEST_CINN_OPS})
   if(WITH_CINN)
diff --git a/test/legacy_test/test_arg_min_max_op.py b/test/legacy_test/test_arg_min_max_op.py
index 769fd7eb5da02f0b1b4343f513cb32774f05baf8..fba469a0e333e88162964e46bdd850d7c38b566a 100644
--- a/test/legacy_test/test_arg_min_max_op.py
+++ b/test/legacy_test/test_arg_min_max_op.py
@@ -42,7 +42,7 @@ class BaseTestCase(OpTest):
         self.outputs = {'Out': np.argmax(self.x, axis=self.axis)}
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_cinn=True)
 
 
 class TestCase0(BaseTestCase):
diff --git a/test/legacy_test/test_flip.py b/test/legacy_test/test_flip.py
index a06ef10ca061304b1354eca03f9b013e450e2227..e899511b150c1e5cdfa15cab23db0a30c2ff6918 100644
--- a/test/legacy_test/test_flip.py
+++ b/test/legacy_test/test_flip.py
@@ -100,10 +100,10 @@ class TestFlipOp(OpTest):
         self.attrs = {"axis": self.axis}
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_cinn=True)
 
     def test_check_grad(self):
-        self.check_grad(["X"], "Out")
+        self.check_grad(["X"], "Out", check_cinn=True)
 
     def init_test_case(self):
         self.in_shape = (6, 4, 2, 3)
@@ -167,12 +167,12 @@ def create_test_fp16_class(parent):
             if core.is_compiled_with_cuda():
                 place = core.CUDAPlace(0)
                 if core.is_float16_supported(place):
-                    self.check_output_with_place(place)
+                    self.check_output_with_place(place, check_cinn=True)
 
         def test_check_grad(self):
             place = core.CUDAPlace(0)
             if core.is_float16_supported(place):
-                self.check_grad_with_place(place, ["X"], "Out")
+                self.check_grad_with_place(place, ["X"], "Out", check_cinn=True)
 
     cls_name = "{}_{}".format(parent.__name__, "FP16OP")
     TestFlipFP16.__name__ = cls_name
diff --git a/test/legacy_test/test_reverse_op.py b/test/legacy_test/test_reverse_op.py
index 2b19496d582b23c02f04de853771e82a9041ef3a..91e90db95468c8dc6680b305522cd292d5c8520f 100644
--- a/test/legacy_test/test_reverse_op.py
+++ b/test/legacy_test/test_reverse_op.py
@@ -37,10 +37,10 @@ class TestReverseOp(OpTest):
         self.outputs = {'Out': out}
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_cinn=True)
 
     def test_check_grad(self):
-        self.check_grad(['X'], 'Out')
+        self.check_grad(['X'], 'Out', check_cinn=True)
 
 
 class TestCase0(TestReverseOp):
diff --git a/test/legacy_test/test_scatter_nd_op.py b/test/legacy_test/test_scatter_nd_op.py
index 66799466c59e4b6356fc33742b281c2722a45edc..ee6a2423e0d2002a66a4c69df2cd790bd1a881b9 100644
--- a/test/legacy_test/test_scatter_nd_op.py
+++ b/test/legacy_test/test_scatter_nd_op.py
@@ -93,7 +93,7 @@ class TestScatterNdAddSimpleOp(OpTest):
         self.dtype = np.float64
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_cinn=True)
 
     def test_check_grad(self):
         self.check_grad(['X', 'Updates'], 'Out', check_prim=True)
@@ -169,7 +169,7 @@ class TestScatterNdAddWithEmptyIndex(OpTest):
         self.dtype = np.float64
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_cinn=True)
 
     def test_check_grad(self):
         self.check_grad(['X', 'Updates'], 'Out', check_prim=True)
@@ -248,7 +248,7 @@ class TestScatterNdAddWithHighRankSame(OpTest):
         self.dtype = np.float64
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_cinn=True)
 
     def test_check_grad(self):
         self.check_grad(['X', 'Updates'], 'Out', check_prim=True)
@@ -311,7 +311,7 @@ class TestScatterNdAddWithHighRankDiff(OpTest):
         self.outputs = {'Out': expect_np}
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_cinn=True)
 
     def test_check_grad(self):
         self.check_grad(['X', 'Updates'], 'Out', check_prim=True)
diff --git a/test/legacy_test/test_strided_slice_op.py b/test/legacy_test/test_strided_slice_op.py
index 304066687814f86fd44e46b45aadc529295fd320..85d71e63a579acf1b5109065b274cf54b72d2b31 100644
--- a/test/legacy_test/test_strided_slice_op.py
+++ b/test/legacy_test/test_strided_slice_op.py
@@ -96,10 +96,10 @@ class TestStrideSliceOp(OpTest):
         }
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_cinn=True)
 
     def test_check_grad(self):
-        self.check_grad({'Input'}, 'Out')
+        self.check_grad({'Input'}, 'Out', check_cinn=True)
 
     def initTestCase(self):
         self.input = np.random.rand(100)
@@ -1032,10 +1032,10 @@ class TestStrideSliceFP16Op(OpTest):
         }
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_cinn=True)
 
     def test_check_grad(self):
-        self.check_grad({'Input'}, 'Out')
+        self.check_grad({'Input'}, 'Out', check_cinn=True)
 
     def initTestCase(self):
         self.input = np.random.rand(100)
diff --git a/test/legacy_test/test_triangular_solve_op.py b/test/legacy_test/test_triangular_solve_op.py
index 520f7baf2580f94b8fcf4e8f8e7673a3c6d6761c..23cb650b14bbe34dd0cd46dbf084dd2fe64140f3 100644
--- a/test/legacy_test/test_triangular_solve_op.py
+++ b/test/legacy_test/test_triangular_solve_op.py
@@ -64,10 +64,10 @@ class TestTriangularSolveOp(OpTest):
         self.outputs = {'Out': self.output}
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_cinn=True)
 
     def test_check_grad_normal(self):
-        self.check_grad(['X', 'Y'], 'Out')
+        self.check_grad(['X', 'Y'], 'Out', check_cinn=True)
 
 
 # 2D(broadcast) + 3D, test 'transpose'