diff --git a/python/paddle/fluid/tests/unittests/test_fusion_gru_op.py b/python/paddle/fluid/tests/unittests/test_fusion_gru_op.py
index 764f83b534c8a183dbf21511f0b05741c13c9528..36ebc8fb6ea9efdcd1807f5c8917ab1428b3381e 100644
--- a/python/paddle/fluid/tests/unittests/test_fusion_gru_op.py
+++ b/python/paddle/fluid/tests/unittests/test_fusion_gru_op.py
@@ -37,7 +37,7 @@ def fusion_gru(
                h0,
                wh,
                np.zeros(
-                   (1, wh.shape[1]), dtype='float64'),
+                   (1, wh.shape[1]), dtype='float32'),
                is_reverse,
                act_state,
                act_gate)
@@ -62,15 +62,15 @@ class TestFusionGRUOp(OpTest):
         T = sum(self.lod[0])
         N = len(self.lod[0])
 
-        x = np.random.rand(T, self.M).astype('float64')
-        wx = np.random.rand(self.M, 3 * self.D).astype('float64')
-        wh = np.random.rand(self.D, 3 * self.D).astype('float64')
+        x = np.random.rand(T, self.M).astype('float32')
+        wx = np.random.rand(self.M, 3 * self.D).astype('float32')
+        wh = np.random.rand(self.D, 3 * self.D).astype('float32')
         bias = np.random.rand(
-            1, 3 * self.D).astype('float64') if self.with_bias else np.zeros(
-                (1, 3 * self.D), dtype='float64')
+            1, 3 * self.D).astype('float32') if self.with_bias else np.zeros(
+                (1, 3 * self.D), dtype='float32')
         h0 = np.random.rand(
-            N, self.D).astype('float64') if self.with_h0 else np.zeros(
-                (N, self.D), dtype='float64')
+            N, self.D).astype('float32') if self.with_h0 else np.zeros(
+                (N, self.D), dtype='float32')
 
         _, _, _, hidden = fusion_gru(
             x, self.lod, h0, wx, wh, bias, self.is_reverse,
@@ -93,7 +93,9 @@ class TestFusionGRUOp(OpTest):
         }
 
     def test_check_output(self):
-        self.check_output(atol=1e-8)
+        for use_seq in {True, False}:
+            self.attrs['use_seq'] = use_seq
+            self.check_output()
 
 
 class TestFusionGRUOpNoInitial(TestFusionGRUOp):
diff --git a/python/paddle/fluid/tests/unittests/test_fusion_lstm_op.py b/python/paddle/fluid/tests/unittests/test_fusion_lstm_op.py
index 5805bdf461998e90611dec05b079cd55feda520d..1f1eb37667e304351a6a85edde09e7da32cf1630 100644
--- a/python/paddle/fluid/tests/unittests/test_fusion_lstm_op.py
+++ b/python/paddle/fluid/tests/unittests/test_fusion_lstm_op.py
@@ -114,7 +114,9 @@ class TestFusionLSTMOp(OpTest):
         }
 
     def test_check_output(self):
-        self.check_output()
+        for use_seq in {True, False}:
+            self.attrs['use_seq'] = use_seq
+            self.check_output()
 
 
 class TestFusionLSTMOpInit(TestFusionLSTMOp):
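
Note (not part of the patch): both hunks that touch `test_check_output` replace a single check with a loop that reruns the output check once per value of the `use_seq` attribute, mutating `self.attrs` before each call so the op is exercised in both the sequence and batched code paths. A minimal standalone sketch of that pattern is below; `FakeOpTest` and its methods are hypothetical stand-ins, not real PaddlePaddle classes.

    # Hedged sketch of the looped-attribute check pattern used in the patch.
    class FakeOpTest:
        def __init__(self):
            # In the real tests, attrs is built in setUp() from the op config.
            self.attrs = {'is_reverse': False}

        def check_output(self):
            # A real OpTest would run the fused kernel with self.attrs here
            # and compare against the reference implementation.
            print('checking with attrs:', self.attrs)

        def test_check_output(self):
            # Same structure as the patched tests: repeat the check for
            # every value of the 'use_seq' attribute.
            for use_seq in {True, False}:
                self.attrs['use_seq'] = use_seq
                self.check_output()

    FakeOpTest().test_check_output()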