diff --git a/python/paddle/fluid/tests/unittests/test_activation_nn_grad.py b/python/paddle/fluid/tests/unittests/test_activation_nn_grad.py
index e8b8a45fb677568947be82a1c77e6f2e7a17cdc1..c97cca654a7c47c1581c94a242eac9554bc87887 100644
--- a/python/paddle/fluid/tests/unittests/test_activation_nn_grad.py
+++ b/python/paddle/fluid/tests/unittests/test_activation_nn_grad.py
@@ -78,15 +78,17 @@ class TestLeakyReluDoubleGradCheck(unittest.TestCase):
 class TestELUDoubleGradCheck(unittest.TestCase):
     @prog_scope()
     def func(self, place):
-        shape = [2, 3, 7, 9]
+        shape = [2, 3, 6, 6]
         eps = 1e-6
         alpha = 1.1
         dtype = np.float64
+        SEED = 0

         x = layers.data('x', shape, False, dtype)
         x.persistable = True
         y = layers.elu(x, alpha=alpha)
+        np.random.RandomState(SEED)
         x_arr = np.random.uniform(-1, 1, shape).astype(dtype)

         gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps)
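
Note on the seeding line: `np.random.RandomState(SEED)` constructs a standalone generator and discards the return value, so the following `np.random.uniform` call still draws from the unseeded module-level state. The sketch below is not part of the patch; it only illustrates, under the assumption that the intent is a reproducible `x_init` array, two standard NumPy ways to make the draw deterministic (reusing the `SEED`, `shape`, and `dtype` values from the diff).

```python
import numpy as np

SEED = 0
shape = [2, 3, 6, 6]
dtype = np.float64

# Option A: seed the global NumPy state so np.random.uniform is deterministic.
np.random.seed(SEED)
x_arr = np.random.uniform(-1, 1, shape).astype(dtype)

# Option B: keep a local generator and draw from it explicitly.
rng = np.random.RandomState(SEED)
x_arr = rng.uniform(-1, 1, shape).astype(dtype)
```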