Unverified · Commit 42065ba3 authored by Double_V, committed by GitHub

fix activate_nn_grad, test=develop (#27555)

Parent b9d739a7
@@ -78,15 +78,17 @@ class TestLeakyReluDoubleGradCheck(unittest.TestCase):
 class TestELUDoubleGradCheck(unittest.TestCase):
     @prog_scope()
     def func(self, place):
-        shape = [2, 3, 7, 9]
+        shape = [2, 3, 6, 6]
         eps = 1e-6
         alpha = 1.1
         dtype = np.float64
+        SEED = 0
         x = layers.data('x', shape, False, dtype)
         x.persistable = True
         y = layers.elu(x, alpha=alpha)
+        np.random.RandomState(SEED)
         x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
         gradient_checker.double_grad_check(
             [x], y, x_init=x_arr, place=place, eps=eps)
...
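A note on the added seeding line: `np.random.RandomState(SEED)` constructs a new generator object and immediately discards it, so by itself it does not affect the global RNG that the following `np.random.uniform` call draws from. A minimal sketch of two ways to actually make the draw deterministic (variable names follow the diff; this sketch is illustrative, not part of the commit):

import numpy as np

SEED = 0
shape = [2, 3, 6, 6]
dtype = np.float64

# Option 1: seed the module-level NumPy RNG, then keep using np.random.*
np.random.seed(SEED)
x_arr = np.random.uniform(-1, 1, shape).astype(dtype)

# Option 2: hold on to the generator object and draw from it directly
rng = np.random.RandomState(SEED)
x_arr = rng.uniform(-1, 1, shape).astype(dtype)

Either form gives the gradient check a reproducible input; the committed line as written leaves the draw unseeded.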