diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index 5c6f8139ca938627f9cc97b84726a49c1e5515dc..a3abca8417db8374170b98c68467a3bf00a7a6b5 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -419,7 +419,7 @@ def leaky_relu(x, negative_slope=0.01, name=None):
 
             paddle.disable_static()
 
-            x = paddle.to_tensor(np.array([-2, 0, 1]))
+            x = paddle.to_tensor(np.array([-2, 0, 1], 'float32'))
             out = F.leaky_relu(x) # [-0.02, 0., 1.]
 
     """
@@ -466,11 +466,11 @@ def prelu(x, weight, name=None):
             paddle.disable_static()
 
             data = np.array([[[[-2.0, 3.0, -4.0, 5.0],
-                            [ 3.0, -4.0, 5.0, -6.0],
-                            [-7.0, -8.0, 8.0, 9.0]],
-                            [[ 1.0, -2.0, -3.0, 4.0],
-                            [-5.0, 6.0, 7.0, -8.0],
-                            [ 6.0, 7.0, 8.0, 9.0]]]], 'float32')
+                               [ 3.0, -4.0, 5.0, -6.0],
+                               [-7.0, -8.0, 8.0, 9.0]],
+                              [[ 1.0, -2.0, -3.0, 4.0],
+                               [-5.0, 6.0, 7.0, -8.0],
+                               [ 6.0, 7.0, 8.0, 9.0]]]], 'float32')
             x = paddle.to_tensor(data)
             w = paddle.to_tensor(np.array([0.25]).astype('float32'))
             out = F.prelu(x, w)
diff --git a/python/paddle/nn/layer/activation.py b/python/paddle/nn/layer/activation.py
index ed5913565e98db4eef13b2d313e258ad0617dbd9..dcf037a38d73de86b17819619ca04c684115d684 100644
--- a/python/paddle/nn/layer/activation.py
+++ b/python/paddle/nn/layer/activation.py
@@ -625,7 +625,7 @@ class LeakyReLU(layers.Layer):
             paddle.disable_static()
 
             m = paddle.nn.LeakyReLU()
-            x = paddle.to_tensor(np.array([-2, 0, 1]))
+            x = paddle.to_tensor(np.array([-2, 0, 1], 'float32'))
             out = m(x) # [-0.02, 0., 1.]
 
     """
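
The docstring change adds an explicit 'float32' dtype because the documented results are fractional (e.g. -0.02), so the example input should be a floating-point tensor; np.array([-2, 0, 1]) would otherwise default to an integer dtype. A minimal sketch of the corrected usage pattern from these docstrings (assuming paddle and numpy are installed; the expected values are the ones quoted in the samples):

    import numpy as np
    import paddle
    import paddle.nn.functional as F

    paddle.disable_static()

    # Request float32 explicitly, as the updated docstrings do, so the
    # activation receives a floating-point input rather than an integer one.
    x = paddle.to_tensor(np.array([-2, 0, 1], 'float32'))

    out_fn = F.leaky_relu(x)        # functional form: [-0.02, 0., 1.]
    m = paddle.nn.LeakyReLU()
    out_layer = m(x)                # layer form, same result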