From 2de034e1468076e2b0c5d6eeab462dfc8afee048 Mon Sep 17 00:00:00 2001
From: Qi Li
Date: Wed, 26 Aug 2020 09:21:13 +0800
Subject: [PATCH] fix prelu, test=develop (#26613)

---
 python/paddle/nn/layer/activation.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/python/paddle/nn/layer/activation.py b/python/paddle/nn/layer/activation.py
index bb0bd5f70f1..d30547ffdbe 100644
--- a/python/paddle/nn/layer/activation.py
+++ b/python/paddle/nn/layer/activation.py
@@ -41,6 +41,7 @@ from ...fluid import core
 from ...fluid.framework import in_dygraph_mode
 from ...fluid.param_attr import ParamAttr
 from ...fluid.initializer import Constant
+from paddle.framework import get_default_dtype
 from .. import functional as F


@@ -423,7 +424,7 @@ class PReLU(layers.Layer):
             For more information, please refer to :ref:`api_guide_Name`.

     Shape:
-        - input: Tensor with any shape.
+        - input: Tensor with any shape. Default dtype is float32.
         - output: Tensor with the same shape as input.

     Examples:
         .. code-block:: python

           import paddle
           import numpy as np

           paddle.disable_static()
+          paddle.set_default_dtype("float64")

           data = np.array([[[[-2.0,  3.0, -4.0,  5.0],
                              [ 3.0, -4.0,  5.0, -6.0],
                              [-7.0, -8.0,  8.0,  9.0]],
                             [[ 1.0, -2.0, -3.0,  4.0],
                              [-5.0,  6.0,  7.0, -8.0],
-                             [ 6.0,  7.0,  8.0,  9.0]]]], 'float32')
+                             [ 6.0,  7.0,  8.0,  9.0]]]], 'float64')
           x = paddle.to_tensor(data)
           m = paddle.nn.PReLU(1, 0.25)
           out = m(x)
@@ -461,10 +463,10 @@ class PReLU(layers.Layer):

         self._weight = self.create_parameter(
             attr=self._weight_attr,
-            shape=[num_parameters],
-            dtype='float32',
+            shape=[self._num_parameters],
+            dtype=get_default_dtype(),
             is_bias=False,
-            default_initializer=Constant(init))
+            default_initializer=Constant(self._init))

     def forward(self, x):
         return F.prelu(x, self._weight)
--
GitLab
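
The fix above makes paddle.nn.PReLU create its weight parameter with get_default_dtype() instead of a hard-coded 'float32', so the weight dtype matches inputs created under a non-default dtype such as float64. Below is a minimal standalone sketch of the patched behavior; it reuses the dygraph API calls from the updated docstring example (paddle.disable_static, paddle.set_default_dtype, paddle.to_tensor), and the dtype comments are expectations inferred from the change, not output recorded in the patch.

    import paddle
    import numpy as np

    paddle.disable_static()
    paddle.set_default_dtype("float64")      # PReLU's weight now follows this setting

    data = np.array([[[[-2.0,  3.0, -4.0,  5.0],
                       [ 3.0, -4.0,  5.0, -6.0],
                       [-7.0, -8.0,  8.0,  9.0]],
                      [[ 1.0, -2.0, -3.0,  4.0],
                       [-5.0,  6.0,  7.0, -8.0],
                       [ 6.0,  7.0,  8.0,  9.0]]]], 'float64')
    x = paddle.to_tensor(data)

    m = paddle.nn.PReLU(1, 0.25)             # weight shape [1], initialized to 0.25
    out = m(x)                               # negative entries scaled by 0.25
    print(out.numpy().dtype)                 # expected: float64, matching the weight dtype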