diff --git a/python/paddle/fluid/tests/unittests/test_selu_op.py b/python/paddle/fluid/tests/unittests/test_selu_op.py
index 590ef11e9cb5de7414ff8745b719e3ffb4e044d8..b5a2e84a53ef621f3be81b90d02c10d28fe18162 100644
--- a/python/paddle/fluid/tests/unittests/test_selu_op.py
+++ b/python/paddle/fluid/tests/unittests/test_selu_op.py
@@ -130,6 +130,11 @@ class TestSeluAPI(unittest.TestCase):
             # The input dtype must be float16, float32, float64.
             x_int32 = paddle.data(name='x_int32', shape=[12, 10], dtype='int32')
             self.assertRaises(TypeError, F.selu, x_int32)
+            # The scale must be greater than 1.0
+            x_fp32 = paddle.data(name='x_fp32', shape=[12, 10], dtype='float32')
+            self.assertRaises(ValueError, F.selu, x_fp32, -1.0)
+            # The alpha must be no less than 0
+            self.assertRaises(ValueError, F.selu, x_fp32, 1.6, -1.0)
             # support the input dtype is float16
             x_fp16 = paddle.data(name='x_fp16', shape=[12, 10], dtype='float16')
             F.selu(x_fp16)
diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index 1e9a2e6eb7dd5ac2580478e8c276ee58096995eb..ffedb027330bda94db86dc0943a5c4a7281f254f 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -652,8 +652,8 @@ def selu(x,
 
     Parameters:
         x (Tensor): The input Tensor with data type float32, float64.
-        scale (float, optional): The value of scale for selu. Default is 1.0507009873554804934193349852946
-        alpha (float, optional): The value of alpha for selu. Default is 1.6732632423543772848170429916717
+        scale (float, optional): The value of scale (must be greater than 1.0) for selu. Default is 1.0507009873554804934193349852946
+        alpha (float, optional): The value of alpha (must be no less than zero) for selu. Default is 1.6732632423543772848170429916717
         name (str, optional): Name for the operation (optional, default is None).
             For more information, please refer to :ref:`api_guide_Name`.
 
@@ -672,6 +672,14 @@ def selu(x,
            x = paddle.to_tensor(np.array([[0.0, 1.0],[2.0, 3.0]]))
            out = F.selu(x) # [[0, 1.050701],[2.101402, 3.152103]]
    """
+    if scale <= 1.0:
+        raise ValueError(
+            "The scale must be greater than 1.0. Received: {}.".format(scale))
+
+    if alpha < 0:
+        raise ValueError(
+            "The alpha must be no less than zero. Received: {}.".format(alpha))
+
     if in_dygraph_mode():
         return core.ops.selu(x, 'scale', scale, 'alpha', alpha)
 
diff --git a/python/paddle/nn/layer/activation.py b/python/paddle/nn/layer/activation.py
index 3dc7bf7115412ad68d5f4ffb4487ba1c0224cd2b..c38d6018a2500111280a482aa60d072e65e27742 100644
--- a/python/paddle/nn/layer/activation.py
+++ b/python/paddle/nn/layer/activation.py
@@ -559,8 +559,8 @@ class SELU(layers.Layer):
         \\end{cases}
 
     Parameters:
-        scale (float, optional): The value of scale for SELU. Default is 1.0507009873554804934193349852946
-        alpha (float, optional): The value of alpha for SELU. Default is 1.6732632423543772848170429916717
+        scale (float, optional): The value of scale (must be greater than 1.0) for SELU. Default is 1.0507009873554804934193349852946
+        alpha (float, optional): The value of alpha (must be no less than zero) for SELU. Default is 1.6732632423543772848170429916717
         name (str, optional): Name for the operation (optional, default is None).
             For more information, please refer to :ref:`api_guide_Name`.
 
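For context, a minimal sketch of how the new validation surfaces to users in dygraph mode, assuming this patch is applied. The expected error messages are copied from the ValueError strings added to activation.py above; note that in the test file `F.selu(x_fp32, -1.0)` passes `scale` positionally, matching the calls below.

```python
import numpy as np
import paddle
import paddle.nn.functional as F

x = paddle.to_tensor(np.array([[0.0, 1.0], [2.0, 3.0]], dtype='float32'))

# Valid call: the defaults satisfy scale > 1.0 and alpha >= 0.
out = F.selu(x)  # [[0., 1.050701], [2.101402, 3.152103]]

# Invalid scale: values <= 1.0 are now rejected up front with a
# ValueError instead of silently producing a non-SELU result.
try:
    F.selu(x, scale=-1.0)
except ValueError as e:
    print(e)  # The scale must be greater than 1.0. Received: -1.0.

# Invalid alpha: negative values are rejected the same way.
try:
    F.selu(x, scale=1.6, alpha=-1.0)
except ValueError as e:
    print(e)  # The alpha must be no less than zero. Received: -1.0.
```

Performing the checks in Python before dispatch means both the dygraph path (`core.ops.selu`) and the static-graph path reject bad arguments with the same message, which is what the new `assertRaises(ValueError, ...)` cases in test_selu_op.py verify.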