From a7db9acc2fe0a6c1427053f3f977551d923043c2 Mon Sep 17 00:00:00 2001
From: hong19860320 <9973393+hong19860320@users.noreply.github.com>
Date: Fri, 28 Aug 2020 11:43:48 +0800
Subject: [PATCH] Add the constraint for the scale of SELU/selu (#26686)

---
 python/paddle/fluid/tests/unittests/test_selu_op.py |  5 +++++
 python/paddle/nn/functional/activation.py           | 12 ++++++++++--
 python/paddle/nn/layer/activation.py                |  4 ++--
 3 files changed, 17 insertions(+), 4 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/test_selu_op.py b/python/paddle/fluid/tests/unittests/test_selu_op.py
index 590ef11e9cb..b5a2e84a53e 100644
--- a/python/paddle/fluid/tests/unittests/test_selu_op.py
+++ b/python/paddle/fluid/tests/unittests/test_selu_op.py
@@ -130,6 +130,11 @@ class TestSeluAPI(unittest.TestCase):
             # The input dtype must be float16, float32, float64.
             x_int32 = paddle.data(name='x_int32', shape=[12, 10], dtype='int32')
             self.assertRaises(TypeError, F.selu, x_int32)
+            # The scale must be greater than 1.0
+            x_fp32 = paddle.data(name='x_fp32', shape=[12, 10], dtype='float32')
+            self.assertRaises(ValueError, F.selu, x_fp32, -1.0)
+            # The alpha must be no less than 0
+            self.assertRaises(ValueError, F.selu, x_fp32, 1.6, -1.0)
             # support the input dtype is float16
             x_fp16 = paddle.data(name='x_fp16', shape=[12, 10], dtype='float16')
             F.selu(x_fp16)
diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index 1e9a2e6eb7d..ffedb027330 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -652,8 +652,8 @@ def selu(x,
 
     Parameters:
         x (Tensor): The input Tensor with data type float32, float64.
-        scale (float, optional): The value of scale for selu. Default is 1.0507009873554804934193349852946
-        alpha (float, optional): The value of alpha for selu. Default is 1.6732632423543772848170429916717
+        scale (float, optional): The value of scale(must be greater than 1.0) for selu. Default is 1.0507009873554804934193349852946
+        alpha (float, optional): The value of alpha(must be no less than zero) for selu. Default is 1.6732632423543772848170429916717
         name (str, optional): Name for the operation (optional, default is None).
             For more information, please refer to :ref:`api_guide_Name`.
 
@@ -672,6 +672,14 @@ def selu(x,
             x = paddle.to_tensor(np.array([[0.0, 1.0],[2.0, 3.0]]))
             out = F.selu(x) # [[0, 1.050701],[2.101402, 3.152103]]
     """
+    if scale <= 1.0:
+        raise ValueError(
+            "The scale must be greater than 1.0. Received: {}.".format(scale))
+
+    if alpha < 0:
+        raise ValueError(
+            "The alpha must be no less than zero. Received: {}.".format(alpha))
+
     if in_dygraph_mode():
         return core.ops.selu(x, 'scale', scale, 'alpha', alpha)
 
diff --git a/python/paddle/nn/layer/activation.py b/python/paddle/nn/layer/activation.py
index 3dc7bf71154..c38d6018a25 100644
--- a/python/paddle/nn/layer/activation.py
+++ b/python/paddle/nn/layer/activation.py
@@ -559,8 +559,8 @@ class SELU(layers.Layer):
         \\end{cases}
 
     Parameters:
-        scale (float, optional): The value of scale for SELU. Default is 1.0507009873554804934193349852946
-        alpha (float, optional): The value of alpha for SELU. Default is 1.6732632423543772848170429916717
+        scale (float, optional): The value of scale(must be greater than 1.0) for SELU. Default is 1.0507009873554804934193349852946
+        alpha (float, optional): The value of alpha(must be no less than zero) for SELU. Default is 1.6732632423543772848170429916717
         name (str, optional): Name for the operation (optional, default is None).
             For more information, please refer to :ref:`api_guide_Name`.
 
--
GitLab
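Not part of the patch itself: a minimal sketch of how the new argument checks surface
to callers of paddle.nn.functional.selu, assuming a Paddle build that includes this
change; the tensor values follow the docstring example above.

    import numpy as np
    import paddle
    import paddle.nn.functional as F

    paddle.disable_static()  # run in dygraph mode, as in the docstring example

    x = paddle.to_tensor(np.array([[0.0, 1.0], [2.0, 3.0]], dtype='float32'))

    # The defaults satisfy the new constraints (scale > 1.0, alpha >= 0).
    out = F.selu(x)  # [[0, 1.050701], [2.101402, 3.152103]]

    # A scale <= 1.0 is now rejected before any op is created.
    try:
        F.selu(x, scale=0.5)
    except ValueError as e:
        print(e)  # The scale must be greater than 1.0. Received: 0.5.

    # A negative alpha is likewise rejected.
    try:
        F.selu(x, alpha=-1.0)
    except ValueError as e:
        print(e)  # The alpha must be no less than zero. Received: -1.0.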