From dbcef732d8eeb5b2e323480c87f3dd2e0f4c3d55 Mon Sep 17 00:00:00 2001
From: hong19860320 <9973393+hong19860320@users.noreply.github.com>
Date: Wed, 26 Aug 2020 10:34:14 +0800
Subject: [PATCH] Fix the formula of SELU/selu (#26675)

---
 python/paddle/nn/functional/activation.py | 6 +++++-
 python/paddle/nn/layer/activation.py      | 6 +++++-
 2 files changed, 10 insertions(+), 2 deletions(-)

diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index 6acb806403e..2e399db2a9a 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -644,7 +644,11 @@ def selu(x,
 
     .. math::
 
-        selu(x) = scale * (max(0,x) + min(0, alpha * (e^{x} - 1)))
+        selu(x)= scale *
+                 \\begin{cases}
+                   x, \\text{if } x > 0 \\\\
+                   alpha * e^{x} - alpha, \\text{if } x <= 0
+                 \\end{cases}
 
     Parameters:
         x (Tensor): The input Tensor with data type float32, float64.
diff --git a/python/paddle/nn/layer/activation.py b/python/paddle/nn/layer/activation.py
index d30547ffdbe..6ce732d95ad 100644
--- a/python/paddle/nn/layer/activation.py
+++ b/python/paddle/nn/layer/activation.py
@@ -552,7 +552,11 @@ class SELU(layers.Layer):
 
     .. math::
 
-        SELU(x) = scale * (max(0,x) + min(0, alpha * (e^{x} - 1)))
+        SELU(x)= scale *
+                 \\begin{cases}
+                   x, \\text{if } x > 0 \\\\
+                   alpha * e^{x} - alpha, \\text{if } x <= 0
+                 \\end{cases}
 
     Parameters:
         scale (float, optional): The value of scale for SELU. Default is 1.0507009873554804934193349852946
-- 
GitLab
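
Note: the old one-line formula and the new piecewise one are mathematically equivalent for every real x (for x > 0 the min term vanishes; for x <= 0 the max term vanishes), so this patch changes only how the docs present the formula. Below is a minimal NumPy sketch checking that equivalence numerically; the helper names (selu_old_form, selu_piecewise) are illustrative, and alpha's default, which this hunk does not show, is assumed to be the standard SELU constant.

import numpy as np

# The scale default is quoted in the layer docstring above; alpha's default
# is assumed to be the standard SELU constant.
SCALE = 1.0507009873554804934193349852946
ALPHA = 1.6732632423543772848170429916717

def selu_old_form(x, scale=SCALE, alpha=ALPHA):
    # Formula as documented before this patch.
    return scale * (np.maximum(0.0, x) + np.minimum(0.0, alpha * (np.exp(x) - 1.0)))

def selu_piecewise(x, scale=SCALE, alpha=ALPHA):
    # Piecewise formula as documented after this patch.
    return scale * np.where(x > 0, x, alpha * np.exp(x) - alpha)

x = np.linspace(-5.0, 5.0, 101)
np.testing.assert_allclose(selu_old_form(x), selu_piecewise(x), rtol=1e-12)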