diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index 6acb806403ec782e664b9c173abbc29537fea3eb..2e399db2a9aba4edce5ebd42df83df16937a80d9 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -644,7 +644,11 @@ def selu(x,
 
     .. math::
 
-        selu(x) = scale * (max(0,x) + min(0, alpha * (e^{x} - 1)))
+        selu(x)= scale *
+                 \\begin{cases}
+                 x, \\text{if } x > 0 \\\\
+                 alpha * e^{x} - alpha, \\text{if } x <= 0
+                 \\end{cases}
 
     Parameters:
         x (Tensor): The input Tensor with data type float32, float64.
diff --git a/python/paddle/nn/layer/activation.py b/python/paddle/nn/layer/activation.py
index d30547ffdbe357e5524573df631d2fe02a35eb37..6ce732d95addba1af10ae38506ba0969975ae95d 100644
--- a/python/paddle/nn/layer/activation.py
+++ b/python/paddle/nn/layer/activation.py
@@ -552,7 +552,11 @@ class SELU(layers.Layer):
 
     .. math::
 
-        SELU(x) = scale * (max(0,x) + min(0, alpha * (e^{x} - 1)))
+        SELU(x)= scale *
+                 \\begin{cases}
+                 x, \\text{if } x > 0 \\\\
+                 alpha * e^{x} - alpha, \\text{if } x <= 0
+                 \\end{cases}
 
     Parameters:
         scale (float, optional): The value of scale for SELU. Default is 1.0507009873554804934193349852946
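For reference: the removed closed form and the added piecewise form are mathematically equivalent. When x > 0, the min(0, alpha * (e^x - 1)) term vanishes, leaving scale * x; when x <= 0, the max(0, x) term vanishes, leaving scale * alpha * (e^x - 1) = scale * (alpha * e^x - alpha). The sketch below is not part of the patch; it assumes a working Paddle 2.x installation plus NumPy and uses the default scale/alpha constants from the docstring to check the new piecewise form against paddle.nn.functional.selu numerically:

```python
import numpy as np
import paddle
import paddle.nn.functional as F

# Default SELU constants, matching the docstring defaults.
scale = 1.0507009873554804934193349852946
alpha = 1.6732632423543772848170429916717

x = np.array([-2.0, -0.5, 0.0, 0.5, 2.0], dtype="float32")

# Piecewise form from the updated docstring:
#   selu(x) = scale * x                     if x > 0
#   selu(x) = scale * (alpha*e^x - alpha)   if x <= 0
expected = scale * np.where(x > 0, x, alpha * np.exp(x) - alpha)

out = F.selu(paddle.to_tensor(x))
np.testing.assert_allclose(out.numpy(), expected, rtol=1e-6)
```

The two branches also agree at x = 0, where both evaluate to 0, so attaching the boundary to the x <= 0 case is harmless and the documented function stays continuous.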