From 1fb4d90b710090f38ebcda82d6dae4559642737d Mon Sep 17 00:00:00 2001 From: Dandelight <55911877+Dandelight@users.noreply.github.com> Date: Fri, 18 Nov 2022 14:25:31 +0800 Subject: [PATCH] Add description to `nn.functional.celu` (#48074) --- python/paddle/nn/functional/activation.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py index f8eb9d35d5..af5fa1336f 100644 --- a/python/paddle/nn/functional/activation.py +++ b/python/paddle/nn/functional/activation.py @@ -35,17 +35,19 @@ def celu(x, alpha=1.0, name=None): r""" celu activation. + Apply the following operation to each element of the input Tensor according to the `Continuously Differentiable Exponential Linear Units <https://arxiv.org/abs/1704.07483>`_. + .. math:: - celu(x) = max(0, x) + min(0, \alpha * (e^{x/\alpha}-1)) + \operatorname{celu}(x) = \max(0, x) + \min(0, \alpha * (\mathrm{e}^{x/\alpha}-1)) Parameters: - x (Tensor): The input Tensor with data type float32, float64. - alpha (float, optional): The 'alpha' value of the CELU formulation. Default is 1.0. + x (Tensor): The input Tensor with data type float16, float32, or float64. + alpha (float, optional): The 'alpha' value of the CELU formula. Default is 1.0. name (str, optional): For details, please refer to :ref:`api_guide_Name`. Generally, no setting is required. Default: None. Returns: - A Tensor with the same data type and shape as ``x`` . + A ``Tensor`` with the same data type and shape as ``x`` . Examples: .. code-block:: python -- GitLab