From 215804b8cb4cbfd1a469e32a6450a43d65538ed3 Mon Sep 17 00:00:00 2001
From: zhupengyang
Date: Sun, 23 Aug 2020 14:48:02 +0800
Subject: [PATCH] leaky_relu and LeakyReLU doc (#2373)

---
 doc/fluid/api/nn/leaky_relu.rst                |  8 +++-
 .../nn_cn/activation_cn/LeakyReLU_cn.rst       | 22 +++++++----
 doc/fluid/api_cn/nn_cn/hardshrink_cn.rst       |  2 +-
 doc/fluid/api_cn/nn_cn/leaky_relu_cn.rst       | 38 ++++++++++++++++++-
 4 files changed, 58 insertions(+), 12 deletions(-)

diff --git a/doc/fluid/api/nn/leaky_relu.rst b/doc/fluid/api/nn/leaky_relu.rst
index 2a63e375c..cf38cdb92 100644
--- a/doc/fluid/api/nn/leaky_relu.rst
+++ b/doc/fluid/api/nn/leaky_relu.rst
@@ -1,7 +1,11 @@
+.. THIS FILE IS GENERATED BY `gen_doc.{py|sh}`
+    !DO NOT EDIT THIS FILE MANUALLY!
+
 .. _api_nn_leaky_relu:
 
 leaky_relu
--------------------------------
-
-:doc_source: paddle.fluid.layers.leaky_relu
+----------
+
+.. autofunction:: paddle.nn.functional.leaky_relu
+    :noindex:
 
diff --git a/doc/fluid/api_cn/nn_cn/activation_cn/LeakyReLU_cn.rst b/doc/fluid/api_cn/nn_cn/activation_cn/LeakyReLU_cn.rst
index 9b9c61818..62d710903 100644
--- a/doc/fluid/api_cn/nn_cn/activation_cn/LeakyReLU_cn.rst
+++ b/doc/fluid/api_cn/nn_cn/activation_cn/LeakyReLU_cn.rst
@@ -2,19 +2,25 @@
 LeakyReLU
 -------------------------------
 
-.. py:class:: paddle.nn.LeakyReLU(alpha=0.01, name=None)
+.. py:class:: paddle.nn.LeakyReLU(negative_slope=0.01, name=None)
 
-ReLU (Rectified Linear Unit)激活层
+LeakyReLU 激活层
 
 .. math::
 
-    \\Out = max(x, alpha*x)\\
+    LeakyReLU(x)=
+        \left\{
+        \begin{aligned}
+        &x, & & if \ x >= 0 \\
+        &negative\_slope * x, & & otherwise \\
+        \end{aligned}
+        \right. \\
 
 其中,:math:`x` 为输入的 Tensor
 
 参数
 ::::::::::
-    - alpha (float,可选) - :math:`x < 0` 时的斜率。默认值为0.01。
+    - negative_slope (float,可选) - :math:`x < 0` 时的斜率。默认值为0.01。
     - name (str, 可选) - 操作的名称(可选,默认值为None)。更多信息请参见 :ref:`api_guide_Name`。
 
 形状:
@@ -29,8 +35,8 @@ ReLU (Rectified Linear Unit)激活层
     import paddle
     import numpy as np
 
-    paddle.enable_imperative()
+    paddle.disable_static()
 
-    lrelu = paddle.nn.LeakyReLU()
-    x = paddle.imperative.to_variable(np.array([-2, 0, 1], 'float32'))
-    out = lrelu(x) # [-0.02, 0, 1]
+    m = paddle.nn.LeakyReLU()
+    x = paddle.to_tensor(np.array([-2, 0, 1], 'float32'))
+    out = m(x) # [-0.02, 0., 1.]
diff --git a/doc/fluid/api_cn/nn_cn/hardshrink_cn.rst b/doc/fluid/api_cn/nn_cn/hardshrink_cn.rst
index cb837884b..66c492e74 100644
--- a/doc/fluid/api_cn/nn_cn/hardshrink_cn.rst
+++ b/doc/fluid/api_cn/nn_cn/hardshrink_cn.rst
@@ -2,7 +2,7 @@
 hardshrink
 -------------------------------
 
-.. py:functional:: paddle.nn.functional.hardshrink(x, threshold=0.5, name=None)
+.. py:function:: paddle.nn.functional.hardshrink(x, threshold=0.5, name=None)
 
 hardshrink激活层。计算公式如下:
 
diff --git a/doc/fluid/api_cn/nn_cn/leaky_relu_cn.rst b/doc/fluid/api_cn/nn_cn/leaky_relu_cn.rst
index 11eff1925..a0bb19d6e 100644
--- a/doc/fluid/api_cn/nn_cn/leaky_relu_cn.rst
+++ b/doc/fluid/api_cn/nn_cn/leaky_relu_cn.rst
@@ -2,6 +2,42 @@
 leaky_relu
 -------------------------------
 
-:doc_source: paddle.fluid.layers.leaky_relu
+.. py:function:: paddle.nn.functional.leaky_relu(x, negative_slope=0.01, name=None)
+
+leaky_relu激活层。计算公式如下:
+
+.. math::
+
+    LeakyReLU(x)=
+        \left\{
+        \begin{aligned}
+        &x, & & if \ x >= 0 \\
+        &negative\_slope * x, & & otherwise \\
+        \end{aligned}
+        \right. \\
+
+其中,:math:`x` 为输入的 Tensor
+
+参数
+::::::::::
+    - x (Tensor) - 输入的Tensor,数据类型为:float32、float64。
+    - negative_slope (float,可选) - :math:`x < 0` 时的斜率。默认值为0.01。
+    - name (str, 可选) - 操作的名称(可选,默认值为None)。更多信息请参见 :ref:`api_guide_Name`。
+
+返回
+::::::::::
+    ``Tensor`` ,数据类型和形状同 ``x`` 一致。
+
+代码示例
+::::::::::
+
+.. code-block:: python
+
+    import paddle
+    import paddle.nn.functional as F
+    import numpy as np
+
+    paddle.disable_static()
+
+    x = paddle.to_tensor(np.array([-2, 0, 1], 'float32'))
+    out = F.leaky_relu(x) # [-0.02, 0., 1.]
--
GitLab
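For readers who want to check the piecewise formula added by this patch without installing Paddle, here is a minimal NumPy-only sketch. The helper name ``leaky_relu_reference`` is illustrative and not a Paddle API; it simply mirrors the documented definition (out = x for x >= 0, otherwise negative_slope * x) and reproduces the ``[-0.02, 0., 1.]`` values used in the doc examples.

.. code-block:: python

    import numpy as np

    def leaky_relu_reference(x, negative_slope=0.01):
        # Piecewise definition from the patched docs:
        #   out = x                   if x >= 0
        #   out = negative_slope * x  otherwise
        x = np.asarray(x, dtype=np.float32)
        return np.where(x >= 0, x, negative_slope * x)

    out = leaky_relu_reference([-2, 0, 1])  # [-0.02, 0., 1.], matching the docs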