Unverified commit cd7b55a2 authored by zhupengyang, committed by GitHub

LeakyRelu: refine doc, fix name bug (#25957)

Parent 2214394e
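For context, the layer touched by this commit computes out = max(x, alpha * x). A minimal NumPy sketch of that formula (an illustration of the math only, not Paddle's implementation):

    import numpy as np

    def leaky_relu(x, alpha=0.01):
        # Identity for x >= 0; the negative part is scaled by alpha.
        return np.maximum(x, alpha * x)

    print(leaky_relu(np.array([-2.0, 0.0, 1.0])))  # [-0.02  0.    1.  ]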
@@ -210,9 +210,6 @@ class ReLU(layers.Layer):
 
 class LeakyReLU(layers.Layer):
     """
-    :alias_main: paddle.nn.LeakyReLU
-    :alias: paddle.nn.LeakyReLU,paddle.nn.layer.LeakyReLU,paddle.nn.layer.activation.LeakyReLU
-
     Leaky ReLU Activation.
 
     .. math:
@@ -220,36 +217,35 @@ class LeakyReLU(layers.Layer):
 
         out = max(x, alpha * x)
 
     Parameters:
-        alpha (float, optional): Slope of the activation function at x < 0. Default: 0.01.
-        inplace (bool, optional): If inplace is True, the input and output of
-            ``LeakyReLU`` are the same variable. Otherwise, the input and output of
-            ``LeakyReLU`` are different variables. Default False. Note that if x is
-            more than one OPs' input, inplace must be False.
+        alpha (float, optional): Slope of the activation function at :math:`x < 0` .
+            Default: 0.01.
+        name (str, optional): Name for the operation (optional, default is None).
+            For more information, please refer to :ref:`api_guide_Name`.
 
-    Returns:
-        None
+    Shape:
+        - input: Tensor with any shape.
+        - output: Tensor with the same shape as input.
 
     Examples:
         .. code-block:: python
 
-            import paddle.fluid as fluid
-            import paddle.nn as nn
+            import paddle
             import numpy as np
 
-            data = np.array([-2, 0, 1]).astype('float32')
-            lrelu = nn.LeakyReLU()
-            with fluid.dygraph.guard():
-                data = fluid.dygraph.to_variable(data)
-                res = lrelu(data)  # [-0.02, 0, 1]
+            paddle.enable_imperative()
+
+            lrelu = paddle.nn.LeakyReLU()
+            x = paddle.imperative.to_variable(np.array([-2, 0, 1], 'float32'))
+            out = lrelu(x)  # [-0.02, 0, 1]
     """
 
-    def __init__(self, alpha=1e-2, inplace=False):
+    def __init__(self, alpha=1e-2, name=None):
         super(LeakyReLU, self).__init__()
         self._alpha = alpha
-        self._inplace = inplace
+        self._name = name
 
-    def forward(self, input):
-        return functional.leaky_relu(input, self._alpha, self._inplace)
+    def forward(self, x):
+        return functional.leaky_relu(x, self._alpha, self._name)
 
 
 class Sigmoid(layers.Layer):
......
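For reference, a quick usage sketch of the patched layer in the imperative mode this branch uses (the alpha and name values here are illustrative):

    import numpy as np
    import paddle

    paddle.enable_imperative()  # eager execution, as in the updated docstring

    # With the fix, the name argument is forwarded to functional.leaky_relu
    # instead of the removed inplace flag.
    lrelu = paddle.nn.LeakyReLU(alpha=0.1, name='my_leaky_relu')
    x = paddle.imperative.to_variable(np.array([-2.0, 0.0, 1.0], 'float32'))
    out = lrelu(x)  # [-0.2, 0.0, 1.0]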