Unverified · Commit 32ae8e81, authored by Z zhupengyang, committed via GitHub

leaky_relu, log_softmax, hardshrink formula format (#26720)

Parent c2c68958
@@ -168,13 +168,13 @@ def hardshrink(x, threshold=0.5, name=None):
.. math::
hardshrink(x)=
- \left\{
- \begin{aligned}
- &x, & & if \ x > threshold \\
- &x, & & if \ x < -threshold \\
- &0, & & if \ others
- \end{aligned}
- \right.
+ \\left\\{
+ \\begin{aligned}
+ &x, & & if \\ x > threshold \\\\
+ &x, & & if \\ x < -threshold \\\\
+ &0, & & if \\ others
+ \\end{aligned}
+ \\right.
Args:
x (Tensor): The input Tensor with data type float32, float64.
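The doubled backslashes above are the whole point of this change: these docstrings are plain (non-raw) Python strings, so a single backslash is consumed as a string escape before Sphinx ever sees the LaTeX. A minimal sketch of the failure mode, not taken from the Paddle source:

```python
# Why the backslashes are doubled: in a plain (non-raw) Python string,
# "\f" is the form-feed escape, so the LaTeX command \frac is silently
# corrupted before Sphinx can render it.
single = "log(\frac{a}{b})"     # "\f" collapses into one control character
double = "log(\\frac{a}{b})"    # stores a literal backslash followed by "frac"

print(repr(single))   # 'log(\x0crac{a}{b})'  -- \frac is already gone
print(repr(double))   # 'log(\\frac{a}{b})'   -- exactly what Sphinx needs
```

An alternative fix would have been to declare the docstrings as raw strings (r"""..."""), which avoids the doubling entirely; this commit keeps the plain strings and escapes instead.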
@@ -391,14 +391,14 @@ def leaky_relu(x, negative_slope=0.01, name=None):
"""
leaky_relu activation
- .. math:
- leaky_relu(x)=
- \left\{
- \begin{aligned}
- &x, & & if \ x >= 0 \\
- &negative\_slope * x, & & otherwise \\
- \end{aligned}
- \right. \\
+ .. math::
+ leaky\\_relu(x)=
+ \\left\\{
+ \\begin{aligned}
+ &x, & & if \\ x >= 0 \\\\
+ &negative\_slope * x, & & otherwise \\\\
+ \\end{aligned}
+ \\right. \\\\
Args:
x (Tensor): The input Tensor with data type float32, float64.
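For reference, a minimal NumPy sketch of the leaky_relu formula documented above (a hand-rolled check, not Paddle's implementation; the helper name leaky_relu_ref is made up here):

```python
import numpy as np

def leaky_relu_ref(x, negative_slope=0.01):
    """Piecewise formula from the docstring: x if x >= 0, else negative_slope * x."""
    return np.where(x >= 0, x, negative_slope * x)

x = np.array([-2.0, 0.0, 3.0], dtype="float32")
print(leaky_relu_ref(x))   # [-0.02  0.    3.  ]
```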
@@ -1033,8 +1033,8 @@ def log_softmax(x, axis=-1, dtype=None, name=None):
.. math::
- Out[i, j] = log(softmax(x))
- = log(\frac{\exp(X[i, j])}{\sum_j(exp(X[i, j])})
+ log\\_softmax[i, j] = log(softmax(x))
+ = log(\\frac{\exp(X[i, j])}{\\sum_j(exp(X[i, j])})
Parameters:
x (Tensor): The input Tensor with data type float32, float64.
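Likewise, a small NumPy sketch of the log_softmax formula above, using the usual max-subtraction trick for numerical stability (a reference check only, not Paddle's kernel):

```python
import numpy as np

def log_softmax_ref(x, axis=-1):
    """log(exp(x) / sum(exp(x))) along `axis`, shifted by the max for stability."""
    shifted = x - np.max(x, axis=axis, keepdims=True)
    return shifted - np.log(np.sum(np.exp(shifted), axis=axis, keepdims=True))

x = np.array([[1.0, 2.0, 3.0]], dtype="float32")
print(log_softmax_ref(x))                       # roughly [[-2.4076 -1.4076 -0.4076]]
print(np.exp(log_softmax_ref(x)).sum(axis=-1))  # each row sums to ~1
```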
@@ -144,13 +144,13 @@ class Hardshrink(layers.Layer):
.. math::
hardshrink(x)=
- \left\{
- \begin{aligned}
- &x, & & if \ x > threshold \\
- &x, & & if \ x < -threshold \\
- &0, & & if \ others
- \end{aligned}
- \right.
+ \\left\\{
+ \\begin{aligned}
+ &x, & & if \\ x > threshold \\\\
+ &x, & & if \\ x < -threshold \\\\
+ &0, & & if \\ others
+ \\end{aligned}
+ \\right.
Parameters:
threshold (float, optional): The value of threshold for hardthrink. Default is 0.5
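A quick NumPy reference for the hardshrink piecewise formula restated above (the helper name hardshrink_ref is hypothetical, not Paddle's implementation):

```python
import numpy as np

def hardshrink_ref(x, threshold=0.5):
    """Keep x where |x| > threshold, zero it elsewhere, per the piecewise formula."""
    return np.where(np.abs(x) > threshold, x, 0.0)

x = np.array([-1.0, 0.3, 2.5], dtype="float32")
print(hardshrink_ref(x))   # [-1.   0.   2.5]
```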
@@ -598,15 +598,15 @@ class LeakyReLU(layers.Layer):
"""
Leaky ReLU Activation.
- .. math:
+ .. math::
LeakyReLU(x)=
- \left\{
- \begin{aligned}
- &x, & & if \ x >= 0 \\
- &negative\_slope * x, & & otherwise \\
- \end{aligned}
- \right. \\
+ \\left\\{
+ \\begin{aligned}
+ &x, & & if \\ x >= 0 \\\\
+ &negative\_slope * x, & & otherwise \\\\
+ \\end{aligned}
+ \\right. \\\\
Parameters:
negative_slope (float, optional): Slope of the activation function at
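A hedged usage sketch of the layer form, assuming it is exposed as paddle.nn.LeakyReLU and that paddle.to_tensor is available in this Paddle release (module path not shown in the diff):

```python
import paddle

# Assumed module path and dynamic-graph mode; adjust to your Paddle install.
m = paddle.nn.LeakyReLU(negative_slope=0.01)   # parameter name taken from this hunk
x = paddle.to_tensor([-2.0, 0.0, 3.0])
out = m(x)                                     # expected: [-0.02, 0.0, 3.0]
print(out.numpy())
```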
@@ -1015,7 +1015,7 @@ class LogSoftmax(layers.Layer):
.. math::
Out[i, j] = log(softmax(x))
- = log(\frac{\exp(X[i, j])}{\sum_j(exp(X[i, j])})
+ = log(\\frac{\exp(X[i, j])}{\\sum_j(exp(X[i, j])})
Parameters:
axis (int, optional): The axis along which to perform log_softmax
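And a similar sketch for the layer form of log_softmax, again assuming the paddle.nn.LogSoftmax path and Paddle 2.x dynamic mode:

```python
import paddle

# Assumed module path (paddle.nn.LogSoftmax); the axis parameter is documented in this hunk.
m = paddle.nn.LogSoftmax(axis=-1)
x = paddle.to_tensor([[1.0, 2.0, 3.0]])
out = m(x)
print(paddle.exp(out).sum(axis=-1).numpy())    # each row should sum to ~1.0
```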