Unverified · Commit 47af5c3c authored by Guanghua Yu, committed by GitHub

fix smooth_l1_loss en docs (#29093)

Parent 69510524
@@ -469,14 +469,14 @@ def smooth_l1_loss(input, label, reduction='mean', delta=1.0, name=None):
.. math::
-loss(x,y)=\\frac{1}{n}\\sum_{i}z_i
+loss(x,y) = \\frac{1}{n}\\sum_{i}z_i
where z_i is given by:
.. math::
-\\mathop{z_i}=\\left\\{\\begin{array}{rcl}
+\\mathop{z_i} = \\left\\{\\begin{array}{rcl}
0.5(x_i - y_i)^2 & & {if |x_i - y_i| < delta} \\\\
delta * |x_i - y_i| - 0.5 * delta^2 & & {otherwise}
\\end{array} \\right.
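For reference, the piecewise definition above can be checked with a short NumPy sketch (illustrative only, not part of this patch; smooth_l1_ref is a hypothetical helper assuming delta=1.0 and the default 'mean' reduction):

import numpy as np

def smooth_l1_ref(x, y, delta=1.0):
    # elementwise piecewise loss, mirroring the formula above
    diff = np.abs(x - y)
    z = np.where(diff < delta,
                 0.5 * diff ** 2,
                 delta * diff - 0.5 * delta ** 2)
    return z.mean()  # 'mean' reduction

x = np.random.rand(3, 3).astype("float32")
y = np.random.rand(3, 3).astype("float32")
print(smooth_l1_ref(x, y))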
@@ -511,13 +511,12 @@ def smooth_l1_loss(input, label, reduction='mean', delta=1.0, name=None):
import paddle
import numpy as np
-paddle.disable_static()
input_data = np.random.rand(3,3).astype("float32")
label_data = np.random.rand(3,3).astype("float32")
input = paddle.to_tensor(input_data)
label = paddle.to_tensor(label_data)
output = paddle.nn.functional.smooth_l1_loss(input, label)
-print(output.numpy())
+print(output)
"""
fluid.data_feeder.check_variable_and_dtype(
input, 'input', ['float32', 'float64'], 'smooth_l1_loss')
@@ -971,13 +971,13 @@ class SmoothL1Loss(fluid.dygraph.Layer):
.. math::
-loss(x,y)=\\frac{1}{n}\\sum_{i}z_i
+loss(x,y) = \\frac{1}{n}\\sum_{i}z_i
where z_i is given by:
.. math::
-\\mathop{z_i}=\\left\\{\\begin{array}{rcl}
+\\mathop{z_i} = \\left\\{\\begin{array}{rcl}
0.5(x_i - y_i)^2 & & {if |x_i - y_i| < delta} \\\\
delta * |x_i - y_i| - 0.5 * delta^2 & & {otherwise}
\\end{array} \\right.
@@ -1004,7 +1004,7 @@ class SmoothL1Loss(fluid.dygraph.Layer):
is the same as the shape of input.
Returns:
-The tensor variable storing the smooth_l1_loss of input and label.
+The tensor storing the smooth_l1_loss of input and label.
Return type: Tensor.
@@ -1013,14 +1013,13 @@ class SmoothL1Loss(fluid.dygraph.Layer):
import paddle
import numpy as np
-paddle.disable_static()
input_data = np.random.rand(3,3).astype("float32")
label_data = np.random.rand(3,3).astype("float32")
input = paddle.to_tensor(input_data)
label = paddle.to_tensor(label_data)
loss = paddle.nn.SmoothL1Loss()
output = loss(input, label)
-print(output.numpy())
+print(output)
"""
def __init__(self, reduction='mean', delta=1.0, name=None):
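The functional form and the layer form documented in this patch compute the same quantity; a minimal comparison sketch (assuming a Paddle 2.x install; illustrative, not part of the patch):

import paddle

input = paddle.rand([3, 3], dtype="float32")
label = paddle.rand([3, 3], dtype="float32")

# functional API
out_fn = paddle.nn.functional.smooth_l1_loss(input, label)
# layer API with the same defaults (reduction='mean', delta=1.0)
out_layer = paddle.nn.SmoothL1Loss()(input, label)

print(out_fn, out_layer)  # the two values should match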