Commit cf38d93e authored by zhouneng

Add Examples to the operators in the mindspore.nn package

Parent 2c85295e
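The doctest snippets added below all assume the usual MindSpore preamble, which the diff itself does not show. A sketch of the imports they rely on (the exact import style is an assumption, not part of this commit):

import numpy as np
import mindspore
import mindspore.nn as nn
from mindspore import Tensor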
@@ -48,6 +48,11 @@ class Softmax(Cell):
Outputs:
Tensor, with the same type and shape as `x`, with values in the range [0, 1].
Examples:
>>> input_x = Tensor(np.array([-1, -2, 0, 2, 1]), mindspore.float16)
>>> softmax = nn.Softmax()
>>> softmax(input_x)
[0.03168 0.01166 0.0861 0.636 0.2341]
"""
def __init__(self, axis=-1):
super(Softmax, self).__init__()
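As a cross-check independent of MindSpore, the printed values follow directly from the softmax formula exp(x_i) / sum_j exp(x_j); a minimal NumPy sketch, not the MindSpore kernel:

import numpy as np

x = np.array([-1, -2, 0, 2, 1], dtype=np.float32)
y = np.exp(x) / np.exp(x).sum()   # softmax over the last (only) axis
print(y)  # ~[0.0317 0.0117 0.0861 0.6364 0.2341], matching the docstring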
@@ -78,6 +83,12 @@ class LogSoftmax(Cell):
Outputs:
Tensor, with the same type and shape as `x`, with values in the range [-inf, 0).
Examples:
>>> input_x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]), mindspore.float32)
>>> log_softmax = nn.LogSoftmax()
>>> log_softmax(input_x)
[[-5.00672150e+00 -6.72150636e-03 -1.20067215e+01]
[-7.00091219e+00 -1.40009127e+01 -9.12250078e-04]]
"""
def __init__(self, axis=-1):
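The expected output can likewise be reproduced with the numerically stable identity log_softmax(x) = (x - m) - log(sum(exp(x - m))), where m = max(x) along the axis; a NumPy sketch:

import numpy as np

x = np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]], dtype=np.float32)
m = x.max(axis=-1, keepdims=True)   # shift by the row max for numerical stability
y = (x - m) - np.log(np.exp(x - m).sum(axis=-1, keepdims=True))
print(y)  # row 0 ~[-5.0067 -0.0067 -12.0067], row 1 ~[-7.0009 -14.0009 -0.0009]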
@@ -134,6 +145,11 @@ class ReLU(Cell):
Outputs:
Tensor, with the same type and shape as `input_data`.
Examples:
>>> input_x = Tensor(np.array([-1, 2, -3, 2, -1]), mindspore.float16)
>>> relu = nn.ReLU()
>>> relu(input_x)
[0. 2. 0. 2. 0.]
"""
def __init__(self):
super(ReLU, self).__init__()
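ReLU is simply max(x, 0) elementwise, so the expected line can be checked with one NumPy call (a sketch):

import numpy as np

x = np.array([-1, 2, -3, 2, -1], dtype=np.float32)
print(np.maximum(x, 0))  # [0. 2. 0. 2. 0.]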
@@ -157,6 +173,11 @@ class ReLU6(Cell):
Outputs:
Tensor, with the same type as `input_data`.
Examples:
>>> input_x = Tensor(np.array([-1, -2, 0, 2, 1]), mindspore.float16)
>>> relu6 = nn.ReLU6()
>>> relu6(input_x)
[0. 0. 0. 2. 1.]
"""
def __init__(self):
super(ReLU6, self).__init__()
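ReLU6 caps ReLU at 6, i.e. min(max(x, 0), 6); a one-line NumPy check:

import numpy as np

x = np.array([-1, -2, 0, 2, 1], dtype=np.float32)
print(np.clip(x, 0, 6))  # [0. 0. 0. 2. 1.]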
@@ -188,6 +209,12 @@ class LeakyReLU(Cell):
Outputs:
Tensor, with the same type and shape as `input_x`.
Examples:
>>> input_x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]), mindspore.float32)
>>> leaky_relu = nn.LeakyReLU()
>>> leaky_relu(input_x)
[[-0.2  4.  -1.6]
 [ 2.  -1.   9. ]]
"""
def __init__(self, alpha=0.2):
super(LeakyReLU, self).__init__()
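With the default alpha=0.2 shown in __init__, LeakyReLU keeps non-negative entries and scales negative ones by alpha; a NumPy sketch reproducing the expected matrix:

import numpy as np

alpha = 0.2  # default slope from __init__ above
x = np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]], dtype=np.float32)
print(np.where(x >= 0, x, alpha * x))  # [[-0.2 4. -1.6], [2. -1. 9.]]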
@@ -224,6 +251,11 @@ class Tanh(Cell):
Outputs:
Tensor, with the same type and shape as `input_data`.
Examples:
>>> input_x = Tensor(np.array([1, 2, 3, 2, 1]), mindspore.float16)
>>> tanh = nn.Tanh()
>>> tanh(input_x)
[0.7617 0.964 0.995 0.964 0.7617]
"""
def __init__(self):
super(Tanh, self).__init__()
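The Tanh values are plain hyperbolic tangents (the docstring output is rounded to float16); a NumPy check:

import numpy as np

x = np.array([1, 2, 3, 2, 1], dtype=np.float32)
print(np.tanh(x))  # ~[0.7616 0.9640 0.9951 0.9640 0.7616]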
@@ -249,6 +281,12 @@ class GELU(Cell):
Outputs:
Tensor, with the same type and shape as `input_data`.
Examples:
>>> input_x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]), mindspore.float32)
>>> gelu = nn.GELU()
>>> gelu(input_x)
[[-1.5880802e-01 3.9999299e+00 -3.1077917e-21]
[ 1.9545976e+00 -2.2918017e-07 9.0000000e+00]]
"""
def __init__(self):
super(GELU, self).__init__()
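The expected GELU output matches the tanh-based approximation 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3))) rather than the exact erf form (note the ~1e-21 value at x = -8, where the erf form would give ~1e-15). Assuming that is what the kernel computes, a NumPy sketch:

import numpy as np

x = np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]], dtype=np.float32)
# tanh-based GELU approximation; reproduces the tiny -3.1e-21 at x = -8
y = 0.5 * x * (1.0 + np.tanh(np.sqrt(2.0 / np.pi) * (x + 0.044715 * x**3)))
print(y)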
@@ -273,6 +311,11 @@ class Sigmoid(Cell):
Outputs:
Tensor, with the same type and shape as `input_data`.
Examples:
>>> input_x = Tensor(np.array([-1, -2, 0, 2, 1]), mindspore.float16)
>>> sigmoid = nn.Sigmoid()
>>> sigmoid(input_x)
[0.2688 0.11914 0.5 0.881 0.7305]
"""
def __init__(self):
super(Sigmoid, self).__init__()
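Finally, the Sigmoid line is the logistic function 1 / (1 + exp(-x)), shown rounded to float16; a NumPy check:

import numpy as np

x = np.array([-1, -2, 0, 2, 1], dtype=np.float32)
print(1.0 / (1.0 + np.exp(-x)))  # ~[0.2689 0.1192 0.5 0.8808 0.7311]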