Commit d12976da authored by J jiangjinsheng

add example for hsigmoid

Parent e6273ce3
@@ -122,6 +122,11 @@ class ELU(Cell):
Outputs:
Tensor, with the same type and shape as the `input_data`.
Examples:
>>> input_x = Tensor(np.array([-1, -2, 0, 2, 1]), mindspore.float32)
>>> elu = nn.ELU()
>>> elu(input_x)
"""
def __init__(self, alpha=1.0):
super(ELU, self).__init__()
@@ -343,11 +348,16 @@ class PReLU(Cell):
w (float): The initial value of w. Default: 0.25.
Inputs:
- **input_data** (Tensor) - The input of Tanh.
- **input_data** (Tensor) - The input of PReLU.
Outputs:
Tensor, with the same type and shape as the `input_data`.
Examples:
>>> input_x = Tensor(np.array([-1, -2, 0, 2, 1]), mindspore.float32)
>>> prelu = nn.PReLU()
>>> prelu(input_x)
"""
@cell_attr_register(attrs="")
def __init__(self, channel=1, w=0.25):
@@ -394,6 +404,11 @@ class HSwish(Cell):
Outputs:
Tensor, with the same type and shape as the `input_data`.
Examples:
>>> input_x = Tensor(np.array([-1, -2, 0, 2, 1]), mindspore.float16)
>>> hswish = nn.HSwish()
>>> hswish(input_x)
"""
def __init__(self):
super(HSwish, self).__init__()
@@ -422,6 +437,11 @@ class HSigmoid(Cell):
Outputs:
Tensor, with the same type and shape as the `input_data`.
Examples:
>>> input_x = Tensor(np.array([-1, -2, 0, 2, 1]), mindspore.float16)
>>> hsigmoid = nn.HSigmoid()
>>> hsigmoid(input_x)
"""
def __init__(self):
super(HSigmoid, self).__init__()
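For reference, the new docstring examples follow MindSpore's convention of leaving the imports implicit. A self-contained version of the added HSigmoid example might look like the sketch below; the import lines are illustrative additions and not part of the commit, while the remaining lines mirror the diff above. The ELU, PReLU, and HSwish examples run the same way with the dtypes shown in their respective hunks.

>>> # Imports assumed by the docstring examples (not part of the diff).
>>> import numpy as np
>>> import mindspore
>>> import mindspore.nn as nn
>>> from mindspore import Tensor
>>> # The added HSigmoid example, end to end; the result has the same
>>> # shape and dtype as input_x, as stated in the Outputs section.
>>> input_x = Tensor(np.array([-1, -2, 0, 2, 1]), mindspore.float16)
>>> hsigmoid = nn.HSigmoid()
>>> output = hsigmoid(input_x)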