magicwindyyd / mindspore, forked from MindSpore / mindspore
Commit 2cd9649b
Authored Jul 03, 2020 by mindspore-ci-bot; committed via Gitee on Jul 03, 2020.
!2810 Add operator adapting in ME for Softsign
Merge pull request !2810 from zhangzheng/softsign
Parents: 391f5e44, 1e51414f

Showing 6 changed files with 94 additions and 1 deletion (+94, -1):
mindspore/ops/_grad/grad_nn_ops.py      (+15, -0)
mindspore/ops/_op_impl/tbe/__init__.py  (+1, -0)
mindspore/ops/_op_impl/tbe/softsign.py  (+37, -0)
mindspore/ops/operations/__init__.py    (+2, -1)
mindspore/ops/operations/nn_ops.py      (+35, -0)
tests/ut/python/ops/test_ops.py         (+4, -0)
mindspore/ops/_grad/grad_nn_ops.py

```diff
@@ -336,6 +336,21 @@ def get_bprop_softplus(self):
     return bprop
 
 
+@bprop_getters.register(P.Softsign)
+def get_bprop_softsign(self):
+    """Grad definition for `Softsign` operation."""
+    mul = P.Mul()
+    absolute = P.Abs()
+    div = P.Div()
+    square = P.Square()
+
+    def bprop(x, out, dout):
+        dx = mul(dout, div(1, square(1 + absolute(x))))
+        return (dx,)
+
+    return bprop
+
+
 @bprop_getters.register(P.Tanh)
 def get_bprop_tanh(self):
     """Grad definition for `Tanh` operation."""
```
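For reference, the new `bprop` encodes the closed-form derivative of Softsign: with softsign(x) = x / (1 + |x|), the derivative is 1 / (1 + |x|)^2, so dx = dout / (1 + |x|)^2. A minimal NumPy sketch (illustrative only, not part of the commit) that checks this against a central finite difference:

```python
import numpy as np

def softsign(x):
    return x / (1.0 + np.abs(x))

def softsign_bprop(x, dout):
    # Mirrors the registered bprop: dx = dout * 1 / (1 + |x|)^2
    return dout / np.square(1.0 + np.abs(x))

x = np.array([0.5, -1.0, 2.0, -3.0])
dout = np.ones_like(x)
eps = 1e-6
numeric = (softsign(x + eps) - softsign(x - eps)) / (2.0 * eps)
assert np.allclose(softsign_bprop(x, dout), numeric, atol=1e-5)
```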
mindspore/ops/_op_impl/tbe/__init__.py

```diff
@@ -122,6 +122,7 @@ from .round import _round_tbe
 from .tanh import _tanh_tbe
 from .tanh_grad import _tanh_grad_tbe
 from .softmax import _softmax_tbe
+from .softsign import _softsign_tbe
 from .softplus import _softplus_tbe
 from .softplus_grad import _softplus_grad_tbe
 from .softmax_grad_ext import _softmax_grad_ext_tbe
```
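The added import matters only for its side effect: loading `.softsign` runs the `@op_info_register(softsign_op_info)` decorator, which records the kernel description even though `_softsign_tbe` itself is never called. A minimal sketch of that pattern (illustrative toy registry, not the framework's actual implementation):

```python
# Decorators execute at import time, so a bare
# `from .softsign import _softsign_tbe` suffices to register the op.
_REGISTRY = {}

def op_info_register(op_info):
    def wrapper(func):
        _REGISTRY[func.__name__] = op_info  # side effect happens on import
        return func
    return wrapper

@op_info_register({"op_name": "Softsign"})
def _softsign_tbe():
    """Softsign TBE register"""
    return

assert "_softsign_tbe" in _REGISTRY
```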
mindspore/ops/_op_impl/tbe/softsign.py (new file, mode 100644)

```python
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Softsign op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType

softsign_op_info = TBERegOp("Softsign") \
    .fusion_type("OPAQUE") \
    .async_flag(False) \
    .binfile_name("softsign.so") \
    .compute_cost(10) \
    .kernel_name("softsign") \
    .partial_flag(True) \
    .op_pattern("formatAgnostic") \
    .input(0, "x", False, "required", "all") \
    .output(0, "y", False, "required", "all") \
    .dtype_format(DataType.F16_Default, DataType.F16_Default) \
    .dtype_format(DataType.F32_Default, DataType.F32_Default) \
    .get_op_info()


@op_info_register(softsign_op_info)
def _softsign_tbe():
    """Softsign TBE register"""
    return
```
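This registration only declares the operator's interface (name, kernel binary, I/O, supported dtype/format pairs); the arithmetic lives in the prebuilt TBE kernel `softsign.so`. As a point of reference, a NumPy sketch of the computation the kernel is expected to perform, restricted to the two registered dtype_format pairs (illustrative, not code from the commit):

```python
import numpy as np

def softsign_reference(x: np.ndarray) -> np.ndarray:
    """Reference semantics of the registered kernel: y = x / (1 + |x|)."""
    if x.dtype not in (np.float16, np.float32):
        raise TypeError("only float16/float32 dtype_format pairs are registered")
    return x / (1 + np.abs(x))

print(softsign_reference(np.array([0, -1, 2, 30, -30], dtype=np.float32)))
# [ 0.        -0.5        0.6666667  0.9677419 -0.9677419]
```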
mindspore/ops/operations/__init__.py

```diff
@@ -68,7 +68,7 @@ from .nn_ops import (LSTM, SGD, Adam, SparseApplyAdam, SparseApplyLazyAdam, Appl
                      MaxPoolWithArgmax, OneHot, Pad, MirrorPad, PReLU, ReLU, ReLU6, ReLUV2, HSwish, HSigmoid,
                      ResizeBilinear, Sigmoid,
                      SigmoidCrossEntropyWithLogits,
-                     SmoothL1Loss, Softmax, Softplus, LRN,
+                     SmoothL1Loss, Softmax, Softsign, Softplus, LRN,
                      SoftmaxCrossEntropyWithLogits, ROIAlign,
                      SparseSoftmaxCrossEntropyWithLogits, Tanh,
                      TopK, BinaryCrossEntropy, SparseApplyAdagrad, LARSUpdate, ApplyFtrl, SparseApplyFtrl,
@@ -115,6 +115,7 @@ __all__ = [
     'SparseApplyLazyAdam',
     'Softplus',
     'Softmax',
+    'Softsign',
     'LogSoftmax',
     'SoftmaxCrossEntropyWithLogits',
     'ROIAlign',
```
mindspore/ops/operations/nn_ops.py

```diff
@@ -224,6 +224,41 @@ class Softplus(PrimitiveWithInfer):
         return input_x
 
 
+class Softsign(PrimitiveWithInfer):
+    r"""
+    Softsign activation function.
+
+    The function is shown as follows:
+
+    .. math::
+        \text{output} = \frac{\text{input_x}}{1 + \abs{\text{input_x}}},
+
+    Inputs:
+        - **input_x** (Tensor) - The input tensor whose data type should be float.
+
+    Outputs:
+        Tensor, with the same type and shape as the `input_x`.
+
+    Examples:
+        >>> input_x = Tensor(np.array([0, -1, 2, 30, -30]), mindspore.float32)
+        >>> softsign = P.Softsign()
+        >>> softsign(input_x)
+        [0. -0.5 0.6666667 0.9677419 -0.9677419]
+    """
+
+    @prim_attr_register
+    def __init__(self):
+        """init Softsign"""
+        self.init_prim_io_names(inputs=['x'], outputs=['output'])
+
+    def infer_shape(self, input_x):
+        return input_x
+
+    def infer_dtype(self, input_x):
+        validator.check_tensor_type_same({'input_x': input_x}, mstype.float_type, self.name)
+        return input_x
+
+
 class ReLU(PrimitiveWithInfer):
     r"""
     Computes ReLU(Rectified Linear Unit) of input tensor element-wise.
```
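Since `Softsign` is elementwise, `infer_shape` passes the input shape through unchanged and `infer_dtype` only checks that the input is a float tensor. A hedged sketch of the docstring example run end to end, assuming a circa-2020 MindSpore build with a backend that has the Softsign kernel available (imports follow the docstring's conventions):

```python
import numpy as np
import mindspore
from mindspore import Tensor
from mindspore.ops import operations as P

# Same input as the docstring example.
input_x = Tensor(np.array([0, -1, 2, 30, -30]), mindspore.float32)
softsign = P.Softsign()
print(softsign(input_x))
# Documented output: [0. -0.5 0.6666667 0.9677419 -0.9677419]
```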
tests/ut/python/ops/test_ops.py

```diff
@@ -1376,6 +1376,10 @@ test_case_nn_ops = [
     ('Softmax', {
         'block': P.Softmax(),
         'desc_inputs': [[5, 5]],
         'desc_bprop': [[5, 5]]}),
+    ('Softsign', {
+        'block': P.Softsign(),
+        'desc_inputs': [[5, 5]],
+        'desc_bprop': [[5, 5]]}),
     ('DepthwiseConv2dNative_1', {
         'block': P.DepthwiseConv2dNative(3, (3, 3), pad_mode="pad", pad=1, stride=2),
         'desc_inputs': [[10, 32, 32, 32], [1, 32, 3, 3]],
```