Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
magicwindyyd
mindspore
提交
39c19665
M
mindspore
项目概览
magicwindyyd
/
mindspore
与 Fork 源项目一致
Fork自
MindSpore / mindspore
通知
1
Star
1
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
M
mindspore
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
39c19665
编写于
5月 28, 2020
作者:
M
mindspore-ci-bot
提交者:
Gitee
5月 28, 2020
浏览文件
操作
浏览文件
下载
差异文件
!1488 add vm ops: Asin, AsinGrad, Asinh, AsinhGrad
Merge pull request !1488 from fangzehua/fzh_edit
上级
19ce0c37
c0b8a901
变更
10
隐藏空白更改
内联
并排
Showing
10 changed files
with
317 additions
and
23 deletions
+317
-23
mindspore/ops/_grad/grad_math_ops.py
mindspore/ops/_grad/grad_math_ops.py
+22
-0
mindspore/ops/_op_impl/tbe/__init__.py
mindspore/ops/_op_impl/tbe/__init__.py
+4
-0
mindspore/ops/_op_impl/tbe/asin.py
mindspore/ops/_op_impl/tbe/asin.py
+37
-0
mindspore/ops/_op_impl/tbe/asin_grad.py
mindspore/ops/_op_impl/tbe/asin_grad.py
+43
-0
mindspore/ops/_op_impl/tbe/asinh.py
mindspore/ops/_op_impl/tbe/asinh.py
+37
-0
mindspore/ops/_op_impl/tbe/asinh_grad.py
mindspore/ops/_op_impl/tbe/asinh_grad.py
+43
-0
mindspore/ops/operations/__init__.py
mindspore/ops/operations/__init__.py
+4
-1
mindspore/ops/operations/_grad_ops.py
mindspore/ops/operations/_grad_ops.py
+39
-0
mindspore/ops/operations/math_ops.py
mindspore/ops/operations/math_ops.py
+80
-22
tests/ut/python/ops/test_ops.py
tests/ut/python/ops/test_ops.py
+8
-0
未找到文件。
mindspore/ops/_grad/grad_math_ops.py
浏览文件 @
39c19665
...
...
@@ -770,6 +770,28 @@ def get_bprop_sin(self):
return
bprop
@bprop_getters.register(P.Asin)
def get_bprop_asin(self):
    """Build the backward function for the `Asin` primitive.

    Gradient computation is delegated to the dedicated `AsinGrad`
    kernel, which takes the forward input `x` together with the
    incoming gradient `dout`.
    """
    grad_op = G.AsinGrad()

    def bprop(x, out, dout):
        # NOTE(review): AsinGrad consumes the forward *input* x here
        # (unlike AsinhGrad, which consumes the forward output).
        return (grad_op(x, dout),)

    return bprop
@bprop_getters.register(P.Asinh)
def get_bprop_asinh(self):
    """Build the backward function for the `Asinh` primitive.

    Gradient computation is delegated to the dedicated `AsinhGrad`
    kernel.
    """
    grad_op = G.AsinhGrad()

    def bprop(x, out, dout):
        # NOTE(review): AsinhGrad is fed the forward *output* `out`,
        # not the input `x` — this matches the kernel's (y, dy)
        # signature registered in tbe/asinh_grad.py.
        return (grad_op(out, dout),)

    return bprop
@
bprop_getters
.
register
(
P
.
Cos
)
def
get_bprop_cos
(
self
):
"""Grad definition for `Cos` operation."""
...
...
mindspore/ops/_op_impl/tbe/__init__.py
浏览文件 @
39c19665
...
...
@@ -208,3 +208,7 @@ from .bitwise_xor import bitwise_xor_op_info
from
.reduce_all
import
_reduce_all_tbe
from
.sparse_apply_adagrad
import
_sparse_apply_adagrad_tbe
from
.unsorted_segment_min
import
_unsorted_segment_min_tbe
from
.asin
import
_asin_tbe
from
.asin_grad
import
_asin_grad_tbe
from
.asinh
import
_asinh_tbe
from
.asinh_grad
import
_asinh_grad_tbe
mindspore/ops/_op_impl/tbe/asin.py
0 → 100644
浏览文件 @
39c19665
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Asin op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType

# TBE registration record for the element-wise Asin kernel.
# The op is format-agnostic and supports fp16/fp32 in 5HD layout.
asin_op_info = (TBERegOp("Asin")
                .fusion_type("ELEMWISE")
                .async_flag(False)
                .binfile_name("asin.so")
                .compute_cost(10)
                .kernel_name("asin")
                .partial_flag(True)
                .op_pattern("formatAgnostic")
                .input(0, "x", False, "required", "all")
                .output(0, "y", False, "required", "all")
                .dtype_format(DataType.F16_5HD, DataType.F16_5HD)
                .dtype_format(DataType.F32_5HD, DataType.F32_5HD)
                .get_op_info())


@op_info_register(asin_op_info)
def _asin_tbe():
    """Asin TBE register"""
    return
mindspore/ops/_op_impl/tbe/asin_grad.py
0 → 100644
浏览文件 @
39c19665
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""AsinGrad op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType

# TBE registration record for the element-wise AsinGrad kernel.
# Inputs are the forward output `y` and the incoming gradient `dy`;
# the output `z` is the propagated gradient.
# FIX: the need-compile flag of both inputs was `None`; every sibling
# registration (asin, asinh, asinh_grad) passes `False` — use `False`
# here as well for consistency.
asin_grad_op_info = TBERegOp("AsinGrad") \
    .fusion_type("ELEMWISE") \
    .async_flag(False) \
    .binfile_name("asin_grad.so") \
    .compute_cost(10) \
    .kernel_name("asin_grad") \
    .partial_flag(True) \
    .input(0, "y", False, "required", "all") \
    .input(1, "dy", False, "required", "all") \
    .output(0, "z", False, "required", "all") \
    .dtype_format(DataType.F16_5HD, DataType.F16_5HD, DataType.F16_5HD) \
    .dtype_format(DataType.F16_FracZ, DataType.F16_FracZ, DataType.F16_FracZ) \
    .dtype_format(DataType.F16_C1HWNCoC0, DataType.F16_C1HWNCoC0, DataType.F16_C1HWNCoC0) \
    .dtype_format(DataType.F16_Default, DataType.F16_Default, DataType.F16_Default) \
    .dtype_format(DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD) \
    .dtype_format(DataType.F32_FracZ, DataType.F32_FracZ, DataType.F32_FracZ) \
    .dtype_format(DataType.F32_C1HWNCoC0, DataType.F32_C1HWNCoC0, DataType.F32_C1HWNCoC0) \
    .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default) \
    .get_op_info()


@op_info_register(asin_grad_op_info)
def _asin_grad_tbe():
    """AsinGrad TBE register"""
    return
mindspore/ops/_op_impl/tbe/asinh.py
0 → 100644
浏览文件 @
39c19665
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Asinh op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType

# TBE registration record for the element-wise Asinh kernel.
# FIX: module docstring said "Asin op" (copy-paste from asin.py);
# this module registers Asinh.
asinh_op_info = TBERegOp("Asinh") \
    .fusion_type("ELEMWISE") \
    .async_flag(False) \
    .binfile_name("asinh.so") \
    .compute_cost(10) \
    .kernel_name("asinh") \
    .partial_flag(True) \
    .op_pattern("formatAgnostic") \
    .input(0, "x", False, "required", "all") \
    .output(0, "y", False, "required", "all") \
    .dtype_format(DataType.F16_5HD, DataType.F16_5HD) \
    .dtype_format(DataType.F32_5HD, DataType.F32_5HD) \
    .get_op_info()


@op_info_register(asinh_op_info)
def _asinh_tbe():
    """Asinh TBE register"""
    return
mindspore/ops/_op_impl/tbe/asinh_grad.py
0 → 100644
浏览文件 @
39c19665
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""AsinhGrad op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType

# TBE registration record for the element-wise AsinhGrad kernel.
# Inputs are the forward output `y` and the incoming gradient `dy`;
# the output `z` is the propagated gradient. fp16/fp32 are accepted
# in 5HD, FracZ, C1HWNCoC0 and default layouts.
asinh_grad_op_info = (TBERegOp("AsinhGrad")
                      .fusion_type("ELEMWISE")
                      .async_flag(False)
                      .binfile_name("asinh_grad.so")
                      .compute_cost(10)
                      .kernel_name("asinh_grad")
                      .partial_flag(True)
                      .input(0, "y", False, "required", "all")
                      .input(1, "dy", False, "required", "all")
                      .output(0, "z", False, "required", "all")
                      .dtype_format(DataType.F16_5HD, DataType.F16_5HD, DataType.F16_5HD)
                      .dtype_format(DataType.F16_FracZ, DataType.F16_FracZ, DataType.F16_FracZ)
                      .dtype_format(DataType.F16_C1HWNCoC0, DataType.F16_C1HWNCoC0, DataType.F16_C1HWNCoC0)
                      .dtype_format(DataType.F16_Default, DataType.F16_Default, DataType.F16_Default)
                      .dtype_format(DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD)
                      .dtype_format(DataType.F32_FracZ, DataType.F32_FracZ, DataType.F32_FracZ)
                      .dtype_format(DataType.F32_C1HWNCoC0, DataType.F32_C1HWNCoC0, DataType.F32_C1HWNCoC0)
                      .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default)
                      .get_op_info())


@op_info_register(asinh_grad_op_info)
def _asinh_grad_tbe():
    """AsinhGrad TBE register"""
    return
mindspore/ops/operations/__init__.py
浏览文件 @
39c19665
...
...
@@ -39,7 +39,8 @@ from .debug_ops import (ImageSummary, InsertGradientOf, HookBackward, ScalarSumm
TensorSummary
,
HistogramSummary
,
Print
)
from
.control_ops
import
ControlDepend
,
GeSwitch
,
Merge
from
.inner_ops
import
ScalarCast
from
.math_ops
import
(
Abs
,
ACos
,
AddN
,
AssignAdd
,
AssignSub
,
Atan2
,
BatchMatMul
,
BitwiseAnd
,
BitwiseOr
,
BitwiseXor
,
from
.math_ops
import
(
Abs
,
ACos
,
Asin
,
Asinh
,
AddN
,
AssignAdd
,
AssignSub
,
Atan2
,
BatchMatMul
,
BitwiseAnd
,
BitwiseOr
,
BitwiseXor
,
ReduceMax
,
ReduceMin
,
ReduceMean
,
ReduceSum
,
ReduceAll
,
ReduceProd
,
CumProd
,
Cos
,
Div
,
Equal
,
EqualCount
,
Exp
,
Erf
,
Erfc
,
Floor
,
FloorDiv
,
FloorMod
,
Acosh
,
Greater
,
GreaterEqual
,
Less
,
LessEqual
,
Log
,
Log1p
,
LogicalAnd
,
...
...
@@ -239,6 +240,7 @@ __all__ = [
'FloorDiv'
,
'FloorMod'
,
'Acosh'
,
'Asinh'
,
"PReLU"
,
"Cos"
,
"ACos"
,
...
...
@@ -249,6 +251,7 @@ __all__ = [
'AssignAdd'
,
'AssignSub'
,
"Sin"
,
"Asin"
,
"LSTM"
,
"Abs"
,
"BinaryCrossEntropy"
,
...
...
mindspore/ops/operations/_grad_ops.py
浏览文件 @
39c19665
...
...
@@ -76,6 +76,45 @@ class AcoshGrad(PrimitiveWithInfer):
return
x
class AsinGrad(PrimitiveWithInfer):
    """
    Computes AsinGrad of input element-wise.

    Returns:
        Tensor, has the same type as input.
    """

    @prim_attr_register
    def __init__(self):
        """Init AsinGrad"""

    def infer_shape(self, x, dout):
        # The gradient must match the forward input shape exactly.
        validator.check("x shape", x, "dout shape", dout, Rel.EQ, self.name)
        return x

    def infer_dtype(self, x, dout):
        # Both tensors must share one numeric dtype.
        validator.check_tensor_type_same({"x": x, "dout": dout},
                                         mstype.number_type, self.name)
        return x
class AsinhGrad(PrimitiveWithInfer):
    """Performs grad of Asinh operation."""

    @prim_attr_register
    def __init__(self):
        """init AsinhGrad"""

    def infer_shape(self, x, dout):
        # The gradient must match the forward input shape exactly.
        validator.check("x shape", x, "dout shape", dout, Rel.EQ, self.name)
        return x

    def infer_dtype(self, x, dout):
        # Both tensors must share one numeric dtype.
        validator.check_tensor_type_same({"x": x, "dout": dout},
                                         mstype.number_type, self.name)
        return x
class
BatchNormGrad
(
PrimitiveWithInfer
):
"""Performs grad of BatchNorm operation."""
...
...
mindspore/ops/operations/math_ops.py
浏览文件 @
39c19665
...
...
@@ -1336,8 +1336,7 @@ class Acosh(PrimitiveWithInfer):
Compute inverse hyperbolic cosine of x element-wise.
Inputs:
- **input_x** (Tensor) - The shape of tensor is :math:`(x_1, x_2, ..., x_R)`,
and the data type of 'input_x' is number, the element in 'input_x' should be greater than or equal to 1.
- **input_x** (Tensor) - The shape of tensor is :math:`(x_1, x_2, ..., x_R)`.
Outputs:
Tensor, has the same shape as `input_x`.
...
...
@@ -1352,12 +1351,42 @@ class Acosh(PrimitiveWithInfer):
def
__init__
(
self
):
"""init Acosh"""
def
infer_shape
(
self
,
x
):
return
x
def
infer_shape
(
self
,
x_shape
):
return
x_shape
def
infer_dtype
(
self
,
x_dtype
):
validator
.
check_tensor_type_same
({
'x'
:
x_dtype
},
mstype
.
number_type
,
self
.
name
)
return
x_dtype
def
infer_dtype
(
self
,
x
):
validator
.
check_tensor_type_same
({
'x'
:
x
},
mstype
.
number_type
,
self
.
name
)
return
x
class Asinh(PrimitiveWithInfer):
    """
    Compute inverse hyperbolic sine of x element-wise.

    Inputs:
        - **input_x** (Tensor) - The shape of tensor is :math:`(x_1, x_2, ..., x_R)`.

    Outputs:
        Tensor, has the same shape as `input_x`.

    Examples:
        >>> asinh = P.Asinh()
        >>> input_x = Tensor(np.array([-5.0, 1.5, 3.0, 100.0]), mindspore.float32)
        >>> output = asinh(input_x)
        [-2.3212, 1.1976, 1.8184, 5.2983]
    """
    # FIX: docstring previously said "inverse hyperbolic cosine"
    # (copied from Acosh); Asinh is the inverse hyperbolic sine, as the
    # example values confirm (asinh(-5.0) = -2.3124...).

    @prim_attr_register
    def __init__(self):
        """init Asinh"""

    def infer_shape(self, x_shape):
        # Element-wise op: output shape equals input shape.
        return x_shape

    def infer_dtype(self, x_dtype):
        validator.check_tensor_type_same({'x': x_dtype}, mstype.number_type, self.name)
        return x_dtype
class
_LogicBinaryOp
(
_BinaryOp
):
...
...
@@ -1927,12 +1956,12 @@ class Cos(PrimitiveWithInfer):
def
__init__
(
self
):
"""init Cos"""
def
infer_shape
(
self
,
x
):
return
x
def
infer_shape
(
self
,
x
_shape
):
return
x
_shape
def
infer_dtype
(
self
,
x
):
validator
.
check_tensor_type_same
({
'x'
:
x
},
mstype
.
number_type
,
self
.
name
)
return
x
def
infer_dtype
(
self
,
x
_dtype
):
validator
.
check_tensor_type_same
({
'x'
:
x
_dtype
},
mstype
.
number_type
,
self
.
name
)
return
x
_dtype
class
ACos
(
PrimitiveWithInfer
):
...
...
@@ -1955,12 +1984,12 @@ class ACos(PrimitiveWithInfer):
def
__init__
(
self
):
"""init ACos"""
def
infer_shape
(
self
,
x
):
return
x
def
infer_shape
(
self
,
x
_shape
):
return
x
_shape
def
infer_dtype
(
self
,
x
):
validator
.
check_tensor_type_same
({
'x'
:
x
},
mstype
.
number_type
,
self
.
name
)
return
x
def
infer_dtype
(
self
,
x
_dtype
):
validator
.
check_tensor_type_same
({
'x'
:
x
_dtype
},
mstype
.
number_type
,
self
.
name
)
return
x
_dtype
class
Sin
(
PrimitiveWithInfer
):
...
...
@@ -1983,12 +2012,41 @@ class Sin(PrimitiveWithInfer):
def
__init__
(
self
):
"""Init Sin."""
def
infer_shape
(
self
,
x
):
return
x
def
infer_shape
(
self
,
x
_shape
):
return
x
_shape
def
infer_dtype
(
self
,
x
):
validator
.
check_tensor_type_same
({
'x'
:
x
},
mstype
.
number_type
,
self
.
name
)
return
x
def
infer_dtype
(
self
,
x_dtype
):
validator
.
check_tensor_type_same
({
'x'
:
x_dtype
},
mstype
.
number_type
,
self
.
name
)
return
x_dtype
class Asin(PrimitiveWithInfer):
    """
    Computes arcsine of input element-wise.

    Inputs:
        - **input_x** (Tensor) - The shape of tensor is :math:`(x_1, x_2, ..., x_R)`.

    Outputs:
        Tensor, has the same shape as `input_x`.

    Examples:
        >>> asin = P.Asin()
        >>> input_x = Tensor(np.array([0.74, 0.04, 0.30, 0.56]), mindspore.float32)
        >>> output = asin(input_x)
        [0.8331, 0.0400, 0.3047, 0.5944]
    """
    # FIX: docstring previously said "Computes arccosine" (copied from
    # ACos); Asin is the arcsine, as the example values confirm
    # (asin(0.74) = 0.8331).

    @prim_attr_register
    def __init__(self):
        """init Asin"""

    def infer_shape(self, x_shape):
        # Element-wise op: output shape equals input shape.
        return x_shape

    def infer_dtype(self, x_dtype):
        validator.check_tensor_type_same({'x': x_dtype}, mstype.number_type, self.name)
        return x_dtype
class
NMSWithMask
(
PrimitiveWithInfer
):
...
...
tests/ut/python/ops/test_ops.py
浏览文件 @
39c19665
...
...
@@ -369,6 +369,14 @@ test_case_math_ops = [
'block'
:
P
.
Sin
(),
'desc_inputs'
:
[[
2
,
3
]],
'desc_bprop'
:
[[
2
,
3
]]}),
(
'Asin'
,
{
'block'
:
P
.
Asin
(),
'desc_inputs'
:
[[
2
,
3
]],
'desc_bprop'
:
[[
2
,
3
]]}),
(
'Asinh'
,
{
'block'
:
P
.
Asinh
(),
'desc_inputs'
:
[[
3
,
4
,
5
]],
'desc_bprop'
:
[[
3
,
4
,
5
]]}),
(
'Reciprocal'
,
{
'block'
:
P
.
Reciprocal
(),
'desc_inputs'
:
[[
2
,
3
,
3
,
5
]],
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录