Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
magicwindyyd
mindspore
提交
183144e1
M
mindspore
项目概览
magicwindyyd
/
mindspore
与 Fork 源项目一致
Fork自
MindSpore / mindspore
通知
1
Star
1
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
M
mindspore
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
183144e1
编写于
5月 20, 2020
作者:
M
mindspore-ci-bot
提交者:
Gitee
5月 20, 2020
浏览文件
操作
浏览文件
下载
差异文件
!1282 support vm for PReLU and PReLUGrad
Merge pull request !1282 from jiangjinsheng/vm_prelu
上级
bca2b1a0
ce09f5e1
变更
5
隐藏空白更改
内联
并排
Showing
5 changed files
with
121 additions
and
0 deletions
+121
-0
mindspore/ccsrc/kernel/tbe/tbe_adapter.cc
mindspore/ccsrc/kernel/tbe/tbe_adapter.cc
+2
-0
mindspore/ops/_op_impl/tbe/__init__.py
mindspore/ops/_op_impl/tbe/__init__.py
+2
-0
mindspore/ops/_op_impl/tbe/prelu.py
mindspore/ops/_op_impl/tbe/prelu.py
+41
-0
mindspore/ops/_op_impl/tbe/prelu_grad.py
mindspore/ops/_op_impl/tbe/prelu_grad.py
+43
-0
tests/ut/python/ops/test_nn_ops.py
tests/ut/python/ops/test_nn_ops.py
+33
-0
未找到文件。
mindspore/ccsrc/kernel/tbe/tbe_adapter.cc
浏览文件 @
183144e1
...
...
@@ -36,6 +36,8 @@ static std::map<string, string> tbe_func_adapter_map = {
{
"re_lu6_grad"
,
"relu6_grad"
},
{
"re_lu"
,
"relu"
},
{
"re_luv2"
,
"relu_v2"
},
{
"p_re_lu"
,
"prelu"
},
{
"p_re_lu_grad"
,
"prelu_grad"
},
{
"tensor_add"
,
"add"
},
{
"reduce_mean"
,
"reduce_mean_d"
},
{
"reduce_max"
,
"reduce_max_d"
},
...
...
mindspore/ops/_op_impl/tbe/__init__.py
浏览文件 @
183144e1
...
...
@@ -186,3 +186,5 @@ from .bn_training_update_v2 import _bn_training_update_v2_tbe
from
.square_sum_all
import
square_sum_all_op_info
from
.pack
import
_pack_tbe
from
.unpack
import
_unpack_tbe
from
.prelu
import
_prelu_tbe
from
.prelu_grad
import
_prelu_grad_tbe
mindspore/ops/_op_impl/tbe/prelu.py
0 → 100644
浏览文件 @
183144e1
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""PReLU op"""
from
mindspore.ops.op_info_register
import
op_info_register
,
TBERegOp
,
DataType
# Registration metadata for the TBE "PReLU" kernel: two required inputs
# (x, weight), one output (y), and every supported dtype/format triple.
prelu_op_info = (TBERegOp("PReLU")
                 .fusion_type("ELEMWISE")
                 .async_flag(False)
                 .binfile_name("prelu.so")
                 .compute_cost(10)
                 .kernel_name("prelu")
                 .partial_flag(True)
                 .input(0, "x", False, "required", "all")
                 .input(1, "weight", False, "required", "all")
                 .output(0, "y", False, "required", "all")
                 # float16 combinations (x, weight, y)
                 .dtype_format(DataType.F16_NCHW, DataType.F16_Default, DataType.F16_NCHW)
                 .dtype_format(DataType.F16_5HD, DataType.F16_5HD, DataType.F16_5HD)
                 .dtype_format(DataType.F16_Default, DataType.F16_Default, DataType.F16_Default)
                 # float32 combinations (x, weight, y)
                 .dtype_format(DataType.F32_NCHW, DataType.F32_Default, DataType.F32_NCHW)
                 .dtype_format(DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD)
                 .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default)
                 .get_op_info())


@op_info_register(prelu_op_info)
def _prelu_tbe():
    """Register the PReLU operator information for the TBE backend."""
    return
mindspore/ops/_op_impl/tbe/prelu_grad.py
0 → 100644
浏览文件 @
183144e1
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""PReLUGrad op"""
from
mindspore.ops.op_info_register
import
op_info_register
,
TBERegOp
,
DataType
# Registration metadata for the TBE "PReLUGrad" kernel: three required
# inputs (grads, features, weights) and two outputs (dx, da).
# NOTE(fix): the second output was previously registered with a duplicate
# index 0; output slots must be numbered consecutively, so "da" is index 1.
prelu_grad_op_info = TBERegOp("PReLUGrad") \
    .fusion_type("ELEMWISE") \
    .async_flag(False) \
    .binfile_name("prelu_grad.so") \
    .compute_cost(10) \
    .kernel_name("prelu_grad") \
    .partial_flag(True) \
    .input(0, "grads", False, "required", "all") \
    .input(1, "features", False, "required", "all") \
    .input(2, "weights", False, "required", "all") \
    .output(0, "dx", False, "required", "all") \
    .output(1, "da", False, "required", "all") \
    .dtype_format(DataType.F32_NCHW, DataType.F32_NCHW, DataType.F32_Default,
                  DataType.F32_NCHW, DataType.F32_Default) \
    .dtype_format(DataType.F32_5HD, DataType.F32_5HD, DataType.F32_5HD,
                  DataType.F32_5HD, DataType.F32_5HD) \
    .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default,
                  DataType.F32_Default, DataType.F32_Default) \
    .get_op_info()


@op_info_register(prelu_grad_op_info)
def _prelu_grad_tbe():
    """Register the PReLUGrad operator information for the TBE backend."""
    return
tests/ut/python/ops/test_nn_ops.py
浏览文件 @
183144e1
...
...
@@ -24,6 +24,7 @@ from mindspore.common.initializer import initializer
from
mindspore.ops
import
Primitive
from
mindspore.ops
import
composite
as
C
from
mindspore.ops
import
operations
as
P
from
mindspore.ops.operations
import
_grad_ops
as
G
from
mindspore.ops
import
prim_attr_register
,
PrimitiveWithInfer
from
..ut_filter
import
non_graph_engine
from
....mindspore_test_framework.mindspore_test
import
mindspore_test
...
...
@@ -456,6 +457,28 @@ class FlattenNet(nn.Cell):
return
self
.
flatten
(
x
)
class PReLUNet(nn.Cell):
    """Minimal network applying P.PReLU with a length-3 ones weight."""

    def __init__(self):
        super(PReLUNet, self).__init__()
        # Channel-wise slope weights: all ones, float32, 3 channels.
        self.w = Tensor(np.ones(3, np.float32))
        self.prelu = P.PReLU()

    def construct(self, x):
        out = self.prelu(x, self.w)
        return out
class PReLUGradNet(nn.Cell):
    """Minimal network wrapping the PReLUGrad primitive for testing."""

    def __init__(self):
        super(PReLUGradNet, self).__init__()
        self.prelu_grad = G.PReLUGrad()

    def construct(self, dout, x, w):
        grads = self.prelu_grad(dout, x, w)
        return grads
test_cases
=
[
(
'SoftMaxGrad'
,
{
'block'
:
SoftMaxGrad
(
VirtualNetWithLoss
(
P
.
Softmax
())),
...
...
@@ -545,6 +568,16 @@ test_cases = [
'block'
:
FlattenNet
(),
'desc_inputs'
:
[
Tensor
(
np
.
ones
([
1
,
2
,
3
,
4
],
np
.
float32
))],
}),
(
'PReLUNet'
,
{
'block'
:
PReLUNet
(),
'desc_inputs'
:
[
Tensor
(
np
.
ones
([
1
,
3
,
4
,
4
],
np
.
float32
))],
}),
(
'PReLUGradNet'
,
{
'block'
:
PReLUGradNet
(),
'desc_inputs'
:
[
Tensor
(
np
.
ones
([
1
,
3
,
4
,
4
],
np
.
float32
)),
Tensor
(
np
.
ones
([
1
,
3
,
4
,
4
],
np
.
float32
)),
Tensor
(
np
.
ones
(
3
,
np
.
float32
))],
}),
]
test_cases_for_verify_exception
=
[
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录