magicwindyyd / mindspore (forked from MindSpore / mindspore)
Commit 661f9dfa
Authored May 09, 2020 by chenzomi
add dropout primtive
Parent: 3d3b9d54

Showing 6 changed files with 102 additions and 7 deletions (+102, -7)
mindspore/nn/layer/basic.py                          +12  -0
mindspore/ops/_grad/grad_nn_ops.py                   +14  -0
mindspore/ops/operations/__init__.py                  +3  -1
mindspore/ops/operations/nn_ops.py                   +65  -0
tests/ut/python/nn/test_dropout.py                    +2  -0
tests/ut/python/pynative_mode/nn/test_dropout.py      +6  -6
mindspore/nn/layer/basic.py

@@ -25,6 +25,7 @@ from mindspore.ops.operations import _inner_ops as inner
from mindspore.common.parameter import Parameter
from mindspore._extends import cell_attr_register
from mindspore.common.api import ms_function
from mindspore import context
from ..cell import Cell
from .activation import get_activation
from ..._checkparam import Validator as validator

@@ -84,8 +85,19 @@ class Dropout(Cell):
        self.dropout_gen_mask = P.DropoutGenMask(Seed0=seed0, Seed1=seed1)
        self.dropout_do_mask = P.DropoutDoMask()
        self.cast = P.Cast()
        self.is_gpu = context.get_context('device_target') in ["GPU"]
        if self.is_gpu:
            self.dropout = P.Dropout(keep_prob)

    def construct(self, x):
        if not self.training:
            return x

        if self.is_gpu:
            out, _ = self.dropout(x)
            return out

        shape = self.get_shape(x)
        dtype = P.DType()(x)
        keep_prob = self.cast(self.keep_prob, dtype)
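For context, a minimal sketch of how the updated layer is exercised, grounded in the unit tests below and assuming the MindSpore API of this commit (nn.Dropout, Tensor, context.set_context; Cell.set_train is assumed for enabling training mode). On a GPU target, construct() takes the new P.Dropout path added above:

# Usage sketch only; mirrors the patterns in tests/ut/python/nn/test_dropout.py.
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor, context
from mindspore import dtype as mstype

context.set_context(device_target="GPU")   # routes construct() through P.Dropout

x = Tensor(np.ones([20, 16, 50]), mstype.float32)
dropout = nn.Dropout(0.8)                  # keep_prob = 0.8, as in the tests
dropout.set_train()                        # with training=False, construct() returns x unchanged
y = dropout(x)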
mindspore/ops/_grad/grad_nn_ops.py

@@ -643,3 +643,17 @@ def get_bprop_binary_cross_entropy(self):
        return dx, zeros_like(y), zeros_like(weight)

    return bprop


@bprop_getters.register(P.Dropout)
def get_bprop_dropout(self):
    """Grad definition for `Dropout` operation."""
    grad = P.DropoutGrad(self.drop_prob)

    def bprop(x, out, dout):
        _, mask = out
        dy, _ = dout
        dx = grad(dy, mask)
        return (dx,)

    return bprop
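The registered bprop reuses the mask produced by the forward primitive: P.Dropout returns (output, mask), and DropoutGrad applies that mask to the incoming gradient dy. A small NumPy illustration of the intended math, assuming the usual inverted-dropout convention (kept elements scaled by 1/keep_prob); this is an explanatory sketch, not the kernel implementation:

import numpy as np

def dropout_forward(x, drop_prob, rng):
    # Forward: random binary mask; kept elements are scaled by 1/keep_prob.
    keep_prob = 1.0 - drop_prob
    mask = (rng.random(x.shape) < keep_prob).astype(x.dtype)
    return x * mask / keep_prob, mask

def dropout_grad(dy, mask, drop_prob):
    # Backward (the role DropoutGrad plays in the bprop above):
    # gradient flows only through kept elements, with the same scaling.
    return dy * mask / (1.0 - drop_prob)

rng = np.random.default_rng(0)
x = np.ones((4, 4), dtype=np.float32)
out, mask = dropout_forward(x, drop_prob=0.5, rng=rng)
dx = dropout_grad(np.ones_like(x), mask, drop_prob=0.5)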
mindspore/ops/operations/__init__.py

The import list and __all__ gain the new Dropout and DropoutGrad primitives:

@@ -52,7 +52,7 @@ from .random_ops import (RandomChoiceWithMask)
from .nn_ops import (LSTM, SGD, Adam, ApplyMomentum, BatchNorm, BiasAdd, Conv2D,
                     DepthwiseConv2dNative,
                     DropoutDoMask, DropoutGrad, Dropout, DropoutGenMask,
                     Flatten, FusedBatchNorm,
                     Gelu, Elu,
                     GetNext, L2Normalize, LayerNorm, L2Loss,

@@ -157,6 +157,8 @@ __all__ = [
    'Shape',
    'DropoutDoMask',
    'DropoutGenMask',
    'DropoutGrad',
    'Dropout',
    'Neg',
    'Slice',
    'DType',
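With the exports in place, the new primitives are reachable through the usual operations alias, as in the docstring examples added below (a trivial sketch, shown only for orientation):

from mindspore.ops import operations as P

dropout = P.Dropout(drop_prob=0.5)
dropout_grad = P.DropoutGrad(drop_prob=0.5)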
mindspore/ops/operations/nn_ops.py

@@ -2762,3 +2762,68 @@ class ConfusionMulGrad(PrimitiveWithInfer):
        validator.check_subclass("input1_dtype", input1_dtype, mstype.tensor, self.name)
        validator.check_subclass("input2_dtype", input2_dtype, mstype.tensor, self.name)
        return input0_dtype, input1_dtype


class Dropout(PrimitiveWithInfer):
    """
    During training, randomly zeroes some of the elements of the input tensor
    with probability drop_prob.

    Args:
        drop_prob (float): probability of an element to be zeroed. Default: 0.

    Inputs:
        - **shape** (tuple[int]) - The shape of target mask.

    Outputs:
        Tensor, the value of generated mask for input shape.

    Examples:
        >>> dropout = P.Dropout(drop_prob=0.5)
        >>> in_x = Tensor((20, 16, 50, 50))
        >>> out = dropout(in_x)
    """

    @prim_attr_register
    def __init__(self, drop_prob=0):
        self.drop_prob = validator.check_number_range("drop_prob", drop_prob, 0, 1, Rel.INC_BOTH, self.name)

    def infer_shape(self, x_shape):
        validator.check_integer("x_shape", len(x_shape), 1, Rel.GE, self.name)
        mask_shape = x_shape
        return x_shape, mask_shape

    def infer_dtype(self, x_dtype):
        valid_types = (mstype.float16, mstype.float32)
        validator.check_tensor_type_same({"x_dtype": x_dtype}, valid_types, self.name)
        return x_dtype, x_dtype


class DropoutGrad(PrimitiveWithInfer):
    """
    The gradient of Dropout. During training, randomly zeroes some of the elements
    of the input tensor with probability drop_prob.

    Args:
        drop_prob (float): probability of an element to be zeroed. Default: 0.

    Inputs:
        - **shape** (tuple[int]) - The shape of target mask.

    Outputs:
        Tensor, the value of generated mask for input shape.

    Examples:
        >>> dropout_grad = P.DropoutGrad(drop_prob=0.5)
        >>> in_x = Tensor((20, 16, 50, 50))
        >>> out = dropout_grad(in_x)
    """

    @prim_attr_register
    def __init__(self, drop_prob=0):
        self.drop_prob = validator.check_number_range("drop_prob", drop_prob, 0, 1, Rel.INC_BOTH, self.name)

    def infer_shape(self, dy_shape, mask_shape):
        return dy_shape

    def infer_dtype(self, dy_dtype, mask_dtype):
        valid_types = (mstype.float16, mstype.float32)
        validator.check_tensor_type_same({"dy_dtype": dy_dtype}, valid_types, self.name)
        return dy_dtype
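Note that, per infer_shape/infer_dtype above and the unpacking in basic.py, the primitive consumes a floating-point tensor and returns a pair (output, mask), while DropoutGrad consumes (dy, mask). A hedged usage sketch, assuming a GPU target where the kernel added by this change is available:

# Sketch of calling the new primitives directly (GPU path only in this commit).
import numpy as np
from mindspore import Tensor, context
from mindspore import dtype as mstype
from mindspore.ops import operations as P

context.set_context(device_target="GPU")

x = Tensor(np.ones([20, 16, 50, 50]), mstype.float32)
dropout = P.Dropout(drop_prob=0.5)
output, mask = dropout(x)            # infer_shape/infer_dtype return a pair

dy = Tensor(np.ones([20, 16, 50, 50]), mstype.float32)
dropout_grad = P.DropoutGrad(drop_prob=0.5)
dx = dropout_grad(dy, mask)          # the path used by get_bprop_dropout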
tests/ut/python/nn/test_dropout.py

@@ -17,7 +17,9 @@ import numpy as np
import pytest

import mindspore.nn as nn
from mindspore import Tensor
from mindspore import context

context.set_context(device_target="Ascend")


def test_check_dropout_3():
    Tensor(np.ones([20, 16, 50]).astype(np.int32))
tests/ut/python/pynative_mode/nn/test_dropout.py

The hunk pins the tests to the Ascend target and drops the pytest.raises(NotImplementedError) wrappers, so the layer is now called directly:

@@ -19,26 +19,26 @@ from mindspore.common.api import _executor
import mindspore.nn as nn
from mindspore import Tensor
from mindspore import dtype as mstype
from mindspore import context

context.set_context(device_target="Ascend")


def test_check_dropout_1():
    x = Tensor(np.ones([20, 16, 50]), mstype.float32)
    m = nn.Dropout(0.8)
    m(x)


def test_check_dropout_2():
    x = Tensor(np.ones([20, 16, 50]), mstype.float32)
    m = nn.Dropout(0.3, seed0=1)
    m(x)


def test_check_dropout_3():
    x = Tensor(np.ones([20, 16, 50]), mstype.float32)
    m = nn.Dropout(0.3, seed0=1, seed1=1)
    m(x)


class Net_Dropout(nn.Cell):
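The excerpt ends inside a Net_Dropout cell whose body is not shown. As a hypothetical illustration only (the class and helper below are not the file's actual contents), this is the common pattern for compiling such a wrapper cell with the _executor imported at the top of this test file:

# Hypothetical sketch; the real Net_Dropout body is not part of this excerpt.
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore import dtype as mstype
from mindspore.common.api import _executor

class DropoutWrapper(nn.Cell):
    """Wraps nn.Dropout so it can be compiled as a graph."""
    def __init__(self):
        super(DropoutWrapper, self).__init__()
        self.dropout = nn.Dropout(0.5)

    def construct(self, x):
        return self.dropout(x)

def compile_dropout_wrapper():
    net = DropoutWrapper()
    x = Tensor(np.ones([20, 16, 50]), mstype.float32)
    _executor.compile(net, x)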