magicwindyyd / mindspore (forked from MindSpore / mindspore)

Commit e70b162f
Authored on Apr 28, 2020 by mindspore-ci-bot; committed by Gitee on Apr 28, 2020

!69 fix doc and normalization bug

Merge pull request !69 from JichenZhao/incuba

Parents: e4f300d8, 56323f54
Showing 2 changed files with 20 additions and 14 deletions (+20 −14):

mindspore/nn/layer/normalization.py    +17 −11
mindspore/train/model.py               +3 −3
mindspore/nn/layer/normalization.py
@@ -17,6 +17,7 @@ from mindspore.ops import operations as P
 from mindspore.ops import functional as F
 from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
+from mindspore.ops.primitive import constexpr
 from mindspore.common.tensor import Tensor
 import mindspore.common.dtype as mstype
 import mindspore.context as context
@@ -41,7 +42,7 @@ class _BatchNorm(Cell):
                  moving_mean_init='zeros',
                  moving_var_init='ones',
                  use_batch_statistics=True,
-                 group=1):
+                 device_num_each_group=1):
         super(_BatchNorm, self).__init__()
         if num_features < 1:
             raise ValueError("num_features must be at least 1")
@@ -60,7 +61,7 @@ class _BatchNorm(Cell):
             gamma_init, num_features), name="gamma", requires_grad=affine)
         self.beta = Parameter(initializer(
             beta_init, num_features), name="beta", requires_grad=affine)
-        self.group = check_int_positive(group)
+        self.group = check_int_positive(device_num_each_group)
         if self.group != 1:
             self.rank_id = get_rank()
             self.rank_size = get_group_size()
@@ -166,6 +167,10 @@ class _BatchNorm(Cell):
         return 'num_features={}, eps={}, momentum={}, gamma={}, beta={}, moving_mean={}, moving_variance={}'.format(
             self.num_features, self.eps, self.momentum, self.gamma, self.beta, self.moving_mean, self.moving_variance)

+@constexpr
+def _channel_check(channel, num_channel):
+    if channel != num_channel:
+        raise ValueError("the input channel is not equal with num_channel")

 class BatchNorm1d(_BatchNorm):
     r"""
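For context on the new helper: the constexpr decorator (imported at the top of this diff) turns a plain Python function into a primitive that is evaluated while the graph is being compiled, so _channel_check can reject a bad channel count before execution starts. A minimal sketch of the same pattern, using a hypothetical check of our own naming rather than anything from the commit:

    from mindspore.ops.primitive import constexpr

    @constexpr
    def _assert_positive(n):
        # Evaluated at graph-compile time in graph mode, so a bad
        # static shape fails fast with an ordinary Python exception.
        if n <= 0:
            raise ValueError("n must be positive")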
@@ -324,7 +329,7 @@ class GlobalBatchNorm(_BatchNorm):
     Args:
         num_features (int): `C` from an expected input of size (N, C, H, W).
-        group (int): The number of device in each group.
+        device_num_each_group (int): The number of device in each group.
         eps (float): A value added to the denominator for numerical stability. Default: 1e-5.
         momentum (float): A floating hyperparameter of the momentum for the
             running_mean and running_var computation. Default: 0.9.
@@ -350,7 +355,7 @@ class GlobalBatchNorm(_BatchNorm):
         Tensor, the normalized, scaled, offset tensor, of shape :math:`(N, C_{out}, H_{out}, W_{out})`.

     Examples:
-        >>> global_bn_op = nn.GlobalBatchNorm(num_features=3, group=4)
+        >>> global_bn_op = nn.GlobalBatchNorm(num_features=3, device_num_each_group=4)
         >>> input = Tensor(np.random.randint(0, 255, [1, 3, 224, 224]), mindspore.float32)
         >>> global_bn_op(input)
     """
@@ -364,7 +369,7 @@ class GlobalBatchNorm(_BatchNorm):
                  moving_mean_init='zeros',
                  moving_var_init='ones',
                  use_batch_statistics=True,
-                 group=1):
+                 device_num_each_group=1):
         super(GlobalBatchNorm, self).__init__(num_features,
                                               eps,
                                               momentum,
@@ -374,8 +379,8 @@ class GlobalBatchNorm(_BatchNorm):
                                               moving_mean_init,
                                               moving_var_init,
                                               use_batch_statistics,
-                                              group)
-        self.group = check_int_positive(group)
+                                              device_num_each_group)
+        self.group = check_int_positive(device_num_each_group)
         if self.group <= 1:
             raise ValueError("the number of group must be greater than 1.")

     def _check_data_dim(self, x):
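Since the keyword is renamed rather than aliased, any existing call that passed group= by name now fails. A minimal migration sketch (it assumes a multi-device communication context is already initialized, which GlobalBatchNorm requires because device_num_each_group must be greater than 1):

    import mindspore.nn as nn

    # Before this commit:
    #   bn = nn.GlobalBatchNorm(num_features=3, group=4)
    # After this commit:
    bn = nn.GlobalBatchNorm(num_features=3, device_num_each_group=4)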
@@ -482,17 +487,17 @@ class GroupNorm(Cell):
         >>> x = Tensor(np.ones([1, 64, 256, 256], np.float32))
         >>> goup_norm_op(x)
     """
-    def __init__(self, num_groups, num_channels, eps=1e-05, affine=True):
+    def __init__(self, num_groups, num_channels, eps=1e-05, affine=True, gamma_init='ones', beta_init='zeros'):
         super(GroupNorm, self).__init__()
         self.num_groups = check_int_positive(num_groups)
         self.num_channels = check_int_positive(num_channels)
         if num_channels % num_groups != 0:
             raise ValueError("num_channels should be divided by num_groups")
-        self.eps = Tensor(check_typename('eps', eps, (float,)), mstype.float32)
+        self.eps = check_typename('eps', eps, (float,))
         self.affine = check_bool(affine)

-        gamma = initializer('ones', [num_channels, 1, 1], mstype.float32)
-        beta = initializer('zeros', [num_channels, 1, 1], mstype.float32)
+        gamma = initializer(gamma_init, [num_channels, 1, 1])
+        beta = initializer(beta_init, [num_channels, 1, 1])
         if self.affine:
             self.gamma = Parameter(gamma, name='gamma')
             self.beta = Parameter(beta, name='beta')
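The new gamma_init and beta_init keywords accept anything that mindspore.common.initializer.initializer accepts, so the affine parameters no longer have to start from the hard-coded 'ones'/'zeros'. A small usage sketch (the shapes and values here are illustrative, not from the commit):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    # Defaults reproduce the old behaviour; any supported initializer
    # name or Initializer instance can be substituted.
    gn = nn.GroupNorm(num_groups=4, num_channels=64,
                      gamma_init='ones', beta_init='zeros')
    x = Tensor(np.ones([1, 64, 16, 16], np.float32))
    out = gn(x)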
@@ -508,6 +513,7 @@ class GroupNorm(Cell):
     def construct(self, x):
         batch, channel, height, width = self.shape(x)
+        _channel_check(channel, self.num_channels)
         x = self.reshape(x, (batch, self.num_groups, channel*height*width/self.num_groups))
         mean = self.reduce_mean(x, 2)
         var = self.reduce_sum(self.square(x - mean), 2) / (channel*height*width/self.num_groups - 1)
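With _channel_check wired into construct, a GroupNorm built for one channel count now rejects an input carrying a different one during graph compilation, instead of proceeding to a silently wrong reshape. A sketch of the failure mode (shapes are made up for illustration):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    gn = nn.GroupNorm(num_groups=4, num_channels=64)
    bad = Tensor(np.ones([1, 32, 8, 8], np.float32))  # 32 != 64 channels
    # gn(bad)  # raises ValueError: the input channel is not equal with num_channel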
mindspore/train/model.py
@@ -83,7 +83,7 @@ class Model:
         >>>         return out
         >>>
         >>> net = Net()
-        >>> loss = nn.SoftmaxCrossEntropyWithLogits()
+        >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
         >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
         >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None)
         >>> dataset = get_dataset()
@@ -395,7 +395,7 @@ class Model:
         Examples:
             >>> dataset = get_dataset()
             >>> net = Net()
-            >>> loss = nn.SoftmaxCrossEntropyWithLogits()
+            >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
             >>> loss_scale_manager = FixedLossScaleManager()
             >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
             >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None, loss_scale_manager=loss_scale_manager)
@@ -518,7 +518,7 @@ class Model:
         Examples:
             >>> dataset = get_dataset()
             >>> net = Net()
-            >>> loss = nn.SoftmaxCrossEntropyWithLogits()
+            >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
             >>> model = Model(net, loss_fn=loss, optimizer=None, metrics={'acc'})
             >>> model.eval(dataset)
     """