Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
magicwindyyd
mindspore
提交
2d2f9ba8
M
mindspore
项目概览
magicwindyyd
/
mindspore
与 Fork 源项目一致
Fork自
MindSpore / mindspore
通知
1
Star
1
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
M
mindspore
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
2d2f9ba8
编写于
5月 21, 2020
作者:
G
guohongzilong
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
fix group parameter code for check
上级
bddd743c
变更
3
隐藏空白更改
内联
并排
Showing
3 changed files
with
11 additions
and
5 deletions
+11
-5
mindspore/nn/optim/lars.py
mindspore/nn/optim/lars.py
+2
-0
mindspore/nn/optim/optimizer.py
mindspore/nn/optim/optimizer.py
+8
-4
tests/ut/python/nn/optim/test_optimizer.py
tests/ut/python/nn/optim/test_optimizer.py
+1
-1
未找到文件。
mindspore/nn/optim/lars.py
浏览文件 @
2d2f9ba8
...
...
@@ -80,6 +80,8 @@ class LARS(Optimizer):
decay_filter
=
lambda
x
:
'LayerNorm'
not
in
x
.
name
and
'bias'
not
in
x
.
name
,
lars_filter
=
lambda
x
:
'LayerNorm'
not
in
x
.
name
and
'bias'
not
in
x
.
name
,
loss_scale
=
1.0
):
super
(
LARS
,
self
).
__init__
(
0.0
,
[
Parameter
(
Tensor
(
0.0
),
name
=
"trivial"
)])
if
optimizer
.
is_group
:
raise
RuntimeError
(
f
"The
{
self
.
cls_name
}
optimizer cannot support group setting."
)
self
.
opt
=
optimizer
self
.
parameters
=
optimizer
.
parameters
self
.
learning_rate
=
optimizer
.
learning_rate
...
...
mindspore/nn/optim/optimizer.py
浏览文件 @
2d2f9ba8
...
...
@@ -81,7 +81,7 @@ class Optimizer(Cell):
raise
ValueError
(
"Optimizer got an empty parameter list."
)
if
not
isinstance
(
parameters
[
0
],
(
dict
,
Parameter
)):
raise
Valu
eError
(
"Only a list of Parameter or dict can be supported."
)
raise
Typ
eError
(
"Only a list of Parameter or dict can be supported."
)
if
isinstance
(
loss_scale
,
int
):
loss_scale
=
float
(
loss_scale
)
...
...
@@ -258,9 +258,9 @@ class Optimizer(Cell):
for
param
in
group_param
[
'params'
]:
validator
.
check_value_type
(
"parameter"
,
param
,
[
Parameter
],
self
.
cls_name
)
if
param
in
params_store
:
if
param
.
name
in
params_store
:
raise
RuntimeError
(
f
"The
{
param
.
name
}
parameter has appeared in parameter groups."
)
params_store
.
append
(
param
)
params_store
.
append
(
param
.
name
)
self
.
group_lr
.
append
(
Parameter
(
lr
,
name
=
"lr_"
+
param
.
name
))
self
.
group_weight_decay
.
append
(
weight_decay_
)
...
...
@@ -298,18 +298,22 @@ class Optimizer(Cell):
Parameter, single `Parameter` or `list[Parameter]` according to the input type.
"""
if
not
isinstance
(
param
,
(
Parameter
,
list
)):
raise
TypeError
(
f
"The
'param'
only support 'Parameter' or 'list' type."
)
raise
TypeError
(
f
"The
parameter
only support 'Parameter' or 'list' type."
)
if
isinstance
(
param
,
list
):
lr
=
[]
for
p
in
param
:
validator
.
check_value_type
(
"parameter"
,
p
,
[
Parameter
],
self
.
cls_name
)
if
p
not
in
self
.
parameters
:
raise
ValueError
(
f
"The parameter
{
p
.
name
}
is not in optimizer."
)
if
self
.
is_group_lr
:
index
=
self
.
parameters
.
index
(
p
)
lr
.
append
(
self
.
learning_rate
[
index
])
else
:
lr
.
append
(
self
.
learning_rate
)
else
:
if
param
not
in
self
.
parameters
:
raise
ValueError
(
f
"The parameter
{
param
.
name
}
is not in optimizer."
)
if
self
.
is_group_lr
:
index
=
self
.
parameters
.
index
(
param
)
lr
=
self
.
learning_rate
[
index
]
...
...
tests/ut/python/nn/optim/test_optimizer.py
浏览文件 @
2d2f9ba8
...
...
@@ -94,7 +94,7 @@ class TestUnsupportParam():
""" TestUnsupportParam definition """
def
test_optim_init
(
self
):
with
pytest
.
raises
(
Valu
eError
):
with
pytest
.
raises
(
Typ
eError
):
Optimizer
(
0.1
,
(
1
,
2
,
3
))
def
test_AdamWightDecay_init
(
self
):
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录