Commit 64a21f71 (unverified) in PaddlePaddle / Paddle

modify optimizer tests (#51084)

* modify optimizer tests
* lint

Written on Mar 08, 2023 by 姜永久; committed by GitHub on Mar 08, 2023.
Parent commit: b93e5119
Showing 4 changed files with 134 additions and 6 deletions (+134, -6).
python/paddle/fluid/tests/unittests/test_adam_op.py    +50  -1
python/paddle/fluid/tests/unittests/test_adamax_op.py  +33  -1
python/paddle/fluid/tests/unittests/test_adamw_op.py   +46  -1
python/paddle/fluid/tests/unittests/test_lamb_op.py     +5  -3
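All four files follow the same pattern: the OpTest import is switched from op_test to eager_op_test, a thin Python wrapper around the corresponding in-place paddle._C_ops.*_ call is added, and each affected setUp registers that wrapper through self.python_api and self.python_out_sig so the test framework can exercise the eager (dygraph) code path as well as the static-graph op. A minimal sketch of the pattern, assuming the adam_wrapper defined in test_adam_op.py below is in scope; the hypothetical class name and the test_check_output method are not part of this diff and are shown only to indicate how such a test is normally driven:

from eager_op_test import OpTest  # test helper these files now import


class TestAdamOpSketch(OpTest):  # illustrative name, not from the commit
    def setUp(self):
        self.op_type = "adam"             # static-graph operator under test
        self.python_api = adam_wrapper    # wrapper around paddle._C_ops.adam_ (defined below)
        self.python_out_sig = ['Out']     # output names the eager result is checked against
        # self.inputs / self.attrs / self.outputs are filled in as in the real tests

    def test_check_output(self):
        self.check_output()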
python/paddle/fluid/tests/unittests/test_adam_op.py
@@ -15,7 +15,7 @@
 import unittest
 
 import numpy as np
-from op_test import OpTest
+from eager_op_test import OpTest
 
 import paddle
 import paddle.fluid as fluid
@@ -23,10 +23,47 @@ from paddle.fluid import core
 from paddle.fluid.op import Operator
 
 
+def adam_wrapper(
+    param,
+    grad,
+    LearningRate,
+    moment1,
+    moment2,
+    beta1_pow,
+    beta2_pow,
+    master_weight=None,
+    find_inf=None,
+    beta1=0.78,
+    beta2=0.836,
+    epsilon=1e-4,
+    lazy_mode=False,
+):
+    _, _, _, _, _, _ = paddle._C_ops.adam_(
+        param,
+        grad,
+        LearningRate,
+        moment1,
+        moment2,
+        beta1_pow,
+        beta2_pow,
+        master_weight,
+        find_inf,
+        beta1,
+        beta2,
+        epsilon,
+        lazy_mode,
+        1000,
+        False,
+        False,
+    )
+
+
 class TestAdamOp1(OpTest):
     def setUp(self):
         '''Test Adam Op with supplied attributes'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -73,6 +110,8 @@ class TestAdamOp2(OpTest):
     def setUp(self):
         '''Test Adam Op with supplied attributes'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         self.set_shape()
         param = np.random.uniform(-1, 1, self.shape).astype("float32")
         grad = np.random.uniform(-1, 1, self.shape).astype("float32")
@@ -122,6 +161,8 @@ class TestAdamOpMultipleSteps(OpTest):
     def setUp(self):
         '''Test Adam Operator with supplied attributes'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         self.num_steps = 10
 
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -414,6 +455,8 @@ class TestAdamOpBetaVariable(OpTest):
     def setUp(self):
         '''Test Adam Op with beta as Variable'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -459,6 +502,8 @@ class TestAdamOpBetaEpsilonVariable(OpTest):
     def setUp(self):
         '''Test Adam Op with beta/epsilon as Variable'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -505,6 +550,8 @@ class TestAdamOpWithGlobalBetaPow(OpTest):
     def setUp(self):
         '''Test Adam Op with global_beta_pow'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -554,6 +601,8 @@ class TestAdamOpWithSkipUpdate(OpTest):
     def setUp(self):
         '''Test Adam Op with global_beta_pow'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
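The adam_wrapper added above forwards its arguments to the in-place eager kernel and discards the returned tuple; the test framework then reads the mutated tensors. As a rough standalone sketch (not part of this commit, assuming a Paddle build where paddle._C_ops.adam_ is the eager binding used above), the same call can be driven directly on dygraph tensors; shapes and hyper-parameter values here are illustrative only:

import numpy as np
import paddle

param = paddle.to_tensor(np.random.uniform(-1, 1, (102, 105)).astype("float32"))
grad = paddle.to_tensor(np.random.uniform(-1, 1, (102, 105)).astype("float32"))
moment1 = paddle.zeros_like(param)
moment2 = paddle.zeros_like(param)
lr = paddle.to_tensor([0.004], dtype="float32")
beta1_pow = paddle.to_tensor([0.78], dtype="float32")
beta2_pow = paddle.to_tensor([0.836], dtype="float32")

# Same positional layout as adam_wrapper: the optional master_weight/find_inf
# inputs are left as None, and the trailing constants match those hard-coded
# in the wrapper.
paddle._C_ops.adam_(
    param, grad, lr, moment1, moment2, beta1_pow, beta2_pow,
    None, None,            # master_weight, find_inf
    0.78, 0.836, 1e-4,     # beta1, beta2, epsilon (wrapper defaults)
    False,                 # lazy_mode
    1000, False, False,    # remaining attributes, as in adam_wrapper
)
# param, moment1, moment2, beta1_pow and beta2_pow now hold the updated values.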
python/paddle/fluid/tests/unittests/test_adamax_op.py
@@ -15,7 +15,33 @@
 import unittest
 
 import numpy as np
-from op_test import OpTest
+from eager_op_test import OpTest
+
+import paddle
+
+
+def adamx_wrapper(
+    param,
+    grad,
+    lr,
+    moment,
+    inf_norm,
+    beta1_pow=None,
+    beta1=0.78,
+    beta2=0.899,
+    epsilon=1e-5,
+):
+    return paddle._C_ops.adamax_(
+        param,
+        grad,
+        lr,
+        moment,
+        inf_norm,
+        beta1_pow,
+        beta1,
+        beta2,
+        epsilon,
+    )
 
 import paddle
@@ -24,6 +50,8 @@ class TestAdamaxOp1(OpTest):
     def setUp(self):
         '''Test Adamax Operator with supplied attributes'''
         self.op_type = "adamax"
+        self.python_api = adamx_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -66,6 +94,8 @@ class TestAdamaxOp2(OpTest):
     def setUp(self):
         self.op_type = "adamax"
+        self.python_api = adamx_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -104,6 +134,8 @@ class TestAdamaxOpMultipleSteps(OpTest):
     def setUp(self):
         '''Test Adamax Operator with supplied attributes'''
         self.op_type = "adamax"
+        self.python_api = adamx_wrapper
+        self.python_out_sig = ['Out']
         self.num_steps = 10
 
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
python/paddle/fluid/tests/unittests/test_adamw_op.py
@@ -17,7 +17,7 @@ import unittest
 from functools import partial
 
 import numpy as np
-from op_test import OpTest
+from eager_op_test import OpTest
 
 import paddle
 import paddle.fluid as fluid
@@ -60,10 +60,53 @@ def adamw_step(inputs, attributes):
     return param_out, moment1_out, moment2_out
 
 
+def adamw_wrapper(
+    param,
+    grad,
+    lr,
+    moment1,
+    moment2,
+    beta1_pow,
+    beta2_pow,
+    master_weight=None,
+    found_inf=None,
+    beta1=0.78,
+    beta2=0.836,
+    epsilon=1e-4,
+    lr_ratio=1.0,
+    weight_decay=0.01,
+    with_decay=True,
+    lazy_mode=False,
+):
+    _, _, _, _, _, _ = paddle._C_ops.adamw_(
+        param,
+        grad,
+        lr,
+        moment1,
+        moment2,
+        beta1_pow,
+        beta2_pow,
+        master_weight,
+        found_inf,
+        beta1,
+        beta2,
+        epsilon,
+        lr_ratio,
+        weight_decay,
+        with_decay,
+        lazy_mode,
+        1000,
+        False,
+        False,
+    )
+
+
 class TestAdamW(OpTest):
     def setUp(self):
         '''Test AdamW Op with supplied attributes'''
         self.op_type = "adamw"
+        self.python_api = adamw_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -118,6 +161,8 @@ class TestAdamW2(OpTest):
     def setUp(self):
         '''Test AdamW Op with supplied attributes'''
         self.op_type = "adamw"
+        self.python_api = adamw_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (2, 2)).astype("float32")
         grad = np.random.uniform(-1, 1, (2, 2)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (2, 2)).astype("float32")
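Relative to adam_wrapper, the AdamW wrapper threads three extra arguments through to paddle._C_ops.adamw_: lr_ratio, the decay coefficient weight_decay, and a with_decay switch. A small sketch, assuming the adamw_wrapper defined above in test_adamw_op.py is in scope; turning decay off like this is illustrative and not something these tests do:

import numpy as np
import paddle

param = paddle.to_tensor(np.random.uniform(-1, 1, (2, 2)).astype("float32"))
grad = paddle.to_tensor(np.random.uniform(-1, 1, (2, 2)).astype("float32"))
moment1 = paddle.zeros_like(param)
moment2 = paddle.zeros_like(param)
lr = paddle.to_tensor([0.004], dtype="float32")
beta1_pow = paddle.to_tensor([0.78], dtype="float32")
beta2_pow = paddle.to_tensor([0.836], dtype="float32")

# With the decoupled decay disabled, the update reduces to a plain Adam step.
adamw_wrapper(
    param, grad, lr, moment1, moment2, beta1_pow, beta2_pow,
    weight_decay=0.0, with_decay=False,
)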
python/paddle/fluid/tests/unittests/test_lamb_op.py
@@ -15,7 +15,7 @@
 import unittest
 
 import numpy as np
-from op_test import OpTest
+from eager_op_test import OpTest
 
 import paddle
 from paddle.fluid import core
@@ -32,6 +32,8 @@ def lamb_wrapper(
     moment2,
     beta1Pow,
     beta2Pow,
+    master_weight=None,
+    found_inf=None,
     epsilon=1e-8,
     beta1=0.9,
     beta2=0.999,
@@ -45,8 +47,8 @@ def lamb_wrapper(
         moment2,
         beta1Pow,
         beta2Pow,
-        None,
-        None,
+        master_weight,
+        found_inf,
         weight_decay,
         beta1,
         beta2,