Commit 3b8bcd5a (unverified)
Repository: 机器未来 / Paddle (forked from PaddlePaddle / Paddle)
Authored by Weilong Wu on Mar 22, 2022; committed via GitHub on Mar 22, 2022
Update unit tests by using _test_eager_guard (#40760)
Parent: aad0ae2a
Showing 2 changed files with 23 additions and 6 deletions (+23, -6)
python/paddle/fluid/tests/unittests/test_imperative_partitial_backward.py  (+7, -1)
python/paddle/fluid/tests/unittests/test_tensor_register_hook.py  (+16, -5)
python/paddle/fluid/tests/unittests/test_imperative_partitial_backward.py

@@ -17,10 +17,11 @@ from __future__ import print_function
 import unittest
 import paddle.fluid as fluid
 import numpy as np
+from paddle.fluid.framework import _test_eager_guard


 class TestImperativePartitialBackward(unittest.TestCase):
-    def test_partitial_backward(self):
+    def func_partitial_backward(self):
         with fluid.dygraph.guard():
             x = np.random.randn(2, 4, 5).astype("float32")
             x = fluid.dygraph.to_variable(x)
@@ -49,6 +50,11 @@ class TestImperativePartitialBackward(unittest.TestCase):
             linear1.clear_gradients()
             linear2.clear_gradients()

+    def test_partitial_backward(self):
+        with _test_eager_guard():
+            self.func_partitial_backward()
+        self.func_partitial_backward()
+

 if __name__ == '__main__':
     unittest.main()
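The diff above follows the convention used throughout this series of eager-mode test updates: the original test body is renamed to a func_* method, and a new test_* wrapper runs that body twice, first under _test_eager_guard() (eager dygraph) and then again in the default mode. A minimal, self-contained sketch of the pattern, with MyCase and func_case as hypothetical names, could look like this:

```python
import unittest

# assumes a Paddle build that exposes _test_eager_guard, as imported in the diff above
from paddle.fluid.framework import _test_eager_guard


class MyCase(unittest.TestCase):  # hypothetical test class illustrating the pattern
    def func_case(self):
        # the actual assertions live here (renamed from the old test_* method)
        self.assertTrue(True)

    def test_case(self):
        # run the same body under eager mode first, then under the default mode
        with _test_eager_guard():
            self.func_case()
        self.func_case()


if __name__ == '__main__':
    unittest.main()
```

This keeps a single set of assertions while exercising both execution modes, which is why the commit only renames the test body and adds the wrapper.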
python/paddle/fluid/tests/unittests/test_tensor_register_hook.py

@@ -20,6 +20,8 @@ import numpy as np
 import paddle
 import paddle.nn as nn
 from paddle.fluid.framework import _test_eager_guard, _in_eager_mode
+import paddle.fluid as fluid
+import paddle.fluid.core as core


 class SimpleNet(nn.Layer):
@@ -445,8 +447,7 @@ class TestTensorRegisterHook(unittest.TestCase):
             self.func_multiple_hooks_for_interior_var()
         self.func_multiple_hooks_for_interior_var()

-    # TODO(wuweilong): enable this case when DoubleGrad in eager mode is ready
-    def test_hook_in_double_grad(self):
+    def func_hook_in_double_grad(self):
         def double_print_hook(grad):
             grad = grad * 2
             print(grad)
@@ -461,10 +462,11 @@ class TestTensorRegisterHook(unittest.TestCase):
         x.register_hook(double_print_hook)

         y = x * x
-
+        fluid.set_flags({'FLAGS_retain_grad_for_all_tensor': False})
         # Since y = x * x, dx = 2 * x
         dx = paddle.grad(
             outputs=[y], inputs=[x], create_graph=True, retain_graph=True)[0]
+        fluid.set_flags({'FLAGS_retain_grad_for_all_tensor': True})

         z = y + dx
         self.assertTrue(x.grad is None)
@@ -475,8 +477,17 @@ class TestTensorRegisterHook(unittest.TestCase):
         # x.gradient() = 2 * x + 2 = 4.0
         # after changed by hook: 8.0
-        z.backward()
-        self.assertTrue(np.array_equal(x.grad.numpy(), np.array([8.])))
+        # TODO(wuweilong): enable this case when DoubleGrad in eager mode is ready
+        if core._in_eager_mode():
+            pass
+        else:
+            z.backward()
+            self.assertTrue(np.array_equal(x.grad.numpy(), np.array([8.])))
+
+    def test_hook_in_double_grad(self):
+        with _test_eager_guard():
+            self.func_hook_in_double_grad()
+        self.func_hook_in_double_grad()

     def func_remove_one_hook_multiple_times(self):
         for device in self.devices:
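The arithmetic in the comments of test_hook_in_double_grad follows from the chain rule: with x = 1, y = x * x and dx = dy/dx = 2x, so z = y + dx gives dz/dx = 2x + 2 = 4; because the registered hook doubles every gradient that reaches x, x.gradient() ends up as 8.0. A minimal standalone sketch of the same check, mirroring the non-eager branch kept by this commit (the tensor setup is an assumption, and it presumes a Paddle build of that era where paddle.fluid.set_flags and this double-grad path behave as in the test):

```python
import numpy as np
import paddle
import paddle.fluid as fluid


def double_print_hook(grad):
    # the hook doubles any gradient that reaches the hooked tensor
    grad = grad * 2
    print(grad)
    return grad


# assumed setup: a scalar input that requires grad
x = paddle.ones(shape=[1], dtype='float32')
x.stop_gradient = False
x.register_hook(double_print_hook)

y = x * x
fluid.set_flags({'FLAGS_retain_grad_for_all_tensor': False})
# Since y = x * x, dx = 2 * x
dx = paddle.grad(outputs=[y], inputs=[x], create_graph=True, retain_graph=True)[0]
fluid.set_flags({'FLAGS_retain_grad_for_all_tensor': True})
z = y + dx

# dz/dx = 2x + 2 = 4 at x = 1; the hook doubles it, so the test expects 8.0
z.backward()
print(np.array_equal(x.grad.numpy(), np.array([8.])))
```

The eager-mode branch of this case is skipped via core._in_eager_mode() until DoubleGrad support in eager mode is ready, as the TODO in the diff notes.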