Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
MegEngine 天元
MegEngine
提交
442b4f6c
MegEngine
项目概览
MegEngine 天元
/
MegEngine
1 年多 前同步成功
通知
403
Star
4705
Fork
582
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
DevOps
流水线
流水线任务
计划
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
MegEngine
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
DevOps
DevOps
流水线
流水线任务
计划
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
流水线任务
提交
Issue看板
提交
442b4f6c
编写于
6月 23, 2021
作者:
M
Megvii Engine Team
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
test(traced_module): add some testcases for traced module
GitOrigin-RevId: 0d6bb20b2b5110b5ecd280ec055bb14aed74ebfc
上级
f2691566
变更
7
隐藏空白更改
内联
并排
Showing
7 changed files
with
147 additions
and
20 deletions
+147
-20
imperative/python/megengine/experimental/traced_module/expr.py
...ative/python/megengine/experimental/traced_module/expr.py
+2
-1
imperative/python/megengine/experimental/traced_module/pytree.py
...ive/python/megengine/experimental/traced_module/pytree.py
+27
-4
imperative/python/test/integration/test_converge.py
imperative/python/test/integration/test_converge.py
+9
-4
imperative/python/test/integration/test_converge_with_gradient_clip.py
...thon/test/integration/test_converge_with_gradient_clip.py
+8
-4
imperative/python/test/integration/test_trace_dump.py
imperative/python/test/integration/test_trace_dump.py
+1
-0
imperative/python/test/unit/module/test_module.py
imperative/python/test/unit/module/test_module.py
+41
-7
imperative/python/test/unit/traced_module/test_jit_trace.py
imperative/python/test/unit/traced_module/test_jit_trace.py
+59
-0
未找到文件。
imperative/python/megengine/experimental/traced_module/expr.py
浏览文件 @
442b4f6c
...
...
@@ -201,7 +201,8 @@ class Apply(Expr):
NodeMixin
.
wrap_safe
(
i
,
Constant
.
make
(
i
))
apply_node
=
cls
.
make
(
opdef
)
for
i
in
inputs
:
apply_node
.
add_input
(
NodeMixin
.
get
(
i
))
assert
isinstance
(
i
,
RawTensor
)
apply_node
.
inputs
.
append
(
NodeMixin
.
get
(
i
))
unset_module_tracing
()
outputs
=
apply
(
opdef
,
*
inputs
)
...
...
imperative/python/megengine/experimental/traced_module/pytree.py
浏览文件 @
442b4f6c
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import
collections
from
typing
import
Callable
,
NamedTuple
SUPPORTED_TYPE
=
{}
...
...
@@ -9,11 +19,22 @@ def register_supported_type(type, flatten, unflatten):
SUPPORTED_TYPE
[
type
]
=
NodeType
(
flatten
,
unflatten
)
def
_dict_flatten
(
inp
):
aux_data
=
[]
results
=
[]
for
key
,
value
in
sorted
(
inp
.
items
()):
results
.
append
(
value
)
aux_data
.
append
(
key
)
return
results
,
aux_data
def
_dict_unflatten
(
inps
,
aux_data
):
return
dict
(
zip
(
aux_data
,
inps
))
register_supported_type
(
list
,
lambda
x
:
(
x
,
None
),
lambda
x
,
aux_data
:
list
(
x
))
register_supported_type
(
tuple
,
lambda
x
:
(
x
,
None
),
lambda
x
,
aux_data
:
list
(
x
))
register_supported_type
(
dict
,
lambda
x
:
(
list
(
x
.
values
()),
list
(
x
.
keys
())),
lambda
x
,
y
:
dict
(
zip
(
y
,
x
))
)
register_supported_type
(
dict
,
_dict_flatten
,
_dict_unflatten
)
register_supported_type
(
slice
,
lambda
x
:
([
x
.
start
,
x
.
stop
,
x
.
step
],
None
),
...
...
@@ -68,6 +89,8 @@ class TreeDef:
class
LeafDef
(
TreeDef
):
def
__init__
(
self
,
type
):
if
not
isinstance
(
type
,
collections
.
abc
.
Sequence
):
type
=
(
type
,)
super
().
__init__
(
type
,
None
,
[])
self
.
num_leaves
=
1
...
...
@@ -77,4 +100,4 @@ class LeafDef(TreeDef):
return
leaves
[
0
]
def
__repr__
(
self
):
return
"Leaf({})"
.
format
(
self
.
type
.
__name__
)
return
"Leaf({})"
.
format
(
", "
.
join
(
t
.
__name__
for
t
in
self
.
type
)
)
imperative/python/test/integration/test_converge.py
浏览文件 @
442b4f6c
...
...
@@ -14,6 +14,7 @@ import megengine as mge
import
megengine.autodiff
as
ad
import
megengine.functional
as
F
from
megengine
import
Tensor
from
megengine.experimental.traced_module
import
trace_module
from
megengine.module
import
Linear
,
Module
from
megengine.optimizer
import
SGD
...
...
@@ -71,8 +72,13 @@ class XORNet(Module):
return
x
def
test_training_converge
():
@
pytest
.
mark
.
parametrize
(
"test_traced_module"
,
[
True
,
False
])
def
test_training_converge
(
test_traced_module
):
net
=
XORNet
()
if
test_training_converge
:
inp
=
Tensor
(
np
.
random
.
random
((
14
,
2
)))
net
=
trace_module
(
net
,
inp
)
opt
=
SGD
(
net
.
parameters
(),
lr
=
0.01
,
momentum
=
0.9
,
weight_decay
=
5e-4
)
gm
=
ad
.
GradManager
().
attach
(
net
.
parameters
())
...
...
@@ -105,9 +111,8 @@ def test_training_converge():
xx
=
xx
.
reshape
((
ngrid
*
ngrid
,
1
))
yy
=
yy
.
reshape
((
ngrid
*
ngrid
,
1
))
data
=
mge
.
tensor
(
np
.
concatenate
((
xx
,
yy
),
axis
=
1
).
astype
(
np
.
float32
))
pred
=
infer
(
data
).
numpy
()
precision
=
calculate_precision
(
data
.
numpy
(),
pred
)
pred
=
infer
(
data
)
precision
=
calculate_precision
(
data
.
numpy
(),
pred
.
numpy
())
assert
precision
==
1.0
,
"Test precision must be high enough, get {}"
.
format
(
precision
)
imperative/python/test/integration/test_converge_with_gradient_clip.py
浏览文件 @
442b4f6c
...
...
@@ -15,6 +15,7 @@ import megengine.autodiff as ad
import
megengine.functional
as
F
import
megengine.optimizer
as
optim
from
megengine
import
Tensor
from
megengine.experimental.traced_module
import
trace_module
from
megengine.jit
import
trace
from
megengine.module
import
Linear
,
Module
from
megengine.optimizer
import
SGD
...
...
@@ -73,8 +74,12 @@ class XORNet(Module):
return
x
def
test_training_converge
():
@
pytest
.
mark
.
parametrize
(
"test_traced_module"
,
[
True
,
False
])
def
test_training_converge
(
test_traced_module
):
net
=
XORNet
()
if
test_traced_module
:
inp
=
Tensor
(
np
.
random
.
random
((
14
,
2
)))
net
=
trace_module
(
net
,
inp
)
opt
=
SGD
(
net
.
parameters
(),
lr
=
0.01
,
momentum
=
0.9
,
weight_decay
=
5e-4
)
gm
=
ad
.
GradManager
().
attach
(
net
.
parameters
())
...
...
@@ -110,9 +115,8 @@ def test_training_converge():
xx
=
xx
.
reshape
((
ngrid
*
ngrid
,
1
))
yy
=
yy
.
reshape
((
ngrid
*
ngrid
,
1
))
data
=
mge
.
tensor
(
np
.
concatenate
((
xx
,
yy
),
axis
=
1
).
astype
(
np
.
float32
))
pred
=
infer
(
data
).
numpy
()
precision
=
calculate_precision
(
data
.
numpy
(),
pred
)
pred
=
infer
(
data
)
precision
=
calculate_precision
(
data
.
numpy
(),
pred
.
numpy
())
print
(
"precision="
,
precision
)
assert
precision
==
1.0
,
"Test precision must be high enough, get {}"
.
format
(
precision
...
...
imperative/python/test/integration/test_trace_dump.py
浏览文件 @
442b4f6c
...
...
@@ -19,6 +19,7 @@ import megengine.module as M
import
megengine.optimizer
as
optim
from
megengine
import
tensor
from
megengine.autodiff
import
GradManager
from
megengine.experimental.traced_module
import
trace_module
from
megengine.jit
import
trace
...
...
imperative/python/test/unit/module/test_module.py
浏览文件 @
442b4f6c
...
...
@@ -15,6 +15,7 @@ import pytest
import
megengine
as
mge
import
megengine.functional
as
F
from
megengine
import
Parameter
,
Tensor
,
tensor
from
megengine.experimental.traced_module
import
TracedModule
,
trace_module
from
megengine.module
import
(
BatchNorm1d
,
BatchNorm2d
,
...
...
@@ -67,8 +68,18 @@ class MyModule(Module):
return
x
def
test_module_api
():
@
pytest
.
mark
.
parametrize
(
"test_traced_module"
,
[
True
,
False
])
def
test_module_api
(
test_traced_module
):
m
=
MyModule
()
if
test_traced_module
:
buff
=
m
.
buff
param
=
m
.
param
m
=
trace_module
(
m
,
Tensor
(
np
.
random
.
random
((
1
,
4
,
16
,
16
))))
assert
"buff"
not
in
m
.
__dict__
assert
"param"
not
in
m
.
__dict__
m
.
buff
=
buff
m
.
param
=
param
assert
list
(
m
.
children
())
==
[
m
.
bn
,
m
.
i
]
assert
list
(
m
.
named_children
())
==
[(
"bn"
,
m
.
bn
),
(
"i"
,
m
.
i
)]
assert
list
(
m
.
modules
())
==
[
m
,
m
.
bn
,
m
.
i
,
m
.
i
.
bn
]
...
...
@@ -141,8 +152,11 @@ def test_module_api():
assert
m
.
bn
.
training
==
False
and
m
.
i
.
bn
.
training
==
False
def
test_module_api_reuse_submodule
():
@
pytest
.
mark
.
parametrize
(
"test_traced_module"
,
[
True
,
False
])
def
test_module_api_reuse_submodule
(
test_traced_module
):
m
=
MyModule
()
if
test_traced_module
:
m
=
trace_module
(
m
,
Tensor
(
np
.
random
.
random
((
1
,
4
,
16
,
16
))))
m
.
h
=
m
.
i
# pylint: disable=attribute-defined-outside-init
assert
list
(
m
.
modules
())
==
[
m
,
m
.
bn
,
m
.
i
,
m
.
i
.
bn
]
assert
list
(
m
.
named_modules
())
==
[
...
...
@@ -153,15 +167,21 @@ def test_module_api_reuse_submodule():
]
def
test_module_api_iterable_stability
():
@
pytest
.
mark
.
parametrize
(
"test_traced_module"
,
[
True
,
False
])
def
test_module_api_iterable_stability
(
test_traced_module
):
m
=
MyModule
()
if
test_traced_module
:
m
=
trace_module
(
m
,
Tensor
(
np
.
random
.
random
((
1
,
4
,
16
,
16
))))
l
=
list
(
m
.
modules
())
for
_
in
range
(
100
):
assert
list
(
m
.
modules
())
==
l
def
test_module_api_hooks
():
@
pytest
.
mark
.
parametrize
(
"test_traced_module"
,
[
True
,
False
])
def
test_module_api_hooks
(
test_traced_module
):
net
=
MyModule
()
if
test_traced_module
:
net
=
trace_module
(
net
,
Tensor
(
np
.
zeros
((
1
,
4
,
1
,
1
))))
pre_hook_num
=
0
post_hook_num
=
0
hooks
=
[]
...
...
@@ -383,11 +403,16 @@ class Simple(Module):
self
.
conv1
.
weight
=
self
.
conv0
.
weight
def
forward
(
self
,
inputs
):
pass
x
=
self
.
conv0
(
inputs
)
y
=
self
.
conv1
(
inputs
)
return
x
+
y
def
test_shared_param
():
@
pytest
.
mark
.
parametrize
(
"test_traced_module"
,
[
True
,
False
])
def
test_shared_param
(
test_traced_module
):
net
=
Simple
()
if
test_traced_module
:
net
=
trace_module
(
net
,
tensor
(
np
.
random
.
random
((
1
,
1
,
8
,
8
))))
assert
net
.
conv0
.
weight
is
net
.
conv1
.
weight
data
=
tensor
(
np
.
random
.
random
((
1
,
1
,
8
,
8
)).
astype
(
np
.
float32
))
np
.
testing
.
assert_allclose
(
net
.
conv0
(
data
).
numpy
(),
net
.
conv1
(
data
).
numpy
())
...
...
@@ -449,15 +474,21 @@ def test_shared_param_1d():
np
.
testing
.
assert_allclose
(
conv0
(
data
).
numpy
(),
conv1
(
data
).
numpy
())
def
test_pickle_module
():
@
pytest
.
mark
.
parametrize
(
"test_traced_module"
,
[
True
,
False
])
def
test_pickle_module
(
test_traced_module
):
data_shape
=
(
2
,
28
)
data
=
tensor
(
np
.
random
.
random
(
data_shape
))
mlp
=
MLP
()
pred_gt
=
mlp
(
data
)
if
test_traced_module
:
mlp
=
trace_module
(
mlp
,
data
)
# pickle before forward
with
BytesIO
()
as
fout
:
mge
.
save
(
mlp
,
fout
)
fout
.
seek
(
0
)
mlp1
=
mge
.
load
(
fout
)
if
test_traced_module
:
assert
type
(
mlp1
)
==
TracedModule
pred0
=
mlp1
(
data
)
pred1
=
mlp
(
data
)
...
...
@@ -467,8 +498,11 @@ def test_pickle_module():
mge
.
save
(
mlp
,
fout
)
fout
.
seek
(
0
)
mlp1
=
mge
.
load
(
fout
)
if
test_traced_module
:
assert
type
(
mlp1
)
==
TracedModule
pred2
=
mlp1
(
data
)
np
.
testing
.
assert_allclose
(
pred_gt
.
numpy
(),
pred1
.
numpy
(),
atol
=
5e-6
)
np
.
testing
.
assert_allclose
(
pred0
.
numpy
(),
pred1
.
numpy
(),
atol
=
5e-6
)
np
.
testing
.
assert_allclose
(
pred0
.
numpy
(),
pred2
.
numpy
(),
atol
=
5e-6
)
...
...
imperative/python/test/unit/traced_module/test_jit_trace.py
0 → 100644
浏览文件 @
442b4f6c
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import
io
import
numpy
as
np
import
megengine.functional
as
F
import
megengine.module
as
M
import
megengine.utils.comp_graph_tools
as
cgtools
from
megengine.experimental.traced_module
import
trace_module
from
megengine.jit
import
trace
from
megengine.module
import
Module
class MyBlock(Module):
    """Small conv + batch-norm block used by the tracing tests."""

    def __init__(self, in_channels, channels):
        super(MyBlock, self).__init__()
        # 3x3 conv, stride 1, padding 1; bias disabled since BN follows.
        self.conv1 = M.Conv2d(in_channels, channels, 3, 1, padding=1, bias=False)
        self.bn1 = M.BatchNorm2d(channels)

    def forward(self, x):
        # conv -> bn -> relu, then shift by 1.
        out = self.conv1(x)
        out = self.bn1(out)
        return F.relu(out) + 1
class MyModule(Module):
    """Two stacked ``MyBlock`` instances: 8 -> 4 -> 2 channels."""

    def __init__(self):
        super(MyModule, self).__init__()
        self.block0 = MyBlock(8, 4)
        self.block1 = MyBlock(4, 2)

    def forward(self, x):
        # Run the input through both blocks in sequence.
        return self.block1(self.block0(x))
def test_jit_trace():
    """A TracedModule must remain jit-traceable and dumpable.

    Verifies that wrapping ``trace_module``'s output in ``jit.trace``
    reproduces the eager result, and that the dumped graph can be
    reloaded and executed via ``cgtools.GraphInference``.
    """
    net = MyModule()
    net.eval()  # freeze BN statistics so repeated runs match
    inp = F.ones((1, 8, 14, 14))
    ref = net(inp)

    traced_net = trace_module(net, inp)
    jit_func = trace(traced_net, capture_as_const=True)
    np.testing.assert_array_equal(jit_func(inp), ref)

    # Dump the traced graph and replay it through the graph runtime.
    buffer = io.BytesIO()
    jit_func.dump(buffer)
    buffer.seek(0)
    graph = cgtools.GraphInference(buffer)
    np.testing.assert_allclose(
        list(graph.run(inp.numpy()).values())[0], ref, atol=1e-6
    )
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录