PaddlePaddle / PaddleClas
Commit 4ae4a4a9 (unverified)
Authored on Aug 18, 2021 by Wei Shengyu; committed via GitHub on Aug 18, 2021

Merge pull request #1116 from weisy11/develop

update save sub layer result

Parents: bf8ca1ff, b4678fd3

Showing 3 changed files with 77 additions and 33 deletions (+77 / -33)
ppcls/arch/backbone/base/theseus_layer.py    +70 -29
ppcls/arch/backbone/legendary_models/vgg.py   +5  -2
ppcls/engine/trainer.py                       +2  -2
ppcls/arch/backbone/base/theseus_layer.py
@@ -12,15 +12,9 @@ class Identity(nn.Layer):
 
 class TheseusLayer(nn.Layer):
-    def __init__(self, *args, return_patterns=None, **kwargs):
+    def __init__(self, *args, **kwargs):
         super(TheseusLayer, self).__init__()
-        self.res_dict = None
-        if return_patterns is not None:
-            self._update_res(return_patterns)
-
-    def forward(self, *input, res_dict=None, **kwargs):
-        if res_dict is not None:
-            self.res_dict = res_dict
+        self.res_dict = {}
 
     # stop doesn't work when stop layer has a parallel branch.
     def stop_after(self, stop_layer_name: str):
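The base class now starts from an empty dict rather than None, and update_res (next hunk) hands that same dict object to every matched sub-layer. A minimal illustration of the aliasing this relies on, with a made-up key name; this snippet is not part of the commit:

model_res = {}                  # what TheseusLayer.__init__ now creates
sub_layer_res = model_res       # what update_res assigns to a matched sub-layer
sub_layer_res["conv2d_0"] = "output captured by the forward post-hook"
assert model_res["conv2d_0"] == "output captured by the forward post-hook"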
@@ -38,33 +32,43 @@ class TheseusLayer(nn.Layer):
                     stop_layer_name)
         return after_stop
 
-    def _update_res(self, return_layers):
+    def update_res(self, return_patterns):
+        if not return_patterns or isinstance(self, WrapLayer):
+            return
         for layer_i in self._sub_layers:
             layer_name = self._sub_layers[layer_i].full_name()
-            for return_pattern in return_layers:
-                if return_layers is not None and re.match(return_pattern,
-                                                          layer_name):
-                    self._sub_layers[layer_i].register_forward_post_hook(
-                        self._save_sub_res_hook)
+            if isinstance(self._sub_layers[layer_i],
+                          (nn.Sequential, nn.LayerList)):
+                self._sub_layers[layer_i] = wrap_theseus(self._sub_layers[
+                    layer_i])
+                self._sub_layers[layer_i].res_dict = self.res_dict
+                self._sub_layers[layer_i].update_res(return_patterns)
+            else:
+                for return_pattern in return_patterns:
+                    if re.match(return_pattern, layer_name):
+                        if not isinstance(self._sub_layers[layer_i],
+                                          TheseusLayer):
+                            self._sub_layers[layer_i] = wrap_theseus(
+                                self._sub_layers[layer_i])
+                        self._sub_layers[layer_i].register_forward_post_hook(
+                            self._sub_layers[layer_i]._save_sub_res_hook)
+                        self._sub_layers[layer_i].res_dict = self.res_dict
+            if isinstance(self._sub_layers[layer_i], TheseusLayer):
+                self._sub_layers[layer_i].res_dict = self.res_dict
+                self._sub_layers[layer_i].update_res(return_patterns)
+
+    def _save_sub_res_hook(self, layer, input, output):
+        self.res_dict[layer.full_name()] = output
 
     def replace_sub(self, layer_name_pattern, replace_function,
                     recursive=True):
-        for k in self._sub_layers.keys():
-            layer_name = self._sub_layers[k].full_name()
+        for layer_i in self._sub_layers:
+            layer_name = self._sub_layers[layer_i].full_name()
             if re.match(layer_name_pattern, layer_name):
-                self._sub_layers[k] = replace_function(self._sub_layers[k])
+                self._sub_layers[layer_i] = replace_function(self._sub_layers[
+                    layer_i])
             if recursive:
-                if isinstance(self._sub_layers[k], TheseusLayer):
-                    self._sub_layers[k].replace_sub(
+                if isinstance(self._sub_layers[layer_i], TheseusLayer):
+                    self._sub_layers[layer_i].replace_sub(
                         layer_name_pattern, replace_function, recursive)
-                elif isinstance(self._sub_layers[k],
-                                nn.Sequential) or isinstance(
-                                    self._sub_layers[k], nn.LayerList):
-                    for kk in self._sub_layers[k]._sub_layers.keys():
-                        self._sub_layers[k]._sub_layers[kk].replace_sub(
-                            layer_name_pattern, replace_function, recursive)
-                else:
-                    pass
+                elif isinstance(self._sub_layers[layer_i],
+                                (nn.Sequential, nn.LayerList)):
+                    for layer_j in self._sub_layers[layer_i]._sub_layers:
+                        self._sub_layers[layer_i]._sub_layers[
+                            layer_j].replace_sub(layer_name_pattern,
+                                                 replace_function, recursive)
 
     '''
     example of replace function:
@@ -78,3 +82,40 @@ class TheseusLayer(nn.Layer):
         return new_conv
     '''
+
+
+class WrapLayer(TheseusLayer):
+    def __init__(self, sub_layer):
+        super(WrapLayer, self).__init__()
+        self.sub_layer = sub_layer
+        self.name = sub_layer.full_name()
+
+    def full_name(self):
+        return self.name
+
+    def forward(self, *inputs, **kwargs):
+        return self.sub_layer(*inputs, **kwargs)
+
+    def update_res(self, return_patterns):
+        if not return_patterns or not isinstance(
+                self.sub_layer, (nn.Sequential, nn.LayerList)):
+            return
+        for layer_i in self.sub_layer._sub_layers:
+            if isinstance(self.sub_layer._sub_layers[layer_i],
+                          (nn.Sequential, nn.LayerList)):
+                self.sub_layer._sub_layers[layer_i] = wrap_theseus(
+                    self.sub_layer._sub_layers[layer_i])
+                self.sub_layer._sub_layers[layer_i].res_dict = self.res_dict
+                self.sub_layer._sub_layers[layer_i].update_res(return_patterns)
+
+            layer_name = self.sub_layer._sub_layers[layer_i].full_name()
+            for return_pattern in return_patterns:
+                if re.match(return_pattern, layer_name):
+                    self.sub_layer._sub_layers[layer_i].res_dict = self.res_dict
+                    self.sub_layer._sub_layers[
+                        layer_i].register_forward_post_hook(
+                            self._sub_layers[layer_i]._save_sub_res_hook)
+
+            if isinstance(self.sub_layer._sub_layers[layer_i], TheseusLayer):
+                self.sub_layer._sub_layers[layer_i].update_res(return_patterns)
+
+
+def wrap_theseus(sub_layer):
+    wrapped_layer = WrapLayer(sub_layer)
+    return wrapped_layer
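The renamed update_res and the _save_sub_res_hook above boil down to matching sub-layer names with re.match and registering a forward post-hook that stores each matched layer's output under its full_name(). A minimal sketch of just that mechanism in plain Paddle, outside TheseusLayer; the toy Sequential model and the "conv2d_.*" pattern are illustrative assumptions, not code from this repository:

import re

import paddle
from paddle import nn

res_dict = {}


def save_sub_res_hook(layer, inputs, output):
    # same idea as TheseusLayer._save_sub_res_hook: key results by full_name()
    res_dict[layer.full_name()] = output


model = nn.Sequential(nn.Conv2D(3, 8, 3), nn.ReLU(), nn.Conv2D(8, 16, 3))

return_patterns = ["conv2d_.*"]  # framework-generated names, e.g. "conv2d_0"
for sub_layer in model.sublayers():
    if any(re.match(p, sub_layer.full_name()) for p in return_patterns):
        sub_layer.register_forward_post_hook(save_sub_res_hook)

out = model(paddle.rand([1, 3, 32, 32]))
print({name: tuple(t.shape) for name, t in res_dict.items()})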
ppcls/arch/backbone/legendary_models/vgg.py
@@ -111,7 +111,7 @@ class VGGNet(TheseusLayer):
         model: nn.Layer. Specific VGG model depends on args.
     """
 
-    def __init__(self, config, stop_grad_layers=0, class_num=1000):
+    def __init__(self, config, stop_grad_layers=0, class_num=1000, return_patterns=None):
         super().__init__()
 
         self.stop_grad_layers = stop_grad_layers
@@ -138,7 +138,7 @@ class VGGNet(TheseusLayer):
         self.fc2 = Linear(4096, 4096)
         self.fc3 = Linear(4096, class_num)
 
-    def forward(self, inputs):
+    def forward(self, inputs, res_dict=None):
         x = self.conv_block_1(inputs)
         x = self.conv_block_2(x)
         x = self.conv_block_3(x)
@@ -152,6 +152,9 @@ class VGGNet(TheseusLayer):
         x = self.relu(x)
         x = self.drop(x)
         x = self.fc3(x)
+        if self.res_dict and res_dict is not None:
+            for res_key in list(self.res_dict):
+                res_dict[res_key] = self.res_dict.pop(res_key)
         return x
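With hooks registered, the new forward(inputs, res_dict=None) signature lets the caller pass in its own dict and read the captured intermediate outputs back after the call, since the added block moves every entry of self.res_dict into it. A hedged usage sketch, assuming PaddleClas is importable, that vgg.py exposes the VGG16 constructor, and that update_res is called explicitly; the pattern string and input shape are illustrative:

import paddle
from ppcls.arch.backbone.legendary_models.vgg import VGG16

model = VGG16()                          # VGGNet subclasses TheseusLayer
model.update_res(["conv_block_4.*"])     # hook sub-layers whose full_name() matches

feats = {}
logits = model(paddle.rand([1, 3, 224, 224]), res_dict=feats)
for name, tensor in feats.items():       # entries moved out of model.res_dict
    print(name, tuple(tensor.shape))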
ppcls/engine/trainer.py
@@ -588,7 +588,7 @@ class Trainer(object):
                 if len(batch) == 3:
                     has_unique_id = True
                     batch[2] = batch[2].reshape([-1, 1]).astype("int64")
-                out = self.model(batch[0], batch[1])
+                out = self.forward(batch)
                 batch_feas = out["features"]
 
                 # do norm
@@ -653,7 +653,7 @@ class Trainer(object):
                 image_file_list.append(image_file)
                 if len(batch_data) >= batch_size or idx == len(image_list) - 1:
                     batch_tensor = paddle.to_tensor(batch_data)
-                    out = self.model(batch_tensor)
+                    out = self.forward([batch_tensor])
                     if isinstance(out, list):
                         out = out[0]
                     result = postprocess_func(out, image_file_list)
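Both trainer changes stop calling self.model directly and route inference through self.forward with a list-shaped batch. Trainer.forward itself is not part of this diff, so the sketch below is only an assumption about the kind of indirection those call sites imply; the class name and branching are invented for illustration:

class _TrainerForwardSketch:
    """Illustrative stand-in only; not the real ppcls.engine.trainer.Trainer."""

    def __init__(self, model):
        self.model = model

    def forward(self, batch):
        # batch is list-like, e.g. [images] or [images, labels], matching the
        # call shapes self.forward(batch) and self.forward([batch_tensor])
        # introduced in this commit.
        if len(batch) >= 2:
            return self.model(batch[0], batch[1])
        return self.model(batch[0])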