PaddlePaddle / PaddleSlim
Commit ab25d262, authored Nov 19, 2019 by wanghaoshuang

Merge branch 'fix_prune' into 'develop'

Fix pruner in only_graph mode. See merge request !33

Parents: e8d74b4b, 00ba112c
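For context, `only_graph=True` asks the Pruner to update variable shapes in the program graph without reading or writing any weight tensors, e.g. to estimate the pruned structure cheaply. The sketch below is a minimal standalone illustration in plain NumPy (not PaddleSlim's API; both helper names are hypothetical) of the two paths this commit separates: shape-only arithmetic versus physically pruning a tensor.

    import numpy as np

    def prune_shape_only(shape, ratio, axis=0):
        # only_graph path: derive the pruned shape from the ratio alone.
        pruned_num = int(round(shape[axis] * ratio))
        new_shape = list(shape)
        new_shape[axis] -= pruned_num
        return new_shape, range(pruned_num)

    def prune_tensor(weight, pruned_idx, axis=0):
        # normal path: physically drop the selected slices from the array.
        return np.delete(weight, list(pruned_idx), axis=axis)

    w = np.random.rand(8, 3, 3, 3)                # e.g. a conv2d weight
    shape, idx = prune_shape_only(w.shape, 0.25)  # [6, 3, 3, 3], range(0, 2)
    assert prune_tensor(w, idx).shape[0] == shape[0]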
Showing 1 changed file with 89 additions and 49 deletions.

paddleslim/prune/pruner.py (+89, -49) @ ab25d262
@@ -102,29 +102,49 @@ class Pruner():
         """
         if params[0].name() in self.pruned_list[0]:
             return
-        param_t = scope.find_var(params[0].name()).get_tensor()
-        pruned_idx = self._cal_pruned_idx(
-            params[0].name(), np.array(param_t), ratio, axis=0)
-        for param in params:
-            assert isinstance(param, VarWrapper)
-            param_t = scope.find_var(param.name()).get_tensor()
-            if param_backup is not None and (
-                    param.name() not in param_backup):
-                param_backup[param.name()] = copy.deepcopy(np.array(param_t))
-            pruned_param = self._prune_tensor(
-                np.array(param_t), pruned_idx, pruned_axis=0, lazy=lazy)
-            if not only_graph:
-                param_t.set(pruned_param, place)
-            ori_shape = param.shape()
-            if param_shape_backup is not None and (
-                    param.name() not in param_shape_backup):
-                param_shape_backup[param.name()] = copy.deepcopy(param.shape())
-            new_shape = list(param.shape())
-            new_shape[0] = pruned_param.shape[0]
-            param.set_shape(new_shape)
-            _logger.info("prune [{}] from {} to {}".format(
-                param.name(), ori_shape, new_shape))
-            self.pruned_list[0].append(param.name())
-        return pruned_idx
+        if only_graph:
+            pruned_num = int(round(params[0].shape()[0] * ratio))
+            for param in params:
+                ori_shape = param.shape()
+                if param_backup is not None and (
+                        param.name() not in param_backup):
+                    param_backup[param.name()] = copy.deepcopy(ori_shape)
+                new_shape = list(ori_shape)
+                new_shape[0] -= pruned_num
+                param.set_shape(new_shape)
+                _logger.info("prune [{}] from {} to {}".format(
+                    param.name(), ori_shape, new_shape))
+                self.pruned_list[0].append(param.name())
+            return range(pruned_num)
+        else:
+            param_t = scope.find_var(params[0].name()).get_tensor()
+            pruned_idx = self._cal_pruned_idx(
+                params[0].name(), np.array(param_t), ratio, axis=0)
+            for param in params:
+                assert isinstance(param, VarWrapper)
+                param_t = scope.find_var(param.name()).get_tensor()
+                if param_backup is not None and (
+                        param.name() not in param_backup):
+                    param_backup[param.name()] = copy.deepcopy(
+                        np.array(param_t))
+                pruned_param = self._prune_tensor(
+                    np.array(param_t), pruned_idx, pruned_axis=0, lazy=lazy)
+                param_t.set(pruned_param, place)
+                ori_shape = param.shape()
+                if param_shape_backup is not None and (
+                        param.name() not in param_shape_backup):
+                    param_shape_backup[param.name()] = copy.deepcopy(
+                        param.shape())
+                new_shape = list(param.shape())
+                new_shape[0] = pruned_param.shape[0]
+                param.set_shape(new_shape)
+                _logger.info("prune [{}] from {} to {}".format(
+                    param.name(), ori_shape, new_shape))
+                self.pruned_list[0].append(param.name())
+            return pruned_idx

     def _prune_parameter_by_idx(self,
                                 scope,
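Worth noting: the two branches derive the new 0-axis size differently. The only_graph branch subtracts pruned_num = int(round(shape[0] * ratio)), while the else branch takes the 0-axis size of the tensor actually produced by _prune_tensor. A standalone check (NumPy stand-ins for the scope/tensor machinery) that the two agree when the index selection keeps exactly that count:

    import numpy as np

    w = np.random.rand(10, 16)
    ratio = 0.3
    pruned_num = int(round(w.shape[0] * ratio))       # only_graph branch: 3
    pruned = np.delete(w, range(pruned_num), axis=0)  # stand-in for _prune_tensor
    assert w.shape[0] - pruned_num == pruned.shape[0] == 7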
@@ -151,26 +171,44 @@ class Pruner():
         """
         if params[0].name() in self.pruned_list[pruned_axis]:
             return
-        for param in params:
-            assert isinstance(param, VarWrapper)
-            param_t = scope.find_var(param.name()).get_tensor()
-            if param_backup is not None and (
-                    param.name() not in param_backup):
-                param_backup[param.name()] = copy.deepcopy(np.array(param_t))
-            pruned_param = self._prune_tensor(
-                np.array(param_t), pruned_idx, pruned_axis, lazy=lazy)
-            if not only_graph:
-                param_t.set(pruned_param, place)
-            ori_shape = param.shape()
-            if param_shape_backup is not None and (
-                    param.name() not in param_shape_backup):
-                param_shape_backup[param.name()] = copy.deepcopy(param.shape())
-            new_shape = list(param.shape())
-            new_shape[pruned_axis] = pruned_param.shape[pruned_axis]
-            param.set_shape(new_shape)
-            _logger.info("prune [{}] from {} to {}".format(
-                param.name(), ori_shape, new_shape))
-            self.pruned_list[pruned_axis].append(param.name())
+        if only_graph:
+            pruned_num = len(pruned_idx)
+            for param in params:
+                ori_shape = param.shape()
+                if param_backup is not None and (
+                        param.name() not in param_backup):
+                    param_backup[param.name()] = copy.deepcopy(ori_shape)
+                new_shape = list(ori_shape)
+                new_shape[pruned_axis] -= pruned_num
+                param.set_shape(new_shape)
+                _logger.info("prune [{}] from {} to {}".format(
+                    param.name(), ori_shape, new_shape))
+                self.pruned_list[pruned_axis].append(param.name())
+        else:
+            for param in params:
+                assert isinstance(param, VarWrapper)
+                param_t = scope.find_var(param.name()).get_tensor()
+                if param_backup is not None and (
+                        param.name() not in param_backup):
+                    param_backup[param.name()] = copy.deepcopy(
+                        np.array(param_t))
+                pruned_param = self._prune_tensor(
+                    np.array(param_t), pruned_idx, pruned_axis, lazy=lazy)
+                param_t.set(pruned_param, place)
+                ori_shape = param.shape()
+                if param_shape_backup is not None and (
+                        param.name() not in param_shape_backup):
+                    param_shape_backup[param.name()] = copy.deepcopy(
+                        param.shape())
+                new_shape = list(param.shape())
+                new_shape[pruned_axis] = pruned_param.shape[pruned_axis]
+                param.set_shape(new_shape)
+                _logger.info("prune [{}] from {} to {}".format(
+                    param.name(), ori_shape, new_shape))
+                self.pruned_list[pruned_axis].append(param.name())

     def _forward_search_related_op(self, graph, param):
         """
@@ -500,14 +538,16 @@ class Pruner():
         visited.append(op.idx())
         while len(stack) > 0:
             top_op = stack.pop()
-            for parent in graph.pre_ops(top_op):
-                if parent.idx() not in visited and (not parent.is_bwd_op()):
-                    if ((parent.type() == 'conv2d') or
-                            (parent.type() == 'fc')):
-                        brothers.append(parent)
-                    else:
-                        stack.append(parent)
-                    visited.append(parent.idx())
+            if top_op.type().startswith("elementwise_"):
+                for parent in graph.pre_ops(top_op):
+                    if parent.idx() not in visited and (
+                            not parent.is_bwd_op()):
+                        if ((parent.type() == 'conv2d') or
+                                (parent.type() == 'fc')):
+                            brothers.append(parent)
+                        else:
+                            stack.append(parent)
+                        visited.append(parent.idx())
             for child in graph.next_ops(top_op):
                 if (child.type() != 'conv2d') and (child.type() != 'fc') and (
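The behavioral change in this hunk is the elementwise_ guard: parents of the popped op are now searched for "brother" layers only when that op is an elementwise one (e.g. the elementwise_add joining a residual branch). Elementwise inputs must keep matching channel counts, so conv2d/fc layers feeding the same elementwise op have to be pruned with identical indices, while parents of other op types impose no such constraint. The toy sketch below uses plain dicts instead of PaddleSlim's GraphWrapper and omits the is_bwd_op/next_ops handling, so it illustrates only the guard, not the full search:

    def search_brothers(graph, start_op):
        # collect conv2d/fc "brothers" reachable through elementwise ops
        brothers, stack, visited = [], [start_op], [start_op["idx"]]
        while stack:
            top_op = stack.pop()
            if top_op["type"].startswith("elementwise_"):
                for parent in graph["pre_ops"](top_op):
                    if parent["idx"] not in visited:
                        if parent["type"] in ("conv2d", "fc"):
                            brothers.append(parent)
                        else:
                            stack.append(parent)
                        visited.append(parent["idx"])
        return brothers

    # Residual-style join: two convs feed one elementwise_add, so pruning
    # either conv's output channels must prune the other's the same way.
    conv_a = {"idx": 0, "type": "conv2d"}
    conv_b = {"idx": 1, "type": "conv2d"}
    add_op = {"idx": 2, "type": "elementwise_add"}
    pre = {0: [], 1: [], 2: [conv_a, conv_b]}
    graph = {"pre_ops": lambda op: pre[op["idx"]]}
    print([b["idx"] for b in search_brothers(graph, add_op)])  # [0, 1]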