s920243400 / PaddleDetection
Forked from PaddlePaddle / PaddleDetection (in sync with the fork source)
Commit c3d27b15
Authored on Feb 04, 2018 by Kexin Zhao

modify prune.cc for multiple blocks

Parent: dc8390d8
Showing 1 changed file with 57 additions and 14 deletions

paddle/framework/prune.cc  +57 -14
@@ -49,11 +49,28 @@ bool IsTarget(const proto::OpDesc& op_desc) {
   return false;
 }
 
-void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
-                int block_id) {
-  // TODO(tonyyang-svail):
-  //    - will change to use multiple blocks for RNN op and Cond Op
+int GetSubBlockIndex(const proto::OpDesc& op_desc) {
+  for (auto& attr : op_desc.attrs()) {
+    if (attr.type() == proto::AttrType::BLOCK) {
+      PADDLE_ENFORCE(attr.has_block_idx());
+      return attr.block_idx();
+    }
+  }
+  return -1;
+}
+
+bool HasSubBlock(const proto::OpDesc& op_desc) {
+  return GetSubBlockIndex(op_desc) > 0;
+}
+
+// block_id is the idx of the current block in the input desc
+// parent_block_id is the idx of the parent of the current block
+// in the output desc, -1 means the current block is global block
+// dependent_vars is passed recursively from the parent block to
+// the child block to help pruning
+void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
+                int block_id, int parent_block_id,
+                std::set<std::string>& dependent_vars) {
   auto& block = input.blocks(block_id);
   auto& ops = block.ops();
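The hunk above adds two helpers and widens prune_impl's signature. GetSubBlockIndex scans an operator's attributes for one of type BLOCK and returns its block index, HasSubBlock reports whether such an index exists (index 0, the global block, counts as "no sub-block"), and prune_impl now also receives the parent block's index in the output and the dependent-variable set inherited from that parent. Below is a minimal self-contained sketch of the attribute scan; the OpDesc/Attr structs are hypothetical stand-ins, not the Paddle protobuf types.

#include <cassert>
#include <string>
#include <vector>

// Hypothetical stand-ins for the protobuf-generated descriptors.
enum class AttrType { INT, STRING, BLOCK };
struct Attr {
  AttrType type;
  int block_idx = -1;  // only meaningful when type == AttrType::BLOCK
};
struct OpDesc {
  std::string type;
  std::vector<Attr> attrs;
};

// Same idea as the GetSubBlockIndex added in this commit: the first
// BLOCK-typed attribute names the operator's sub-block.
int GetSubBlockIndex(const OpDesc& op) {
  for (const auto& attr : op.attrs) {
    if (attr.type == AttrType::BLOCK) return attr.block_idx;
  }
  return -1;
}

bool HasSubBlock(const OpDesc& op) { return GetSubBlockIndex(op) > 0; }

int main() {
  OpDesc mul{"mul", {}};  // plain op, no sub-block attribute
  OpDesc while_op{"while", {{AttrType::BLOCK, /*block_idx=*/1}}};  // owns block 1
  assert(!HasSubBlock(mul));
  assert(HasSubBlock(while_op) && GetSubBlockIndex(while_op) == 1);
  return 0;
}

Per the TODO removed in this hunk, RNN and Cond ops are the operators expected to carry such a sub-block attribute; ordinary ops fall through to -1.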
@@ -72,11 +89,9 @@ void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
     expect_fetch = (op_desc.type() == kFetchOpType);
   }
 
-  std::set<std::string> dependent_vars;
   std::vector<bool> should_run;
   for (auto op_iter = ops.rbegin(); op_iter != ops.rend(); ++op_iter) {
     auto& op_desc = *op_iter;
     if (IsTarget(op_desc) || HasDependentVar(op_desc, dependent_vars)) {
       // insert its input to the dependency graph
       for (auto& var : op_desc.inputs()) {
@@ -84,7 +99,6 @@ void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
           dependent_vars.insert(argu);
         }
       }
-
       should_run.push_back(true);
     } else {
       should_run.push_back(false);
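These two hunks drop the locally constructed dependent_vars set; the marking pass now works on the set passed in from the parent block, so a child block keeps any op that produces a variable its parent still needs. The pass itself is unchanged: iterate the ops in reverse, keep an op if it is a target or (as HasDependentVar checks) writes a variable already in the set, and fold its inputs into the set. A self-contained sketch of that backward marking over a simplified op list follows; the Op struct and the three ops are illustrative, not Paddle types.

#include <algorithm>
#include <iostream>
#include <set>
#include <string>
#include <vector>

struct Op {  // simplified stand-in for proto::OpDesc
  std::string name;
  std::vector<std::string> inputs;
  std::vector<std::string> outputs;
  bool is_target = false;
};

// Reverse pass: an op runs if it is a target or produces a dependent var;
// its inputs then become dependent as well (mirrors prune_impl's loop).
std::vector<bool> MarkShouldRun(const std::vector<Op>& ops,
                                std::set<std::string> dependent_vars) {
  std::vector<bool> should_run;
  for (auto it = ops.rbegin(); it != ops.rend(); ++it) {
    bool writes_dependent = std::any_of(
        it->outputs.begin(), it->outputs.end(),
        [&](const std::string& v) { return dependent_vars.count(v) > 0; });
    if (it->is_target || writes_dependent) {
      dependent_vars.insert(it->inputs.begin(), it->inputs.end());
      should_run.push_back(true);
    } else {
      should_run.push_back(false);
    }
  }
  std::reverse(should_run.begin(), should_run.end());  // back to forward order
  return should_run;
}

int main() {
  std::vector<Op> ops = {
      {"mul", {"x", "w"}, {"y"}},
      {"dropout", {"x"}, {"unused"}},  // nothing downstream consumes this
      {"softmax", {"y"}, {"out"}, /*is_target=*/true},
  };
  for (bool run : MarkShouldRun(ops, /*dependent_vars=*/{}))
    std::cout << run << " ";  // prints: 1 0 1
  std::cout << "\n";
  return 0;
}

Running it keeps mul and softmax but drops dropout, whose output no later op or target depends on.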
@@ -95,19 +109,48 @@ void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
   // we reverse the should_run vector
   std::reverse(should_run.begin(), should_run.end());
 
-  *output = input;
-  auto* op_field = output->mutable_blocks(block_id)->mutable_ops();
+  //*output = input;
+  // copy the current block from input to output
+  auto* block_field = output->mutable_blocks();
+  *block_field->Add() = input.blocks(block_id);
+  int output_block_id = output->blocks_size() - 1;
+  auto* output_block = output->mutable_blocks(output_block_id);
+  output_block->set_idx = output_block_id;
+  output_block->set_parent_idx = parent_block_id;
+
+  auto* op_field = output_block->mutable_ops();
   op_field->Clear();
   for (size_t i = 0; i < should_run.size(); ++i) {
     if (should_run[i]) {
-      *op_field->Add() = input.blocks(block_id).ops(i);
+      auto* op = op_field->Add();
+      *op = input.blocks(block_id).ops(i);
+      if (HasSubBlock(*op)) {
+        // create sub_block_dependent_vars here to help prune the sub block
+        std::set<std::string> sub_block_dependent_vars;
+        for (auto& var : op.inputs()) {
+          for (auto& argu : var.arguments()) {
+            sub_block_dependent_vars.insert(argu);
+          }
+        }
+        for (auto& var : op.outputs()) {
+          for (auto& argu : var.arguments()) {
+            sub_block_dependent_vars.insert(argu);
+          }
+        }
+        // GetSubBlockIndex(*op) is the idx of the sub_block in the input desc
+        // output_block_id is the idx of the current block in the output desc
+        prune_impl(input, output, GetSubBlockIndex(*op), output_block_id,
+                   sub_block_dependent_vars);
+      }
     }
   }
 
   // remove the VarDescs in BlockDesc that are not referenced in
   // the pruned OpDescs
   std::unordered_map<std::string, proto::VarDesc> var_map;
-  auto* var_field = output->mutable_blocks(block_id)->mutable_vars();
+  auto* var_field = output->mutable_blocks(output_block_id)->mutable_vars();
   for (const auto& var : *var_field) {
     var_map[var.name()] = var;
   }
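The hunk above replaces the old "copy the whole program, then rewrite block block_id in place" strategy. Each call now appends one fresh block to the output, stamps it with its own index and its parent's index, copies only the surviving ops into it, and, when a kept op owns a sub-block, seeds a new dependent-variable set from that op's inputs and outputs and recurses with output_block_id as the parent. Below is a compact sketch of just that recursion skeleton, over hypothetical Block/Program structs rather than the ProgramDesc protobuf; the keep flag stands in for the should_run decision.

#include <set>
#include <string>
#include <vector>

struct Op {  // hypothetical stand-ins, not Paddle's descriptors
  std::vector<std::string> inputs, outputs;
  int sub_block = -1;  // index into Program::blocks, -1 if none
  bool keep = true;    // stands in for the should_run decision
};
struct Block {
  int idx = 0, parent_idx = -1;
  std::vector<Op> ops;
};
struct Program { std::vector<Block> blocks; };

// Mirrors the new prune_impl flow: append an output block, set idx/parent_idx,
// copy the surviving ops, and recurse into any sub-block with a fresh
// dependent-variable set seeded from the owning op's inputs and outputs.
void PruneBlock(const Program& input, Program* output, int block_id,
                int parent_block_id, std::set<std::string> dependent_vars) {
  output->blocks.push_back(Block{});
  int output_block_id = static_cast<int>(output->blocks.size()) - 1;
  output->blocks[output_block_id].idx = output_block_id;
  output->blocks[output_block_id].parent_idx = parent_block_id;

  for (const Op& op : input.blocks[block_id].ops) {
    if (!op.keep) continue;  // the real code derives this from should_run
    // Re-index on every access: the recursive call below can grow the vector.
    output->blocks[output_block_id].ops.push_back(op);
    if (op.sub_block > 0) {
      std::set<std::string> sub_vars(op.inputs.begin(), op.inputs.end());
      sub_vars.insert(op.outputs.begin(), op.outputs.end());
      PruneBlock(input, output, op.sub_block, output_block_id, sub_vars);
    }
  }
  (void)dependent_vars;  // the real pass consults it when marking ops
}

int main() {
  Program in;
  in.blocks.resize(2);
  in.blocks[0].ops.push_back(Op{{"x"}, {"y"}, /*sub_block=*/1});
  in.blocks[1].ops.push_back(Op{{"y"}, {"z"}});
  Program out;
  PruneBlock(in, &out, /*block_id=*/0, /*parent_block_id=*/-1, {});
  // out now holds two blocks, and out.blocks[1].parent_idx == 0.
  return 0;
}

Re-indexing output->blocks on every access matters in this sketch because the recursive call reallocates the vector; the repeated protobuf field in the real code keeps existing block messages in place, which is why holding output_block across the recursion is viable there.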
@@ -118,14 +161,14 @@ void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
     auto& input_field = op.inputs();
     for (auto& input_var : input_field) {
       for (auto& arg : input_var.arguments()) {
-        *var_field->Add() = var_map[arg];
+        *var_field->Add() = var_map.at(arg);
       }
     }
     // add VarDescs of all output arguments for each OpDesc
     auto& output_field = op.outputs();
     for (auto& output_var : output_field) {
       for (auto& arg : output_var.arguments()) {
-        *var_field->Add() = var_map[arg];
+        *var_field->Add() = var_map.at(arg);
       }
     }
   }
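The only change in the hunk above is swapping the var_map lookups from operator[] to .at(). For std::unordered_map, operator[] default-constructs and inserts a value for a missing key, so an argument name absent from var_map would silently add an empty VarDesc to the block; .at() throws std::out_of_range instead, surfacing the inconsistency. A generic two-case illustration of the difference (standard library behavior, nothing Paddle-specific):

#include <iostream>
#include <stdexcept>
#include <string>
#include <unordered_map>

int main() {
  std::unordered_map<std::string, std::string> var_map{{"x", "tensor x"}};
  // operator[] inserts a default-constructed value for a missing key:
  std::cout << var_map["missing"].empty() << " " << var_map.size() << "\n";  // prints: 1 2
  // .at() refuses to invent an entry and throws instead:
  try {
    var_map.at("also_missing");
  } catch (const std::out_of_range& e) {
    std::cout << "out_of_range: " << e.what() << "\n";
  }
  return 0;
}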
@@ -133,7 +176,7 @@ void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
 // TODO(fengjiayi): Prune() could be inplaced to avoid unnecessary copies
 void Prune(const proto::ProgramDesc& input, proto::ProgramDesc* output) {
-  prune_impl(input, output, 0);
+  prune_impl(input, output, 0, -1, {});
 }
 
 void inference_optimize_impl(const proto::ProgramDesc& input,