BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 8cfda7ee (unverified)
Authored Nov 14, 2018 by Xin Pan; committed via GitHub on Nov 14, 2018
Merge pull request #14382 from panyx0718/fix4
Refine the pass builder and buildstrategy
Parents: 8f301f46, bae36597
Showing 5 changed files with 43 additions and 15 deletions (+43 −15)
paddle/fluid/framework/details/build_strategy.cc           +10 −6
paddle/fluid/framework/details/build_strategy.h            +10 −1
paddle/fluid/pybind/pybind.cc                              +16 −6
python/paddle/fluid/tests/unittests/test_dist_base.py       +1 −1
python/paddle/fluid/tests/unittests/test_pass_builder.py    +6 −1
paddle/fluid/framework/details/build_strategy.cc  (+10 −6)

@@ -79,9 +79,15 @@ class ParallelExecutorPassBuilder : public ir::PassBuilder {
   BuildStrategy strategy_;
 };
 
-std::shared_ptr<ir::PassBuilder> BuildStrategy::CreatePassesFromStrategy()
-    const {
+std::shared_ptr<ir::PassBuilder> BuildStrategy::CreatePassesFromStrategy(
+    bool finalize_strategy) const {
+  if (is_finalized_) {
+    return pass_builder_;
+  }
   pass_builder_.reset(new ParallelExecutorPassBuilder(*this));
+  if (finalize_strategy) {
+    is_finalized_ = true;
+  }
   return pass_builder_;
 }

@@ -95,10 +101,8 @@ std::unique_ptr<ir::Graph> BuildStrategy::Apply(
 #else
                                 const bool use_cuda) const {
 #endif
-  // Create a default one if not initialized by user.
-  if (!pass_builder_) {
-    CreatePassesFromStrategy();
-  }
+  // Create a default one if not finalized by user.
+  CreatePassesFromStrategy(false);
 
   std::unique_ptr<ir::Graph> graph(new ir::Graph(main_program));
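With the guard above, the pass list is built at most once after finalization: when is_finalized_ is already set, CreatePassesFromStrategy returns the cached pass_builder_ instead of resetting it. Seen from Python through the _finalize_strategy_and_create_passes binding added later in this commit, repeated finalize calls therefore hand back the same, already-built pass list. A minimal sketch, assuming a Fluid build that includes these bindings (the assertion is only illustrative):

    import paddle.fluid as fluid

    build_strategy = fluid.BuildStrategy()
    pb_first = build_strategy._finalize_strategy_and_create_passes()
    pb_second = build_strategy._finalize_strategy_and_create_passes()

    # The second call hits the is_finalized_ early return, so no new
    # ParallelExecutorPassBuilder is constructed and the pass list is unchanged.
    assert len(pb_first.all_passes()) == len(pb_second.all_passes())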
paddle/fluid/framework/details/build_strategy.h  (+10 −1)

@@ -75,12 +75,20 @@ struct BuildStrategy {
   bool remove_unnecessary_lock_{false};
 
+  // NOTE:
+  // Before you add new options, think if it's a general strategy that works
+  // with other strategy. If not, the strategy should be created through
+  // CreatePassesFromStrategy and the pass can be managed separately.
+
   // User normally doesn't need to call this API.
   // The PassBuilder allows for more customized insert, remove of passes
   // from python side.
   // A new PassBuilder is created based on configs defined above and
   // passes are owned by the PassBuilder.
-  std::shared_ptr<ir::PassBuilder> CreatePassesFromStrategy() const;
+  std::shared_ptr<ir::PassBuilder> CreatePassesFromStrategy(
+      bool finalize_strategy) const;
+
+  bool IsFinalized() const { return is_finalized_; }
 
   // Apply the passes built by the pass_builder_. The passes will be
   // applied to the Program and output an ir::Graph.

@@ -97,6 +105,7 @@ struct BuildStrategy {
 #endif
 
  private:
+  mutable bool is_finalized_ = false;
   mutable std::shared_ptr<ir::PassBuilder> pass_builder_;
 };
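The NOTE added above spells out the intended split: general knobs stay as BuildStrategy options, while model-specific tweaks go through the PassBuilder, which can insert and remove passes from the Python side. A minimal sketch of that Python-side customization, loosely following the unit tests changed later in this commit (graph_viz_pass is just an example pass name taken from those tests):

    import paddle.fluid as fluid

    build_strategy = fluid.BuildStrategy()
    build_strategy.fuse_elewise_add_act_ops = True  # general option: set on the strategy itself

    # Finalize the strategy; the returned PassBuilder owns the passes.
    pass_builder = build_strategy._finalize_strategy_and_create_passes()
    print([p.type() for p in pass_builder.all_passes()])

    # Model-specific customization happens on the PassBuilder, not on BuildStrategy.
    viz_pass = pass_builder.append_pass("graph_viz_pass")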
paddle/fluid/pybind/pybind.cc  (+16 −6)

@@ -650,9 +650,9 @@ All parameter, weight, gradient are variables in Paddle.
       [](ir::Pass &self, const std::string &name,
          const std::string &attr) {
         self.Set<std::string>(name, new std::string(attr));
       })
-      .def("set_int", [](ir::Pass &self, const std::string &name, int val) {
-        self.Set<const int>(name, new int(val));
-      });
+      .def("set_int", [](ir::Pass &self, const std::string &name,
+                         int val) { self.Set<const int>(name, new int(val)); })
+      .def("type", &ir::Pass::Type);
 
   py::class_<ir::PassBuilder, std::shared_ptr<ir::PassBuilder>> pb(
       m, "PassBuilder");

@@ -791,6 +791,7 @@ All parameter, weight, gradient are variables in Paddle.
           "reduce_strategy",
           [](const BuildStrategy &self) { return self.reduce_; },
           [](BuildStrategy &self, BuildStrategy::ReduceStrategy strategy) {
+            PADDLE_ENFORCE(!self.IsFinalized(), "BuildStrategy is finlaized.");
             self.reduce_ = strategy;
           },
           R"DOC(The type is STR, there are two reduce strategies in ParallelExecutor,

@@ -804,6 +805,7 @@ All parameter, weight, gradient are variables in Paddle.
           [](const BuildStrategy &self) { return self.gradient_scale_; },
           [](BuildStrategy &self,
              BuildStrategy::GradientScaleStrategy strategy) {
+            PADDLE_ENFORCE(!self.IsFinalized(), "BuildStrategy is finlaized.");
             self.gradient_scale_ = strategy;
           },
           R"DOC(The type is STR, there are three ways of defining :math:`loss@grad` in

@@ -815,6 +817,7 @@ All parameter, weight, gradient are variables in Paddle.
           "debug_graphviz_path",
           [](const BuildStrategy &self) { return self.debug_graphviz_path_; },
           [](BuildStrategy &self, const std::string &path) {
+            PADDLE_ENFORCE(!self.IsFinalized(), "BuildStrategy is finlaized.");
             self.debug_graphviz_path_ = path;
           },
           R"DOC(The type is STR, debug_graphviz_path indicate the path that

@@ -824,6 +827,7 @@ All parameter, weight, gradient are variables in Paddle.
           "enable_data_balance",
           [](const BuildStrategy &self) { return self.enable_data_balance_; },
           [](BuildStrategy &self, bool b) {
+            PADDLE_ENFORCE(!self.IsFinalized(), "BuildStrategy is finlaized.");
             self.enable_data_balance_ = b;
           })  // FIXME(chengudo): enable_data_balance seems not important
       .def_property(

@@ -832,6 +836,7 @@ All parameter, weight, gradient are variables in Paddle.
             return self.enable_sequential_execution_;
           },
           [](BuildStrategy &self, bool b) {
+            PADDLE_ENFORCE(!self.IsFinalized(), "BuildStrategy is finlaized.");
             self.enable_sequential_execution_ = b;
           },
           R"DOC(The type is BOOL. If set True, the execution order of ops would be the same as what is in the program. Default False.)DOC")

@@ -841,6 +846,7 @@ All parameter, weight, gradient are variables in Paddle.
             return self.remove_unnecessary_lock_;
           },
           [](BuildStrategy &self, bool b) {
+            PADDLE_ENFORCE(!self.IsFinalized(), "BuildStrategy is finlaized.");
             self.remove_unnecessary_lock_ = b;
           },
           R"DOC(The type is BOOL. If set True, some locks in GPU ops would be released and ParallelExecutor would run faster. Default False.)DOC")

@@ -850,15 +856,19 @@ All parameter, weight, gradient are variables in Paddle.
             return self.fuse_elewise_add_act_ops_;
           },
           [](BuildStrategy &self, bool b) {
+            PADDLE_ENFORCE(!self.IsFinalized(), "BuildStrategy is finlaized.");
             self.fuse_elewise_add_act_ops_ = b;
           },
           R"DOC(The type is BOOL, fuse_elewise_add_act_ops indicate whether
                 to fuse elementwise_add_op and activation_op,
                 it may make the execution faster. Default False)DOC")
-      .def("_create_passes_from_strategy",
+      .def("_finalize_strategy_and_create_passes",
            [](BuildStrategy &self) -> std::shared_ptr<ir::PassBuilder> {
-             return self.CreatePassesFromStrategy();
-           });
+             return self.CreatePassesFromStrategy(true);
+           },
+           R"DOC(Allow user to customized passes. Normally model-specific
+                 optimization passes should be defined in this way. BuildStrategy
+                 cannot be updated after being finalized.)DOC");
 
   pe.def(py::init<const std::vector<platform::Place> &,
                   const std::unordered_set<std::string> &,
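Every setter above now starts with PADDLE_ENFORCE(!self.IsFinalized(), ...), so strategy options can only be changed before the strategy is finalized; afterwards the setter raises. A minimal sketch of the resulting Python-side behavior, assuming a Fluid build with these bindings (the exact exception type surfaced by PADDLE_ENFORCE is not shown in this diff, so it is caught broadly here):

    import paddle.fluid as fluid

    build_strategy = fluid.BuildStrategy()
    build_strategy.reduce_strategy = fluid.BuildStrategy.ReduceStrategy.AllReduce  # ok, not finalized yet

    pass_builder = build_strategy._finalize_strategy_and_create_passes()

    try:
        build_strategy.fuse_elewise_add_act_ops = True  # hits the PADDLE_ENFORCE guard
    except Exception as exc:
        print("rejected after finalize:", exc)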
python/paddle/fluid/tests/unittests/test_dist_base.py  (+1 −1)

@@ -105,7 +105,7 @@ class TestDistRunnerBase(object):
         build_stra.reduce_strategy = fluid.BuildStrategy.ReduceStrategy.AllReduce
 
         if args.batch_merge_repeat > 1:
-            pass_builder = build_stra._create_passes_from_strategy()
+            pass_builder = build_stra._finalize_strategy_and_create_passes()
             mypass = pass_builder.insert_pass(
                 len(pass_builder.all_passes()) - 2, "multi_batch_merge_pass")
             mypass.set_int("num_repeats", args.batch_merge_repeat)
python/paddle/fluid/tests/unittests/test_pass_builder.py  (+6 −1)

@@ -94,7 +94,12 @@ class TestPassBuilder(unittest.TestCase):
     def test_parallel_testing_with_new_strategy(self):
         build_strategy = fluid.BuildStrategy()
-        pass_builder = build_strategy._create_passes_from_strategy()
+        self.assertFalse(build_strategy.fuse_elewise_add_act_ops)
+        build_strategy.fuse_elewise_add_act_ops = True
+        pass_builder = build_strategy._finalize_strategy_and_create_passes()
+        self.assertTrue("fuse_elewise_add_act_pass" in
+                        [p.type() for p in pass_builder.all_passes()])
+
         origin_len = len(pass_builder.all_passes())
 
         viz_pass = pass_builder.append_pass("graph_viz_pass")