PaddlePaddle / PaddleDetection
Commit 16eaaf3f, authored on Sep 12, 2018 by Tomasz Patejko
MKLDNN conv + elementwise_add fusion: added one more UT, found and corrected bugs in pass
Parent: 604bad08
Showing 2 changed files with 104 additions and 48 deletions.

paddle/fluid/framework/ir/conv_elementwise_add_mkldnn_fuse_pass.cc  (+16, -25)
paddle/fluid/framework/ir/conv_elementwise_add_mkldnn_fuse_pass_tester.cc  (+88, -23)
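The pass touched by this commit looks for a conv2d whose output feeds an elementwise_add and replaces the pair with a single conv2d carrying a fuse_sum attribute (see op_desc.SetAttr("fuse_sum", true) in the diff below). The snippet below is not part of the commit; it is a minimal, self-contained sketch of the arithmetic identity such a fusion relies on: adding y after the convolution gives the same result as letting the convolution accumulate into y. The naive_conv2d helper is hypothetical and exists only for this illustration.

// Illustration only -- not PaddlePaddle code. Demonstrates that
//   elementwise_add(conv2d(input, filter), y) == conv2d(input, filter) accumulated into y,
// which is the identity the conv + elementwise_add (sum) fusion relies on.
#include <cassert>
#include <cmath>
#include <cstdio>
#include <vector>

// Hypothetical helper: naive single-channel 2D convolution, stride 1, no padding.
static std::vector<float> naive_conv2d(const std::vector<float>& input, int ih, int iw,
                                       const std::vector<float>& filter, int kh, int kw) {
  const int oh = ih - kh + 1, ow = iw - kw + 1;
  std::vector<float> out(oh * ow, 0.0f);
  for (int r = 0; r < oh; ++r)
    for (int c = 0; c < ow; ++c)
      for (int i = 0; i < kh; ++i)
        for (int j = 0; j < kw; ++j)
          out[r * ow + c] += input[(r + i) * iw + (c + j)] * filter[i * kw + j];
  return out;
}

int main() {
  const int ih = 4, iw = 4, kh = 3, kw = 3;  // output is 2x2
  const std::vector<float> input = {1, 2,  3,  4,  5,  6,  7,  8,
                                    9, 10, 11, 12, 13, 14, 15, 16};
  const std::vector<float> filter = {1, 0, -1, 1, 0, -1, 1, 0, -1};
  const std::vector<float> y = {0.5f, -1.0f, 2.0f, 3.0f};  // second elementwise_add input

  // Unfused: conv2d followed by a separate elementwise_add op.
  const std::vector<float> conv_out = naive_conv2d(input, ih, iw, filter, kh, kw);
  std::vector<float> unfused(conv_out.size());
  for (size_t i = 0; i < conv_out.size(); ++i) unfused[i] = conv_out[i] + y[i];

  // Fused: the convolution result is accumulated directly into y
  // (conceptually what an MKL-DNN sum post-op does), so no separate add op remains.
  std::vector<float> fused = y;
  for (size_t i = 0; i < fused.size(); ++i) fused[i] += conv_out[i];

  for (size_t i = 0; i < fused.size(); ++i) assert(std::fabs(fused[i] - unfused[i]) < 1e-6f);
  std::printf("fused and unfused results match\n");
  return 0;
}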
paddle/fluid/framework/ir/conv_elementwise_add_mkldnn_fuse_pass.cc

@@ -45,17 +45,13 @@ struct Conv {
                      ->assert_is_op("conv2d");
 
   auto input_var = pattern->new_node(input_name())
-                       ->AsInput()
-                       ->assert_is_op_input(conv_name(), input_name());
+                       ->assert_is_op_input(conv_name());
 
   auto filter_var = pattern->new_node(filter_name())
-                        ->AsInput()
-                        ->assert_is_op_input(conv_name(), filter_name());
+                        ->assert_is_persistable_var()
+                        ->assert_is_op_input(conv_name());
 
   auto output_var = pattern->new_node(output_name())
-                        ->AsOutput()
-                        ->assert_is_op_output(conv_name(), output_name());
+                        ->assert_is_op_output(conv_name());
 
   conv_op->LinksFrom({input_var, filter_var});
   conv_op->LinksTo({output_var});

@@ -77,19 +73,13 @@ struct ElementwiseAdd {
                      ->assert_is_op("elementwise_add");
 
   auto y_var = pattern->new_node(y_name())
-                   ->AsInput()
-                   ->assert_is_op_input(elementwise_add_name(), y_name());
+                   ->assert_is_op_input(elementwise_add_name());
 
-  conv_output->assert_is_op_input(pattern->node_name(elementwise_add_name()),
-                                  pattern->node_name(x_name()));
-  // auto y_var = pattern->NewNode(y_name())
-  //              ->AsInput()
-  //              ->assert_is_op_input(elementwise_add_name());
+  conv_output->assert_is_op_input(elementwise_add_name(), x_name());
 
   auto out_var = pattern->new_node(out_name())
                      ->AsOutput()
                      ->assert_is_op_output(
-                         elementwise_add_name(), out_name());
+                         pattern->node_name(elementwise_add_name()));
 
   elementwise_add_op->LinksFrom({y_var, conv_output});
   elementwise_add_op->LinksTo({out_var});

@@ -118,16 +108,16 @@ graph_ptr ConvElementwiseAddMKLDNNFusePass::ApplyImpl(graph_ptr graph) const {
   GraphPatternDetector gpd;
   auto pattern = gpd.mutable_pattern();
   auto pattern_ptr = std::make_shared<patterns::Pattern>(pattern, name_scope_);
 
   patterns::Conv conv_pattern;
   auto conv_output = conv_pattern(pattern_ptr)();
-  conv_output->AsIntermediate();
 
   patterns::ElementwiseAdd elementwise_add_pattern;
   elementwise_add_pattern(pattern_ptr)(conv_output);
+  conv_output->AsIntermediate();
 
   auto link_nodes_to = [](Node* a, Node* b) {
     a->outputs.push_back(b);
     b->inputs.push_back(a);

@@ -139,7 +129,7 @@ graph_ptr ConvElementwiseAddMKLDNNFusePass::ApplyImpl(graph_ptr graph) const {
   op_desc.SetInput("Input", {conv_input->Name()});
   op_desc.SetInput("Filter", {conv_filter->Name()});
-  op_desc.SetOutput("Ouput", {y->Name()});
+  op_desc.SetOutput("Output", {y->Name()});
 
   op_desc.SetAttr("fuse_sum", true);

@@ -155,16 +145,17 @@ graph_ptr ConvElementwiseAddMKLDNNFusePass::ApplyImpl(graph_ptr graph) const {
   };
 
   auto handler = [&](const GraphPatternDetector::subgraph_t& subgraph, Graph* g) {
-    auto elementwise_add_x = node_from_subgraph(subgraph, pattern_ptr, elementwise_add_pattern.x_name());
-    auto elementwise_add_y = node_from_subgraph(subgraph, pattern_ptr, elementwise_add_pattern.y_name());
-    auto elementwise_add_out = node_from_subgraph(subgraph, pattern_ptr, elementwise_add_pattern.out_name());
-    auto conv_filter = node_from_subgraph(subgraph, pattern_ptr, conv_pattern.filter_name());
+    auto conv_op = node_from_subgraph(subgraph, pattern_ptr, conv_pattern.conv_name());
     auto conv_input = node_from_subgraph(subgraph, pattern_ptr, conv_pattern.input_name());
+    auto conv_filter = node_from_subgraph(subgraph, pattern_ptr, conv_pattern.filter_name());
     auto conv_output = node_from_subgraph(subgraph, pattern_ptr, conv_pattern.output_name());
+    auto elementwise_add_op = node_from_subgraph(subgraph, pattern_ptr, elementwise_add_pattern.elementwise_add_name());
+    auto elementwise_add_y = node_from_subgraph(subgraph, pattern_ptr, elementwise_add_pattern.y_name());
+    auto elementwise_add_out = node_from_subgraph(subgraph, pattern_ptr, elementwise_add_pattern.out_name());
 
     fuse_conv(g, conv_input, conv_filter, elementwise_add_y);
-    remove_unused_nodes(g, {elementwise_add_x, conv_output, elementwise_add_out});
+    remove_unused_nodes(g, {conv_output, elementwise_add_out, conv_op, elementwise_add_op});
   };
 
   gpd(graph.get(), handler);
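A minimal sketch, assuming a toy Node struct rather than Paddle's ir::Graph, of the rewiring the handler above performs: the fused node takes over the convolution inputs plus the residual input, and the original conv2d and elementwise_add operator nodes together with the intermediate convolution output become unreachable and are erased, which is the set now passed to remove_unused_nodes.

// Toy sketch only -- the Node struct and helpers below are hypothetical and are
// not PaddlePaddle's ir::Graph API. It mimics the shape of the rewrite:
// {conv2d, elementwise_add} -> one fused conv2d, then the dead nodes are erased.
#include <algorithm>
#include <cstdio>
#include <memory>
#include <string>
#include <vector>

struct Node {
  std::string name;
  std::vector<Node*> inputs;
  std::vector<Node*> outputs;
};

int main() {
  std::vector<std::unique_ptr<Node>> graph;
  auto add_node = [&](const std::string& name) {
    graph.emplace_back(new Node{name, {}, {}});
    return graph.back().get();
  };
  auto link = [](Node* a, Node* b) {  // same role as link_nodes_to in the pass
    a->outputs.push_back(b);
    b->inputs.push_back(a);
  };

  // input, filter -> conv2d -> conv_out;  conv_out, y -> elementwise_add -> out
  Node* input = add_node("input");
  Node* filter = add_node("filter");
  Node* y = add_node("y");
  Node* conv_op = add_node("conv2d");
  Node* conv_out = add_node("conv_out");
  Node* add_op = add_node("elementwise_add");
  Node* out = add_node("out");
  link(input, conv_op); link(filter, conv_op); link(conv_op, conv_out);
  link(conv_out, add_op); link(y, add_op); link(add_op, out);

  // Fused op: takes Input, Filter and the residual y, and produces the add's output.
  Node* fused = add_node("conv2d (fuse_sum)");
  link(input, fused); link(filter, fused); link(y, fused); link(fused, out);

  // Erase the nodes the fusion made unreachable, unlinking them everywhere first.
  auto erase_node = [&](Node* victim) {
    for (auto& n : graph) {
      auto drop = [victim](std::vector<Node*>& edges) {
        edges.erase(std::remove(edges.begin(), edges.end(), victim), edges.end());
      };
      drop(n->inputs);
      drop(n->outputs);
    }
    graph.erase(std::remove_if(graph.begin(), graph.end(),
                               [victim](const std::unique_ptr<Node>& n) { return n.get() == victim; }),
                graph.end());
  };
  for (Node* victim : {conv_op, add_op, conv_out}) erase_node(victim);

  std::printf("%zu nodes remain (8 before erasing, 5 after)\n", graph.size());
  return 0;
}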
paddle/fluid/framework/ir/conv_elementwise_add_mkldnn_fuse_pass_tester.cc

@@ -16,7 +16,7 @@ void SetOp(ProgramDesc* prog, const std::string& type,
     op->SetAttr("use_mkldnn", true);
     op->SetInput("Input", {inputs[0]});
     op->SetInput("Filter", {inputs[1]});
-    op->SetInput("Output", {outputs});
+    op->SetOutput("Output", outputs);
   } else if (type == "elementwise_add") {
     op->SetInput("X", {inputs[0]});
     op->SetInput("Y", {inputs[1]});

@@ -24,54 +24,119 @@ void SetOp(ProgramDesc* prog, const std::string& type,
   }
 }
 
-ProgramDesc BuildProgramDesc() {
-  ProgramDesc prog;
-  for (auto& v :
-       std::vector<std::string>({"a", "b", "c", "d", "weights", "f", "g"})) {
-    auto* var = prog.MutableBlock(0)->Var(v);
-    var->SetType(proto::VarType::LOD_TENSOR);
-    if (v == "weights" || v == "bias") {
-      var->SetPersistable(true);
-    }
-  }
-
-  SetOp(&prog, "OP0", {"a"}, {"b"});
-  SetOp(&prog, "OP1", {"c"}, {"d"});
-  SetOp(&prog, "conv2d", {"d", "weights"}, {"f"});
-  SetOp(&prog, "elemenwise_add", {"d", "f"}, {"g"});
-
-  return prog;
-}
-
-TEST(ConvElementwiseAddMKLDNNFusePass, basic) {
-  auto prog = BuildProgramDesc();
-  std::unique_ptr<ir::Graph> graph(new ir::Graph(prog));
-  auto pass = PassRegistry::Instance().Get("conv_elementwise_add_mkldnn_fuse_pass");
-
-  int original_nodes_num = graph->Nodes().size();
-  graph = pass->Apply(std::move(graph));
-  int current_nodes_num = graph->Nodes().size();
-
-  EXPECT_EQ(original_nodes_num - 2, current_nodes_num);
-  // Assert conv_relu op in newly generated graph
-  int conv_elementwise_add_count = 0;
-
-  for (auto* node : graph->Nodes()) {
-    if (node->IsOp() && node->Op()->Type() == "conv2d") {
-      if (node->Op()->HasAttr("use_mkldnn")) {
-        bool use_mkldnn = boost::get<bool>(node->Op()->GetAttr("use_mkldnn"));
-        if (use_mkldnn) {
-          // TODO tpatejko: it is commented because convolution does not support this attribute
-          if (true /*node->Op()->HasAttr("fuse_sum")*/) {
-            // bool fuse_sum = boost::get<bool>(node->Op()->GetAttr("fuse_sum"));
-            if (true /*fuse_sum*/) {
-              ++conv_elementwise_add_count;
-            }
-          }
-        }
-      }
-    }
-  }
-  EXPECT_EQ(conv_elementwise_add_count, 1);
-}
+TEST(ConvElementwiseAddMKLDNNFusePass, ConvolutionWithElementwiseAddWithOps) {
+  auto build_program_desc = [&]() -> ProgramDesc {
+    ProgramDesc prog;
+    for (auto& v :
+         std::vector<std::string>({"a", "b", "weights", "c", "d", "e", "f", "g"})) {
+      auto* var = prog.MutableBlock(0)->Var(v);
+      var->SetType(proto::VarType::LOD_TENSOR);
+      if (v == "weights" || v == "bias") {
+        var->SetPersistable(true);
+      }
+    }
+
+    SetOp(&prog, "OP0", {"a"}, {"b"});
+    SetOp(&prog, "OP1", {"c"}, {"d"});
+    SetOp(&prog, "conv2d", {"b", "weights"}, {"e"});
+    SetOp(&prog, "elementwise_add", {"e", "d"}, {"f"});
+    SetOp(&prog, "OP3", {"f"}, {"g"});
+
+    return prog;
+  };
+
+  auto prog = build_program_desc();
+  std::unique_ptr<ir::Graph> graph(new ir::Graph(prog));
+  auto pass = PassRegistry::Instance().Get("conv_elementwise_add_mkldnn_fuse_pass");
+
+  int original_nodes_num = graph->Nodes().size();
+  graph = pass->Apply(std::move(graph));
+  int current_nodes_num = graph->Nodes().size();
+
+  EXPECT_EQ(original_nodes_num - 4 + 1, current_nodes_num);
+  // Assert conv_relu op in newly generated graph
+  int conv_count = 0;
+  int elementwise_add_count = 0;
+
+  for (auto* node : graph->Nodes()) {
+    if (node->IsOp() && node->Op()->Type() == "conv2d") {
+      ++conv_count;
+    }
+    if (node->IsOp() && node->Op()->Type() == "elementwise_add") {
+      ++elementwise_add_count;
+    }
+    /*
+    if (node->Op()->HasAttr("use_mkldnn")) {
+      bool use_mkldnn = boost::get<bool>(node->Op()->GetAttr("use_mkldnn"));
+      if (use_mkldnn) {
+        if (node->Op()->HasAttr("fuse_sum")) {
+          // bool fuse_sum = boost::get<bool>(node->Op()->GetAttr("fuse_sum"));
+          if (fuse_sum) {
+            ++conv_elementwise_add_count;
+          }
+        }
+      }
+    }
+    */
+  }
+  EXPECT_EQ(conv_count, 1);
+  EXPECT_EQ(elementwise_add_count, 0);
+}
+
+TEST(ConvElementwiseAddMKLDNNFusePass, OnlyConvolutionElementwiseAdd) {
+  auto build_program_desc = [&]() -> ProgramDesc {
+    ProgramDesc prog;
+    for (auto& v : std::vector<std::string>({"a", "b", "weights"})) {
+      auto* var = prog.MutableBlock(0)->Var(v);
+      var->SetType(proto::VarType::LOD_TENSOR);
+      if (v == "weights" || v == "bias") {
+        var->SetPersistable(true);
+      }
+    }
+
+    SetOp(&prog, "conv2d", {"a", "weights"}, {"b"});
+    SetOp(&prog, "elementwise_add", {"b", "c"}, {"d"});
+
+    return prog;
+  };
+
+  auto prog = build_program_desc();
+  std::unique_ptr<ir::Graph> graph(new ir::Graph(prog));
+  auto pass = PassRegistry::Instance().Get("conv_elementwise_add_mkldnn_fuse_pass");
+
+  int original_nodes_num = graph->Nodes().size();
+  graph = pass->Apply(std::move(graph));
+  int current_nodes_num = graph->Nodes().size();
+
+  EXPECT_EQ(original_nodes_num - 4 + 1, current_nodes_num);
+  // Assert conv_relu op in newly generated graph
+  int conv_count = 0;
+  int elementwise_add_count = 0;
+
+  for (auto* node : graph->Nodes()) {
+    if (node->IsOp() && node->Op()->Type() == "conv2d") {
+      ++conv_count;
+    }
+    if (node->IsOp() && node->Op()->Type() == "elementwise_add") {
+      ++elementwise_add_count;
+    }
+    /*
+    if (node->Op()->HasAttr("use_mkldnn")) {
+      bool use_mkldnn = boost::get<bool>(node->Op()->GetAttr("use_mkldnn"));
+      if (use_mkldnn) {
+        if (node->Op()->HasAttr("fuse_sum")) {
+          // bool fuse_sum = boost::get<bool>(node->Op()->GetAttr("fuse_sum"));
+          if (fuse_sum) {
+            ++conv_elementwise_add_count;
+          }
+        }
+      }
+    }
+    */
+  }
+  EXPECT_EQ(conv_count, 1);
+  EXPECT_EQ(elementwise_add_count, 0);
+}
 }  // namespace ir