Crayon鑫 / Paddle · Commits · a49aa4da
Forked from PaddlePaddle / Paddle
Commit a49aa4da
Authored Sep 24, 2018 by Sylwester Fraczek

make bias unnecessary for ConvRelu fuse

Parent: 493ef0c8
Showing 3 changed files with 5 additions and 13 deletions (+5, -13)
paddle/fluid/framework/ir/conv_relu_mkldnn_fuse_pass.cc   +3  -4
paddle/fluid/framework/ir/graph_pattern_detector.cc       +1  -7
paddle/fluid/framework/ir/graph_pattern_detector.h        +1  -2
paddle/fluid/framework/ir/conv_relu_mkldnn_fuse_pass.cc

...
@@ -42,14 +42,13 @@ std::unique_ptr<ir::Graph> ConvReLUFusePass::ApplyImpl(
                      Graph* g) {
     VLOG(4) << "handle ConvReLU fuse";
     GET_IR_NODE_FROM_SUBGRAPH(conv_weight, conv_weight,
                               conv_relu_pattern);                        // Filter
-    GET_IR_NODE_FROM_SUBGRAPH(conv_bias, conv_bias, conv_relu_pattern);  // Bias
     GET_IR_NODE_FROM_SUBGRAPH(conv_out, conv_out, conv_relu_pattern);    // tmp
     GET_IR_NODE_FROM_SUBGRAPH(conv, conv, conv_relu_pattern);            // CONV op
     GET_IR_NODE_FROM_SUBGRAPH(relu_out, relu_out, conv_relu_pattern);    // Out
     GET_IR_NODE_FROM_SUBGRAPH(relu, relu, conv_relu_pattern);            // ReLU op
 
-    // Create an ConvReLU Node.
+    // Transform Conv node into ConvReLU node.
     OpDesc* desc = conv->Op();
     desc->SetOutput("Output", std::vector<std::string>({relu_out->Name()}));
     desc->SetAttr("fuse_relu", true);
...
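For orientation, the hunk above is the body of the pattern-detector callback inside ApplyImpl. The sketch below shows how such a callback is typically hooked up to the detector; it is assembled from identifiers visible in this diff (GraphPatternDetector, patterns::ConvReLU, conv_relu_pattern) plus assumed surrounding code, so treat it as illustrative rather than a verbatim copy of the rest of the pass. The practical effect of this commit is that the handler now also fires for conv2d+relu chains whose conv2d op has no Bias input.

```cpp
// Sketch (assumed, simplified) of how the fuse pass drives the detector.
// The handler body is the code shown in the hunk above.
GraphPatternDetector gpd;
auto* conv_input = gpd.mutable_pattern()
                       ->NewNode("conv_relu_mkldnn_fuse/conv_input")
                       ->AsInput()
                       ->assert_is_op_input("conv2d", "Input");
patterns::ConvReLU conv_relu_pattern(gpd.mutable_pattern(),
                                     "conv_relu_mkldnn_fuse");
conv_relu_pattern(conv_input);

auto handler = [&](const GraphPatternDetector::subgraph_t& subgraph,
                   Graph* g) {
  // ... as in the hunk above: fetch the matched nodes (no conv_bias any
  // more) and set fuse_relu = true on the matched conv2d op ...
};
gpd(graph.get(), handler);
```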
paddle/fluid/framework/ir/graph_pattern_detector.cc

...
@@ -638,11 +638,6 @@ PDNode *patterns::ConvReLU::operator()(
                               ->AsInput()
                               ->assert_is_persistable_var()
                               ->assert_is_op_input("conv2d", "Filter");
-  // Bias
-  auto *conv_bias_var = pattern->NewNode(conv_bias_repr())
-                            ->AsInput()
-                            ->assert_is_persistable_var()
-                            ->assert_is_op_input("conv2d", "Bias");
   // intermediate variable, will be removed in the IR after fuse.
   auto *conv_out_var = pattern->NewNode(conv_out_repr())
                            ->AsIntermediate()
...
@@ -653,8 +648,7 @@ PDNode *patterns::ConvReLU::operator()(
                            ->AsOutput()
                            ->assert_is_op_output("relu");
 
-  conv_op->LinksFrom({conv_input, conv_weight_var, conv_bias_var})
-      .LinksTo({conv_out_var});
+  conv_op->LinksFrom({conv_input, conv_weight_var}).LinksTo({conv_out_var});
   relu_op->LinksFrom({conv_out_var}).LinksTo({relu_out_var});
   return relu_out_var;
 }
...
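Putting the two hunks together, patterns::ConvReLU::operator() now builds roughly the following node graph. This is a sketch using the same PDPattern API shown above, with node names simplified; the real code uses the *_repr() helpers declared in the header. The key point is that only Input and Filter are required edges into conv2d, so a Bias edge, if an op happens to have one, simply stays outside the matched subgraph instead of preventing a match.

```cpp
// Sketch: the conv2d+relu pattern after this change.
PDNode *ConvReLUPatternSketch(PDPattern *pattern, PDNode *conv_input) {
  conv_input->assert_is_op_input("conv2d", "Input");

  auto *conv_op = pattern->NewNode("conv")->assert_is_op("conv2d");
  auto *relu_op = pattern->NewNode("relu")->assert_is_op("relu");

  auto *conv_weight_var = pattern->NewNode("conv_weight")
                              ->AsInput()
                              ->assert_is_persistable_var()
                              ->assert_is_op_input("conv2d", "Filter");
  auto *conv_out_var = pattern->NewNode("conv_out")
                           ->AsIntermediate()
                           ->assert_is_op_output("conv2d")
                           ->assert_is_op_input("relu");
  auto *relu_out_var = pattern->NewNode("relu_out")
                           ->AsOutput()
                           ->assert_is_op_output("relu");

  // No Bias node: Input and Filter are the only required conv2d inputs.
  conv_op->LinksFrom({conv_input, conv_weight_var}).LinksTo({conv_out_var});
  relu_op->LinksFrom({conv_out_var}).LinksTo({relu_out_var});
  return relu_out_var;
}
```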
paddle/fluid/framework/ir/graph_pattern_detector.h

...
@@ -379,7 +379,7 @@ struct PatternBase {
 // op: conv + relu
 // named nodes:
 // conv_input, conv_weight,
-// conv_bias, conv_out, conv,
+// conv_out, conv,
 // relu_out, relu
 struct ConvReLU : public PatternBase {
   ConvReLU(PDPattern* pattern, const std::string& name_scope)
...
@@ -392,7 +392,6 @@ struct ConvReLU : public PatternBase {
   PATTERN_DECL_NODE(relu);
   // declare variable node's name
   PATTERN_DECL_NODE(conv_weight);
-  PATTERN_DECL_NODE(conv_bias);
   PATTERN_DECL_NODE(conv_out);
   PATTERN_DECL_NODE(relu_out);
 };
...
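To see the point of the commit end to end, one could build a conv2d → relu program that has no Bias input at all, run the pass, and check that the conv2d op still gets fuse_relu set. The sketch below is a hypothetical check, not the project's actual unit test: it is written against the framework APIs as they existed around this revision (ProgramDesc/BlockDesc, ir::Graph, PassRegistry), and the function name BiaslessConvReluGetsFused is invented for illustration.

```cpp
// Hypothetical check: a conv2d+relu chain with no Bias input should still
// be fused after this commit.
#include <memory>
#include <string>

#include "paddle/fluid/framework/ir/graph.h"
#include "paddle/fluid/framework/ir/pass.h"
#include "paddle/fluid/framework/program_desc.h"

namespace fw = paddle::framework;

bool BiaslessConvReluGetsFused() {
  fw::ProgramDesc prog;
  auto* block = prog.MutableBlock(0);
  for (const std::string& name : {"in", "filter", "tmp", "out"}) {
    auto* var = block->Var(name);
    // The pattern requires the Filter to be a persistable variable.
    if (name == "filter") var->SetPersistable(true);
  }

  auto* conv = block->AppendOp();
  conv->SetType("conv2d");
  conv->SetAttr("use_mkldnn", true);
  conv->SetInput("Input", {"in"});
  conv->SetInput("Filter", {"filter"});  // deliberately no "Bias" input
  conv->SetOutput("Output", {"tmp"});

  auto* relu = block->AppendOp();
  relu->SetType("relu");
  relu->SetInput("X", {"tmp"});
  relu->SetOutput("Out", {"out"});

  std::unique_ptr<fw::ir::Graph> graph(new fw::ir::Graph(prog));
  // Assumes conv_relu_mkldnn_fuse_pass is registered (i.e. linked in).
  auto pass =
      fw::ir::PassRegistry::Instance().Get("conv_relu_mkldnn_fuse_pass");
  graph = pass->Apply(std::move(graph));

  for (auto* node : graph->Nodes()) {
    if (node->IsOp() && node->Op()->Type() == "conv2d" &&
        node->Op()->HasAttr("fuse_relu")) {
      return true;  // the pass marked the bias-less conv as fused
    }
  }
  return false;
}
```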