BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)

Commit 6a2bc9a2
Authored on Feb 25, 2019 by Michal Gallus

Add Conv Residual Connection UT for Projection

test=develop

Parent: a6e3cd5e

Showing 1 changed file with 40 additions and 10 deletions:

paddle/fluid/framework/ir/mkldnn/conv_elementwise_add_mkldnn_fuse_pass_tester.cc (+40, -10)
@@ -44,10 +44,14 @@ struct TestIsReachable {
   using func = std::function<bool(const std::string&, const std::string&)>;
 
   auto operator()(const std::unique_ptr<ir::Graph>& graph) -> func {
-    auto find_node = [](const std::unique_ptr<ir::Graph>& graph,
-                        const std::string& name) -> Node* {
+    auto hash = [](const Node* node) -> std::string {
+      return node->Name() + std::to_string(node->id());
+    };
+
+    auto find_node = [&](const std::unique_ptr<ir::Graph>& graph,
+                         const std::string& name) -> Node* {
       for (auto& node : GraphTraits::DFS(*graph)) {
-        if (name == node.Name()) {
+        if (name == hash(&node)) {
           return &node;
         }
       }
@@ -55,13 +59,17 @@ struct TestIsReachable {
       return nullptr;
     };
 
-    return [&](std::string from, const std::string to) -> bool {
+    // update the from and to strings to hashed equivs in loop from graph traits
+    return [&](std::string from, std::string to) -> bool {
       if (from == to) return true;
 
       std::map<std::string, bool> visited;
 
       for (auto& node : GraphTraits::DFS(*graph)) {
-        visited[node.Name()] = false;
+        auto hashed = hash(&node);
+        if (node.Name() == from) from = hashed;
+        if (node.Name() == to) to = hashed;
+        visited[hashed] = false;
       }
 
       visited[from] = true;
@@ -72,15 +80,15 @@ struct TestIsReachable {
       while (!queue.empty()) {
         auto cur = find_node(graph, queue.front());
         queue.pop_front();
         if (cur == nullptr) return false;
 
         for (auto n : cur->outputs) {
-          if (n->Name() == to) return true;
+          auto hashed_name = hash(n);
+          if (hashed_name == to) return true;
 
-          if (!visited[n->Name()]) {
-            visited[n->Name()] = true;
-            queue.push_back(n->Name());
+          if (!visited[hashed_name]) {
+            visited[hashed_name] = true;
+            queue.push_back(hashed_name);
           }
         }
       }
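Note on the change above: TestIsReachable now keys its lookups on hash(node) = Name() + id() instead of the bare node name. This matters once a graph contains two operators with the same name, as the projection test added further down in this diff does with its two conv2d ops; name-only keys would collapse such nodes into a single map entry. A minimal standalone sketch of the difference (not Paddle code; the Node struct and NodeKey helper below are hypothetical stand-ins for ir::Node and the test's hash lambda):

#include <iostream>
#include <map>
#include <string>

// Hypothetical stand-in for ir::Node, just enough to show the keying issue.
struct Node {
  std::string name;
  int id;
};

// Mirrors the test helper's hash lambda: name concatenated with the unique id.
std::string NodeKey(const Node* node) {
  return node->name + std::to_string(node->id);
}

int main() {
  Node conv_a{"conv2d", 3};
  Node conv_b{"conv2d", 7};

  std::map<std::string, bool> by_name;
  std::map<std::string, bool> by_key;

  by_name[conv_a.name] = false;      // both inserts land on the key "conv2d"
  by_name[conv_b.name] = false;
  by_key[NodeKey(&conv_a)] = false;  // distinct keys "conv2d3" and "conv2d7"
  by_key[NodeKey(&conv_b)] = false;

  std::cout << by_name.size() << " entry vs " << by_key.size() << " entries\n";
  return 0;
}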
@@ -166,6 +174,28 @@ TEST(ConvElementwiseAddMKLDNNFusePass, ConvolutionAsYWithElementwiseAddRelu) {
   RunPassAndAssert(&prog, "a", "relu", 1);
 }
 
+TEST(ConvElementwiseAddMKLDNNFusePass,
+     ConvolutionProjectionAsYWithElementwiseAddRelu) {
+  auto prog = BuildProgramDesc({"a", "b", "c", "d", "e", "f"},
+                               {"bias", "weights", "bias2", "weights2"});
+
+  SetOp(&prog, "sigmoid", {{"X", "a"}}, {"Out", "b"});
+  // right branch
+  SetOp(&prog, "conv2d",
+        {{"Input", "b"}, {"Bias", "bias"}, {"Filter", "weights"}},
+        {"Output", "c"});
+
+  // left branch
+  SetOp(&prog, "conv2d",
+        {{"Input", "a"}, {"Bias", "bias2"}, {"Filter", "weights2"}},
+        {"Output", "f"});
+
+  SetOp(&prog, "elementwise_add", {{"X", "f"}, {"Y", "c"}}, {"Out", "d"});
+  SetOp(&prog, "relu", {{"X", "d"}}, {"Out", "e"});
+
+  RunPassAndAssert(&prog, "a", "relu", 2);
+}
+
 TEST(ConvElementwiseAddMKLDNNFusePass,
      ConvolutionAsYWithElementwiseAddReluNoBias) {
   auto prog = BuildProgramDesc({"a", "b", "c", "d", "e"}, {"weights"});
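The new ConvolutionProjectionAsYWithElementwiseAddRelu test builds a residual-style graph: "a" feeds both a sigmoid -> conv2d branch producing "c" and a direct conv2d projection branch producing "f"; the two branches meet in elementwise_add, and relu consumes the sum. RunPassAndAssert(&prog, "a", "relu", 2) then appears to rely on TestIsReachable confirming that "relu" stays reachable from "a" once the fuse pass has run. A minimal standalone sketch of that reachability check over the same topology, assuming a plain adjacency map instead of Paddle's ir::Graph (the IsReachable function and node labels below are illustrative, not Paddle APIs):

#include <cassert>
#include <deque>
#include <map>
#include <set>
#include <string>
#include <vector>

// BFS from `from` over directed edges, true once `to` is seen -- the same
// idea TestIsReachable implements over ir::Graph via node->outputs.
bool IsReachable(const std::map<std::string, std::vector<std::string>>& edges,
                 const std::string& from, const std::string& to) {
  if (from == to) return true;
  std::set<std::string> visited{from};
  std::deque<std::string> queue{from};
  while (!queue.empty()) {
    auto cur = queue.front();
    queue.pop_front();
    auto it = edges.find(cur);
    if (it == edges.end()) continue;
    for (const auto& next : it->second) {
      if (next == to) return true;
      if (visited.insert(next).second) queue.push_back(next);
    }
  }
  return false;
}

int main() {
  // Variable/op topology of the projection test: two conv2d branches feeding
  // one elementwise_add, followed by relu.
  std::map<std::string, std::vector<std::string>> g{
      {"a", {"sigmoid", "conv2d_left"}},
      {"sigmoid", {"b"}},
      {"b", {"conv2d_right"}},
      {"conv2d_right", {"c"}},
      {"conv2d_left", {"f"}},
      {"c", {"elementwise_add"}},
      {"f", {"elementwise_add"}},
      {"elementwise_add", {"d"}},
      {"d", {"relu"}},
      {"relu", {"e"}}};

  assert(IsReachable(g, "a", "relu"));   // residual path keeps relu reachable
  assert(!IsReachable(g, "e", "a"));     // edges are directed
  return 0;
}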