Commit 02e4f1f8 (unverified)
Authored by niuliling123 on Jul 11, 2022
Committed by GitHub on Jul 11, 2022
Add Concat transformer for layout autotune (#42003)
* Add Concat transformer for layout autotune
Parent: d4372a1e
Showing 3 changed files with 77 additions and 0 deletions (+77 -0):

paddle/fluid/imperative/layout_autotune.cc (+2 -0)
paddle/fluid/imperative/layout_transformer.h (+46 -0)
python/paddle/fluid/tests/unittests/test_layout_autotune.py (+29 -0)
paddle/fluid/imperative/layout_autotune.cc

@@ -131,6 +131,8 @@ paddle::imperative::NameVarMap<VarType> DealLightlyLayoutSensitive(
     transposer = std::make_shared<FlattenOpTransformer<VarType>>(op_type);
   } else if (op_type == "arg_max") {
     transposer = std::make_shared<ArgmaxOpTransformer<VarType>>(op_type);
+  } else if (op_type == "concat") {
+    transposer = std::make_shared<ConcatOpTransformer<VarType>>(op_type);
   } else if (op_type.find("elementwise_") != std::string::npos) {
     transposer = std::make_shared<ElementwiseOpTransformer<VarType>>(op_type);
   } else {
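For orientation, the chain this hunk extends simply dispatches on the operator name. A minimal Python sketch of that pattern (illustrative only, not Paddle code; the final fallback is an assumption, since the `else` body lies outside the hunk):

# Illustrative dispatch table mirroring the C++ if/else chain above;
# the values name the transformer classes from layout_transformer.h.
TRANSFORMERS = {
    "arg_max": "ArgmaxOpTransformer",
    "concat": "ConcatOpTransformer",  # the branch this commit adds
}

def pick_transformer(op_type: str) -> str:
    if op_type in TRANSFORMERS:
        return TRANSFORMERS[op_type]
    if "elementwise_" in op_type:
        return "ElementwiseOpTransformer"
    return "LightlyLayoutSensitiveOpTransformer"  # assumption: fallback outside this hunk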
paddle/fluid/imperative/layout_transformer.h

@@ -401,5 +401,51 @@ class ArgmaxOpTransformer
   }
 };
 
+template <typename VarType>
+class ConcatOpTransformer
+    : public LightlyLayoutSensitiveOpTransformer<VarType> {
+ public:
+  explicit ConcatOpTransformer(const std::string& type)
+      : LightlyLayoutSensitiveOpTransformer<VarType>(type) {}
+
+  paddle::imperative::NameVarMap<VarType> Apply(
+      const paddle::imperative::NameVarMap<VarType>& ins,
+      const paddle::imperative::NameVarMap<VarType>& outs,
+      paddle::framework::AttributeMap* attrs,
+      const std::shared_ptr<paddle::imperative::Tracer>& tracer) {
+    VLOG(3) << "Optimize lightly layout sensitive op " << this->Type();
+    auto& in_var = ins.at("X")[0];
+    auto var_layout = paddle::imperative::GetDataLayout(in_var);
+    bool need_transpose = false;
+    for (auto& pair : ins) {
+      for (auto& var : pair.second) {
+        if (var != nullptr &&
+            (paddle::imperative::GetDataLayout(var) != var_layout)) {
+          need_transpose = true;
+          break;
+        }
+      }
+    }
+
+    if (need_transpose) {
+      return LightlyLayoutSensitiveOpTransformer<VarType>::Apply(
+          ins, outs, attrs, tracer);
+    }
+
+    if (var_layout != DataLayout::UNDEFINED) {
+      std::vector<int> perm_nhwc = {0, 3, 1, 2};
+      std::vector<int> perm_nchw = {0, 2, 3, 1};
+      auto perm = var_layout == DataLayout::NHWC ? perm_nhwc : perm_nchw;
+      auto axis = BOOST_GET_CONST(int, (*attrs)["axis"]);
+      (*attrs)["axis"] = static_cast<int>(perm[axis]);
+    }
+    auto axis = BOOST_GET_CONST(int, (*attrs)["axis"]);
+    VLOG(3) << "Optimize lightly layout sensitive op, axis: " << axis;
+    this->SetVarsLayout(outs, var_layout);
+    return ins;
+  }
+};
+
 }  // namespace imperative
 }  // namespace paddle
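When every input already shares one physical layout, the transformer above avoids inserting transposes and instead rewrites the concat `axis` attribute: `perm_nhwc[k]` is the physical NHWC position of logical NCHW axis `k`. The following NumPy sketch (not part of the commit) checks that identity for all four axes:

import numpy as np

to_nhwc = [0, 2, 3, 1]    # transpose order taking NCHW data to NHWC
perm_nhwc = [0, 3, 1, 2]  # logical NCHW axis -> physical NHWC axis

x = np.random.rand(1, 8, 14, 12)  # two NCHW tensors of equal shape
y = np.random.rand(1, 8, 14, 12)

for k in range(4):
    # concat in NCHW, then transpose the result to NHWC ...
    expected = np.concatenate([x, y], axis=k).transpose(to_nhwc)
    # ... equals transposing first and concatenating along the mapped axis
    actual = np.concatenate(
        [x.transpose(to_nhwc), y.transpose(to_nhwc)], axis=perm_nhwc[k])
    assert np.array_equal(expected, actual)
print("axis remapping verified for all four axes")

The identity holds because `perm_nhwc` is the inverse of the NCHW-to-NHWC transpose order, so the concatenated axis lands at the remapped position for any equal-shape inputs.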
python/paddle/fluid/tests/unittests/test_layout_autotune.py

@@ -161,6 +161,35 @@ class LayoutAutoTune(unittest.TestCase):
         self.assertEqual(conv_out.shape, [1, 14, 12, 8])
         self.assertEqual(out.shape, [1])
 
+    def test_concat_op_transposer(self):
+        if not self.use_autoune():
+            return
+        in1 = paddle.rand([1, 8, 14, 12])
+        conv = paddle.nn.Conv2D(3, 8, (3, 3))
+        data = paddle.rand([1, 3, 16, 14])
+        with paddle.amp.auto_cast(level="O2"):
+            conv_out = conv(data)
+            # conv_out.shape = [1, 14, 12, 8] with NHWC
+            out = paddle.concat(x=[conv_out, in1], axis=0)
+
+        self.assertEqual(conv_out.shape, [1, 14, 12, 8])
+        self.assertEqual(out.shape, [2, 8, 14, 12])
+
+    def test_concat_op_no_transposer(self):
+        if not self.use_autoune():
+            return
+        conv = paddle.nn.Conv2D(3, 8, (3, 3))
+        data1 = paddle.rand([1, 3, 16, 14])
+        data2 = paddle.rand([1, 3, 16, 14])
+        with paddle.amp.auto_cast(level="O2"):
+            conv_out1 = conv(data1)
+            conv_out2 = conv(data2)
+            # conv_out.shape = [1, 14, 12, 8] with NHWC
+            out = paddle.concat(x=[conv_out1, conv_out2], axis=0)
+
+        self.assertEqual(conv_out1.shape, [1, 14, 12, 8])
+        self.assertEqual(out.shape, [2, 14, 12, 8])
+
 class TestAutoTuneAPI(unittest.TestCase):
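Both tests gate on the suite's `use_autoune()` helper (spelling as in the existing test file), which switches layout autotuning on. As a standalone illustration of the first test's scenario, here is a hedged sketch; the `paddle.incubate.autotune.set_config` call and its config keys are an assumption based on the Paddle 2.3-era incubate API, and running it requires an AMP-capable GPU:

import paddle

# Assumption: this config schema is what the suite's use_autoune()
# helper applies; see paddle.incubate.autotune in Paddle 2.3+.
paddle.incubate.autotune.set_config({"layout": {"enable": True}})

conv = paddle.nn.Conv2D(3, 8, (3, 3))
data = paddle.rand([1, 3, 16, 14])
other = paddle.rand([1, 8, 14, 12])
with paddle.amp.auto_cast(level="O2"):
    # Under autotune the conv output may be kept in NHWC,
    # so its reported shape is [1, 14, 12, 8].
    conv_out = conv(data)
    # Mixed input layouts take the fallback transpose path,
    # giving [2, 8, 14, 12] (cf. test_concat_op_transposer).
    out = paddle.concat(x=[conv_out, other], axis=0)
print(out.shape)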