BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 813e7526 (unverified)
Author: jiangcheng, Nov 01, 2021
Committer: GitHub, Nov 01, 2021
Parent: 29c6bcbf

add debug information for build_cinn_pass and graph symbolization (#36867)
2 changed files with 56 additions and 18 deletions (+56 -18):

  paddle/fluid/framework/paddle2cinn/build_cinn_pass.cc            +37 -18
  paddle/fluid/framework/paddle2cinn/cinn_graph_symbolization.cc   +19 -0
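The changes are purely diagnostic: VLOG(4) trace messages plus PADDLE_ENFORCE precondition checks with clearer error messages. As a side note (not part of this commit), Paddle's VLOG macros come from glog, so the new messages only appear when the verbose-logging level is raised to 4 or higher, typically by exporting GLOG_v=4 before running. A minimal, self-contained glog sketch of that gating, independent of Paddle:

// Illustration only: plain glog, not Paddle code. Build with -lglog.
#include <glog/logging.h>

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  FLAGS_logtostderr = 1;  // print to stderr instead of log files
  FLAGS_v = 4;            // same effect as setting GLOG_v=4 in the environment

  // Emitted only because the verbosity threshold is >= 4, which is how the
  // messages added in this commit are gated as well.
  VLOG(4) << "sample debug message at verbosity level 4";
  return 0;
}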
paddle/fluid/framework/paddle2cinn/build_cinn_pass.cc
@@ -43,6 +43,7 @@ using framework::ir::Node;
 using GraphNodeVec = std::vector<Node*>;
 using GraphNodeSet = std::unordered_set<Node*>;
+using GraphNodeMap = std::unordered_map<Node*, Node*>;

 namespace {
 int ExtractOpRole(const GraphNodeSet& cluster) {
@@ -62,11 +63,9 @@ int ExtractOpRole(const GraphNodeSet& cluster) {
 // Deal with subgraph's feed input var node:
 // create a new input var node and it's feed op node
-void AddFeedOpAndVar(const std::unordered_set<Node*>& feed_vars,
-                     const GraphNodeSet& cluster,
-                     const std::unordered_map<Node*, Node*>& old_op2new_op,
-                     const std::unordered_map<Node*, Node*>& old_var2new_var,
-                     Graph* graph) {
+void AddFeedOpAndVar(const GraphNodeSet& feed_vars, const GraphNodeSet& cluster,
+                     const GraphNodeMap& old_op2new_op,
+                     const GraphNodeMap& old_var2new_var, Graph* graph) {
   for (auto* old_var : feed_vars) {
     // create feed op
     OpDesc desc;
@@ -76,6 +75,7 @@ void AddFeedOpAndVar(const std::unordered_set<Node*>& feed_vars,
     // get new feed var node
     auto* var = old_var2new_var.at(old_var);
+    VLOG(4) << "Add Feed Op before: " << var->Name();

     // link feed op and feed var
     IR_NODE_LINK_TO(op, var);
@@ -95,13 +95,12 @@ void AddFeedOpAndVar(const std::unordered_set<Node*>& feed_vars,
 // Deal with subgraph's parameter var node:
 // create a new input var node, it's data will get by scope,
 // so it don't need feed op
-void AddParamVar(const std::unordered_set<Node*>& param_vars,
-                 const GraphNodeSet& cluster,
-                 const std::unordered_map<Node*, Node*>& old_op2new_op,
-                 const std::unordered_map<Node*, Node*>& old_var2new_var,
-                 Graph* graph) {
+void AddParamVar(const GraphNodeSet& param_vars, const GraphNodeSet& cluster,
+                 const GraphNodeMap& old_op2new_op,
+                 const GraphNodeMap& old_var2new_var, Graph* graph) {
   for (auto* old_var : param_vars) {
     auto* var = old_var2new_var.at(old_var);
+    VLOG(4) << "Add Param Var Node: " << var->Name();

     for (auto* old_op : old_var->outputs) {
       if (cluster.count(old_op)) {
@@ -113,13 +112,12 @@ void AddParamVar(const std::unordered_set<Node*>& param_vars,
 // Deal with subgraph's outputs var node:
 // create a new output var node and it's fetch op
-void AddOutputVar(const std::unordered_set<Node*>& output_vars,
-                  const GraphNodeSet& cluster,
-                  const std::unordered_map<Node*, Node*>& old_op2new_op,
-                  const std::unordered_map<Node*, Node*>& old_var2new_var,
-                  Graph* graph) {
+void AddOutputVar(const GraphNodeSet& output_vars, const GraphNodeSet& cluster,
+                  const GraphNodeMap& old_op2new_op,
+                  const GraphNodeMap& old_var2new_var, Graph* graph) {
   for (auto* old_var : output_vars) {
     auto* var = old_var2new_var.at(old_var);
+    VLOG(4) << "Add Output Var Node: " << var->Name();

     for (auto* old_op : old_var->inputs) {
       if (cluster.count(old_op)) {
@@ -139,13 +137,13 @@ std::unique_ptr<Graph> CreateNewSubGraph(const GraphNodeSet& cluster,
   // the ProgramDesc is useless, so here we pass a temporary object.
   auto subgraph = std::make_unique<Graph>(framework::ProgramDesc());

-  std::unordered_map<Node*, Node*> old_op2new_op;
+  GraphNodeMap old_op2new_op;
   for (auto* op : cluster) {
     auto sub_node = subgraph->CreateOpNode(op->Op());
     old_op2new_op[op] = sub_node;
   }

-  std::unordered_map<Node*, Node*> old_var2new_var;
+  GraphNodeMap old_var2new_var;
   for (auto* var : cluster_internals) {
     PADDLE_ENFORCE_NOT_NULL(var->Var(),
                             platform::errors::PreconditionNotMet(
@@ -167,7 +165,7 @@ std::unique_ptr<Graph> CreateNewSubGraph(const GraphNodeSet& cluster,
     }
   }

-  std::unordered_set<Node*> need_feed_vars;
+  GraphNodeSet need_feed_vars;
   std::unordered_set<Node*> param_vars, output_vars;
   // the subgraph is independently, so here we only need link
   // to the node in new subgraph, and discard the link to
@@ -303,6 +301,8 @@ void AddCinnOpToGraph(const GraphNodeSet& cluster,
   auto* cinn_op_node = graph->CreateOpNode(&cinn_op_desc);
   // Add new links from or to the the cinn launch op node
   AddLinkToCinnOp(cluster_inputs, cluster_outputs, cinn_op_node);
+
+  VLOG(4) << "Add op [" << kCinnLaunchOp << "] into graph.";
 }

 // Removing cluster node and internals node from Graph
@@ -346,6 +346,16 @@ void SearchAllSubgraphs(Graph* graph) {
   std::vector<GraphNodeVec> clusters =
       framework::ir::SubgraphDetector(graph, teller)();

+  auto cluster_debug_info = [](const GraphNodeSet& cluster) {
+    std::string res = "(";
+    for (auto* node : cluster) {
+      res.append(node->Name());
+      res.append(", ");
+    }
+    res.append(")");
+    return res;
+  };
+
   auto* cinn_compiler = CinnCompiler::GetInstance();
   for (const auto& node_vec : clusters) {
     // Classify var node to inputs, outputs, and internals.
@@ -354,10 +364,19 @@ void SearchAllSubgraphs(Graph* graph) {
     GraphNodeSet cluster_inputs, cluster_outputs, cluster_internals;
     AnalyseClusterVariables(cluster_set, &cluster_inputs, &cluster_outputs,
                             &cluster_internals);
+
+    VLOG(4) << "Cluster Ops: " << cluster_debug_info(cluster_set);
+    VLOG(4) << "Cluster input vars: " << cluster_debug_info(cluster_inputs);
+    VLOG(4) << "Cluster output vars: " << cluster_debug_info(cluster_outputs);
+    VLOG(4) << "Cluster internal vars: "
+            << cluster_debug_info(cluster_internals);
+
     // Create a new subgraph according to the found cluster and
     // save it in CinnCompiler
     std::string compilation_key = cinn_compiler->AddGraph(CreateNewSubGraph(
         cluster_set, cluster_internals, cluster_inputs, cluster_outputs));
+    VLOG(4) << "Compilation Key: " << compilation_key;
+
     // Replace the found cluster to a new cinn op node
     ReplaceSubGraphWithCinnOpNode(cluster_set, cluster_inputs, cluster_outputs,
                                   cluster_internals, compilation_key, graph);
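For reference, a standalone sketch (not from the commit) of the same debug-string pattern the new cluster_debug_info lambda uses, with a plain std::unordered_set<std::string> standing in for the pass's GraphNodeSet and the string element standing in for node->Name():

#include <iostream>
#include <string>
#include <unordered_set>

int main() {
  // Hypothetical op names; in the pass these come from Node::Name().
  std::unordered_set<std::string> cluster = {"elementwise_add", "relu", "mul"};

  // Same formatting as the lambda above; note it leaves a trailing ", "
  // before the closing parenthesis, e.g. "(relu, mul, )".
  auto cluster_debug_info = [](const std::unordered_set<std::string>& cluster) {
    std::string res = "(";
    for (const auto& name : cluster) {
      res.append(name);
      res.append(", ");
    }
    res.append(")");
    return res;
  };

  std::cout << "Cluster Ops: " << cluster_debug_info(cluster) << std::endl;
  return 0;
}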
paddle/fluid/framework/paddle2cinn/cinn_graph_symbolization.cc
@@ -59,8 +59,21 @@ FeedInfoMap CinnGraphSymbolization::GetFeedInfoMapFromInput() const {
   for (auto& feed_pair : input_tensors_) {
     const auto& feed_name = feed_pair.first;
     const auto* tensor = feed_pair.second;
+    PADDLE_ENFORCE_NE(tensor, nullptr,
+                      platform::errors::PreconditionNotMet(
+                          "The input variable %s's tensor cannot be NULL,"
+                          "we need the variable's dtype and shape from tensor.",
+                          feed_name.c_str()));
+
+    VLOG(4) << "Get feed info from input: " << feed_name;
     feed_map[feed_name] = utils::GetCinnFeedInfoFromTensor(*tensor);
+
+    PADDLE_ENFORCE_NE(
+        feed_map[feed_name].shape.size(), 0UL,
+        platform::errors::PreconditionNotMet(
+            "The input variable %s's tensor shape cannot be empty,"
+            "we need the variable's dtype and shape from tensor.",
+            feed_name.c_str()));
   }
   return feed_map;
 }
@@ -95,6 +108,12 @@ CinnGraphSymbolization::CreateCinnScope(const FeedInfoMap& feed_map) {
   auto parameter_names = GetGraphInputParameterNames();
   for (const auto& param_name : parameter_names) {
+    PADDLE_ENFORCE_GT(feed_map.count(param_name), 0UL,
+                      platform::errors::NotFound(
+                          "Cannot find parameter %s from input list,"
+                          "please add the tensor into input.",
+                          param_name.c_str()));
+
     // if cannot find var in graph input, skip.
     // scope accepte the CINN format name, so here we need transform
     // paddle format name to CINN format.
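To make the intent of the new checks concrete, here is a plain-C++ sketch (not from the commit) of the same fail-fast precondition pattern, with std::runtime_error standing in for PADDLE_ENFORCE_* and platform::errors, and a hypothetical FakeTensor in place of the framework tensor:

#include <cstdint>
#include <stdexcept>
#include <string>
#include <vector>

// Hypothetical stand-in for the tensor whose dtype/shape the symbolization reads.
struct FakeTensor {
  std::vector<int64_t> shape;
};

// Mirrors the added checks: a feed tensor must exist and must carry a shape
// before it can be turned into CINN feed info.
void CheckFeedTensor(const std::string& feed_name, const FakeTensor* tensor) {
  if (tensor == nullptr) {
    throw std::runtime_error("The input variable " + feed_name +
                             "'s tensor cannot be NULL.");
  }
  if (tensor->shape.empty()) {
    throw std::runtime_error("The input variable " + feed_name +
                             "'s tensor shape cannot be empty.");
  }
}

int main() {
  FakeTensor x{{16, 3, 224, 224}};
  CheckFeedTensor("x", &x);         // passes
  // CheckFeedTensor("y", nullptr); // would throw: tensor cannot be NULL
  return 0;
}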