未验证 提交 813e7526 编写于 作者: J jiangcheng 提交者: GitHub

add debug information for build_cinn_pass and graph symbolization (#36867)

上级 29c6bcbf
......@@ -43,6 +43,7 @@ using framework::ir::Node;
using GraphNodeVec = std::vector<Node*>;
using GraphNodeSet = std::unordered_set<Node*>;
using GraphNodeMap = std::unordered_map<Node*, Node*>;
namespace {
int ExtractOpRole(const GraphNodeSet& cluster) {
......@@ -62,11 +63,9 @@ int ExtractOpRole(const GraphNodeSet& cluster) {
// Deal with subgraph's feed input var node:
// create a new input var node and its feed op node
void AddFeedOpAndVar(const std::unordered_set<Node*>& feed_vars,
const GraphNodeSet& cluster,
const std::unordered_map<Node*, Node*>& old_op2new_op,
const std::unordered_map<Node*, Node*>& old_var2new_var,
Graph* graph) {
void AddFeedOpAndVar(const GraphNodeSet& feed_vars, const GraphNodeSet& cluster,
const GraphNodeMap& old_op2new_op,
const GraphNodeMap& old_var2new_var, Graph* graph) {
for (auto* old_var : feed_vars) {
// create feed op
OpDesc desc;
......@@ -76,6 +75,7 @@ void AddFeedOpAndVar(const std::unordered_set<Node*>& feed_vars,
// get new feed var node
auto* var = old_var2new_var.at(old_var);
VLOG(4) << "Add Feed Op before: " << var->Name();
// link feed op and feed var
IR_NODE_LINK_TO(op, var);
......@@ -95,13 +95,12 @@ void AddFeedOpAndVar(const std::unordered_set<Node*>& feed_vars,
// Deal with subgraph's parameter var node:
// create a new input var node; its data will be fetched from the scope,
// so it doesn't need a feed op
void AddParamVar(const std::unordered_set<Node*>& param_vars,
const GraphNodeSet& cluster,
const std::unordered_map<Node*, Node*>& old_op2new_op,
const std::unordered_map<Node*, Node*>& old_var2new_var,
Graph* graph) {
void AddParamVar(const GraphNodeSet& param_vars, const GraphNodeSet& cluster,
const GraphNodeMap& old_op2new_op,
const GraphNodeMap& old_var2new_var, Graph* graph) {
for (auto* old_var : param_vars) {
auto* var = old_var2new_var.at(old_var);
VLOG(4) << "Add Param Var Node: " << var->Name();
for (auto* old_op : old_var->outputs) {
if (cluster.count(old_op)) {
......@@ -113,13 +112,12 @@ void AddParamVar(const std::unordered_set<Node*>& param_vars,
// Deal with subgraph's outputs var node:
// create a new output var node and its fetch op
void AddOutputVar(const std::unordered_set<Node*>& output_vars,
const GraphNodeSet& cluster,
const std::unordered_map<Node*, Node*>& old_op2new_op,
const std::unordered_map<Node*, Node*>& old_var2new_var,
Graph* graph) {
void AddOutputVar(const GraphNodeSet& output_vars, const GraphNodeSet& cluster,
const GraphNodeMap& old_op2new_op,
const GraphNodeMap& old_var2new_var, Graph* graph) {
for (auto* old_var : output_vars) {
auto* var = old_var2new_var.at(old_var);
VLOG(4) << "Add Output Var Node: " << var->Name();
for (auto* old_op : old_var->inputs) {
if (cluster.count(old_op)) {
......@@ -139,13 +137,13 @@ std::unique_ptr<Graph> CreateNewSubGraph(const GraphNodeSet& cluster,
// the ProgramDesc is useless, so here we pass a temporary object.
auto subgraph = std::make_unique<Graph>(framework::ProgramDesc());
std::unordered_map<Node*, Node*> old_op2new_op;
GraphNodeMap old_op2new_op;
for (auto* op : cluster) {
auto sub_node = subgraph->CreateOpNode(op->Op());
old_op2new_op[op] = sub_node;
}
std::unordered_map<Node*, Node*> old_var2new_var;
GraphNodeMap old_var2new_var;
for (auto* var : cluster_internals) {
PADDLE_ENFORCE_NOT_NULL(var->Var(),
platform::errors::PreconditionNotMet(
......@@ -167,7 +165,7 @@ std::unique_ptr<Graph> CreateNewSubGraph(const GraphNodeSet& cluster,
}
}
std::unordered_set<Node*> need_feed_vars;
GraphNodeSet need_feed_vars;
std::unordered_set<Node *> param_vars, output_vars;
// the subgraph is independent, so here we only need to link
// to the node in new subgraph, and discard the link to
......@@ -303,6 +301,8 @@ void AddCinnOpToGraph(const GraphNodeSet& cluster,
auto* cinn_op_node = graph->CreateOpNode(&cinn_op_desc);
// Add new links from or to the cinn launch op node
AddLinkToCinnOp(cluster_inputs, cluster_outputs, cinn_op_node);
VLOG(4) << "Add op [" << kCinnLaunchOp << "] into graph.";
}
// Removing cluster node and internals node from Graph
......@@ -346,6 +346,16 @@ void SearchAllSubgraphs(Graph* graph) {
std::vector<GraphNodeVec> clusters =
framework::ir::SubgraphDetector(graph, teller)();
auto cluster_debug_info = [](const GraphNodeSet& cluster) {
std::string res = "(";
for (auto* node : cluster) {
res.append(node->Name());
res.append(", ");
}
res.append(")");
return res;
};
auto* cinn_compiler = CinnCompiler::GetInstance();
for (const auto& node_vec : clusters) {
// Classify var node to inputs, outputs, and internals.
......@@ -354,10 +364,19 @@ void SearchAllSubgraphs(Graph* graph) {
GraphNodeSet cluster_inputs, cluster_outputs, cluster_internals;
AnalyseClusterVariables(cluster_set, &cluster_inputs, &cluster_outputs,
&cluster_internals);
VLOG(4) << "Cluster Ops: " << cluster_debug_info(cluster_set);
VLOG(4) << "Cluster input vars: " << cluster_debug_info(cluster_inputs);
VLOG(4) << "Cluster output vars: " << cluster_debug_info(cluster_outputs);
VLOG(4) << "Cluster internal vars: "
<< cluster_debug_info(cluster_internals);
// Create a new subgraph according to the found cluster and
// save it in CinnCompiler
std::string compilation_key = cinn_compiler->AddGraph(CreateNewSubGraph(
cluster_set, cluster_internals, cluster_inputs, cluster_outputs));
VLOG(4) << "Compilation Key: " << compilation_key;
// Replace the found cluster to a new cinn op node
ReplaceSubGraphWithCinnOpNode(cluster_set, cluster_inputs, cluster_outputs,
cluster_internals, compilation_key, graph);
......
......@@ -59,8 +59,21 @@ FeedInfoMap CinnGraphSymbolization::GetFeedInfoMapFromInput() const {
for (auto& feed_pair : input_tensors_) {
const auto& feed_name = feed_pair.first;
const auto* tensor = feed_pair.second;
PADDLE_ENFORCE_NE(tensor, nullptr,
platform::errors::PreconditionNotMet(
"The input variable %s's tensor cannot be NULL,"
"we need the variable's dtype and shape from tensor.",
feed_name.c_str()));
VLOG(4) << "Get feed info from input: " << feed_name;
feed_map[feed_name] = utils::GetCinnFeedInfoFromTensor(*tensor);
PADDLE_ENFORCE_NE(
feed_map[feed_name].shape.size(), 0UL,
platform::errors::PreconditionNotMet(
"The input variable %s's tensor shape cannot be empty,"
"we need the variable's dtype and shape from tensor.",
feed_name.c_str()));
}
return feed_map;
}
......@@ -95,6 +108,12 @@ CinnGraphSymbolization::CreateCinnScope(const FeedInfoMap& feed_map) {
auto parameter_names = GetGraphInputParameterNames();
for (const auto& param_name : parameter_names) {
PADDLE_ENFORCE_GT(
feed_map.count(param_name), 0UL,
platform::errors::NotFound("Cannot find parameter %s from input list,"
"please add the tensor into input.",
param_name.c_str()));
// if cannot find var in graph input, skip.
// scope accepte the CINN format name, so here we need transform
// paddle format name to CINN format.
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册