From 25706d0868879c7eaa336df2825101d242085e18 Mon Sep 17 00:00:00 2001
From: Xin Pan
Date: Fri, 27 Jul 2018 21:55:24 +0800
Subject: [PATCH] properly set up dep of concat and fetch_bar

---
 paddle/fluid/framework/ir/graph.cc | 77 ++++++++++++++++++++++++++++++
 1 file changed, 77 insertions(+)

diff --git a/paddle/fluid/framework/ir/graph.cc b/paddle/fluid/framework/ir/graph.cc
index 2cfad606d..f870fb2b9 100644
--- a/paddle/fluid/framework/ir/graph.cc
+++ b/paddle/fluid/framework/ir/graph.cc
@@ -24,6 +24,68 @@ namespace paddle {
 namespace framework {
 namespace ir {
 
+std::vector<std::string> FindDistTrainSendVars(
+    const std::vector<ir::Node *> &nodes) {
+  std::vector<std::string> send_vars;
+  // since parameters are all in block 0,
+  // it's enough to only scan send ops in block 0
+  for (auto &node : nodes) {
+    auto op_vars = node->Op()->InputArgumentNames();
+    send_vars.reserve(send_vars.size() +
+                      std::distance(op_vars.begin(), op_vars.end()));
+    send_vars.insert(send_vars.end(), op_vars.begin(), op_vars.end());
+  }
+  return send_vars;
+}
+
+std::vector<std::string> FindDistTrainRecvVars(
+    const std::vector<ir::Node *> &nodes) {
+  std::vector<std::string> recv_vars;
+  for (auto &node : nodes) {
+    auto op_vars = node->Op()->OutputArgumentNames();
+    recv_vars.reserve(recv_vars.size() +
+                      std::distance(op_vars.begin(), op_vars.end()));
+    recv_vars.insert(recv_vars.end(), op_vars.begin(), op_vars.end());
+  }
+  return recv_vars;
+}
+
+bool IsDistTrainOp(ir::Node *node, const std::vector<std::string> &send_vars,
+                   const std::vector<std::string> &recv_vars) {
+  if (send_vars.size() == 0 || recv_vars.size() == 0) {
+    return false;
+  }
+
+  /**
+   * Check whether any of opvars contains `.block` and is in rpc_vars.
+   */
+  auto checker = [](const std::vector<std::string> &opvars,
+                    const std::vector<std::string> &rpc_vars) -> bool {
+    for (auto &var : opvars) {
+      // a variable name with the suffix `.block` means it's a variable
+      // split by the DistributeTranspiler
+      // [python/paddle/fluid/transpiler/distribute_transpiler.py]
+      if (var.find(".block") != std::string::npos &&
+          std::find(rpc_vars.begin(), rpc_vars.end(), var) != rpc_vars.end()) {
+        return true;
+      }
+    }
+    return false;
+  };
+
+  std::vector<std::string> input_var_names;
+  std::vector<std::string> output_var_names;
+  for (ir::Node *input : node->inputs) {
+    input_var_names.push_back(input->Name());
+  }
+  for (ir::Node *output : node->outputs) {
+    output_var_names.push_back(output->Name());
+  }
+
+  return checker(output_var_names, send_vars) ||
+         checker(input_var_names, recv_vars);
+}
+
 Graph::Graph(const ProgramDesc &program) : program_(program) {
   VLOG(3) << "block in program:" << program_.Size();
   std::unordered_map<std::string, VarDesc *> all_vars;
@@ -104,6 +166,21 @@ Graph::Graph(const ProgramDesc &program) : program_(program) {
       dep_var->outputs.push_back(fetch_bar);
     }
   }
+
+  std::vector<std::string> send_vars = FindDistTrainSendVars(send_ops);
+  std::vector<std::string> recv_vars = FindDistTrainRecvVars(recv_ops);
+  for (ir::Node *node : Nodes()) {
+    if (IsDistTrainOp(node, send_vars, recv_vars)) {
+      if (fetch_bar && node->Name() == "concat") {
+        ir::Node *dep_var = CreateControlDepVar();
+        fetch_bar->outputs.push_back(dep_var);
+        dep_var->inputs.push_back(fetch_bar);
+        node->inputs.push_back(dep_var);
+        dep_var->outputs.push_back(node);
+      }
+    }
+  }
+
   /**
    * We only handle write after read(WAR), since it should not have a write
    * after write in program. If there are write after write operators, we need
-- 
GitLab
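
Editor's note: below is a minimal, self-contained sketch of the two mechanisms
this patch combines, kept separate from the patch itself. It does NOT use the
real paddle::framework::ir API: `Node`, `IsSplitRpcVar`, and `AddControlDep`
are simplified stand-ins invented for illustration, and the variable names in
main() are made-up examples of DistributeTranspiler-style split names.
`IsSplitRpcVar` mirrors the `checker` lambda (a `.block` suffix plus membership
in the send/recv variable list), and `AddControlDep` mirrors the four
push_back calls that thread a dummy control-dep var between fetch_barrier and
concat so the scheduler must run fetch_barrier first.

#include <algorithm>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Simplified stand-in for ir::Node: only the edge lists matter here.
struct Node {
  explicit Node(std::string n) : name(std::move(n)) {}
  std::string name;
  std::vector<Node *> inputs;
  std::vector<Node *> outputs;
};

// True if `var` carries the ".block" suffix (i.e. it looks like a variable
// split by the DistributeTranspiler) and is one of the rpc (send/recv) vars.
bool IsSplitRpcVar(const std::string &var,
                   const std::vector<std::string> &rpc_vars) {
  return var.find(".block") != std::string::npos &&
         std::find(rpc_vars.begin(), rpc_vars.end(), var) != rpc_vars.end();
}

// Wire `before -> dep -> after`: `dep` carries no data, only ordering, so
// `after` cannot be scheduled until `before` has run.
void AddControlDep(Node *before, Node *dep, Node *after) {
  before->outputs.push_back(dep);
  dep->inputs.push_back(before);
  after->inputs.push_back(dep);
  dep->outputs.push_back(after);
}

int main() {
  // Hypothetical split-variable names, for illustration only.
  std::vector<std::string> recv_vars = {"fc_0.w_0.block0", "fc_0.w_0.block1"};
  std::cout << std::boolalpha
            << IsSplitRpcVar("fc_0.w_0.block0", recv_vars) << "\n"  // true
            << IsSplitRpcVar("fc_0.b_0", recv_vars) << "\n";        // false

  Node fetch_bar("fetch_barrier"), concat("concat"), dep("ctrl_dep_var");
  AddControlDep(&fetch_bar, &dep, &concat);
  // concat now transitively depends on fetch_barrier:
  std::cout << concat.inputs[0]->inputs[0]->name << "\n";  // fetch_barrier
  return 0;
}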