/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <memory>

#include "paddle/fluid/framework/ir/graph.h"
#include "paddle/fluid/framework/operator.h"

PADDLE_DEFINE_EXPORTED_bool(convert_all_blocks, true,
                            "Convert all blocks in program into SSAgraphs");

namespace paddle {
namespace framework {
namespace ir {

Graph::Graph(const ProgramDesc &program)
    : Graph(program, 0, program.Block(0).AllOps().size()) {}

Graph::Graph(const ProgramDesc &program, const int64_t start_op_index,
             const int64_t end_op_index)
    : program_(program), main_graph_(nullptr) {
  PADDLE_ENFORCE_GE(start_op_index, 0,
                    platform::errors::InvalidArgument(
                        "Required start_op_index >= 0, but received "
                        "start_op_index = %d",
                        start_op_index));
  PADDLE_ENFORCE_GE(end_op_index, start_op_index,
                    platform::errors::InvalidArgument(
                        "Required end_op_index >= start_op_index, but received "
                        "end_op_index: %d < start_op_index: %d",
                        end_op_index, start_op_index));
  PADDLE_ENFORCE_GE(
      program_.Size(), 1,
      platform::errors::InvalidArgument("Can't construct a graph from this "
                                        "program, it doesn't have a block"));

  const int64_t block_op_size = program_.Block(0).AllOps().size();
  PADDLE_ENFORCE_LE(end_op_index, block_op_size,
                    platform::errors::InvalidArgument(
                        "Required end_op_index <= block_op_size, but received "
                        "end_op_index: %d > block_op_size: %d",
                        end_op_index, block_op_size));
  if (FLAGS_convert_all_blocks) {
    // NOTE(levi): start_op_index and end_op_index only work on the first
    // sub_graph.
    std::unique_ptr<Graph> first_sub_graph = std::make_unique<Graph>(
        program_.Block(0), this, start_op_index, end_op_index);
    first_sub_graph->block_id_ = 0;
    sub_graphs_.push_back(std::move(first_sub_graph));
    for (size_t idx = 1; idx < program_.Size(); ++idx) {
      std::unique_ptr<Graph> sub_graph =
          std::make_unique<Graph>(program_.Block(idx), this);
      sub_graph->block_id_ = idx;
      sub_graphs_.push_back(std::move(sub_graph));
    }
  } else {
    auto var_nodes = InitFromProgram(program_, start_op_index, end_op_index);
    ResolveHazard(var_nodes);
  }
}

Graph::Graph(const BlockDesc &block, const Graph *main_graph)
    : Graph(block, main_graph, 0, block.AllOps().size()) {}

Graph::Graph(const BlockDesc &block, const Graph *main_graph,
             const int64_t start_op_index, const int64_t end_op_index)
    : main_graph_(main_graph) {
  auto var_nodes = InitFromBlock(block, start_op_index, end_op_index);
  ResolveHazard(var_nodes);
}
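
// Illustrative usage (a hypothetical caller, not part of this file):
//
//   ProgramDesc program = LoadProgram();   // `LoadProgram` is hypothetical
//   ir::Graph whole(program);              // graph over all ops of block 0
//                                          // (and, with
//                                          // FLAGS_convert_all_blocks, one
//                                          // sub-graph per block)
//   ir::Graph prefix(program, /*start_op_index=*/0, /*end_op_index=*/3);
//                                          // partial graph over the first
//                                          // three ops of block 0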

// TODO(levi): delete this interface once we can convert all
// blocks into sub_graphs.
std::map<std::string, std::vector<ir::Node *>> Graph::InitFromProgram(
    const ProgramDesc &program, const int64_t start_op_index,
    const int64_t end_op_index) {
  VLOG(3) << "blocks in program: " << program_.Size();
  return InitFromBlock(program.Block(0), start_op_index, end_op_index);
}

std::map<std::string, std::vector<ir::Node *>> Graph::InitFromBlock(
    const BlockDesc &block, const int64_t start_op_index,
    const int64_t end_op_index) {
  std::unordered_map<std::string, std::pair<VarDesc *, int>>
      name_to_desc_block_id;

  block_id_ = block.ID();
  const BlockDesc *block_var_visible = &block;
  while (block_var_visible != nullptr) {
    for (auto *var : block_var_visible->AllVars()) {
      name_to_desc_block_id.emplace(
          var->Name(), std::make_pair(var, block_var_visible->ID()));
    }
    const BlockDesc *forward_block = block_var_visible->ForwardBlock();
    if (forward_block != nullptr) {
      for (auto *var : forward_block->AllVars()) {
        name_to_desc_block_id.emplace(var->Name(),
                                      std::make_pair(var, forward_block->ID()));
      }
    }
    block_var_visible = block_var_visible->ParentBlock();
  }
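
  // Reading of the walk above (illustrative note): the lookup starts from
  // `block` itself and moves outward through parent blocks, and emplace()
  // keeps the first insertion, so the innermost definition of a name wins.
  // Each visited block's ForwardBlock() vars are also recorded, which lets a
  // backward block (e.g. the body of while_grad) see vars of its forward
  // block.
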
  // Var nodes for each var name; a var may have multiple versions in SSA form.
  std::map<std::string, std::vector<ir::Node *>> var_nodes;
  std::unordered_map<std::string, VarDesc *> not_visited_vars;
  for (auto *var : block.AllVars()) {
    not_visited_vars.emplace(var->Name(), var);
  }

  int desc_order = 0;
  auto all_ops = block.AllOps();
  PADDLE_ENFORCE_LE(
      end_op_index, all_ops.size(),
      platform::errors::InvalidArgument(
          "Required end_op_index <= %d, but received end_op_index = %d",
          all_ops.size(), end_op_index));

  for (auto i = start_op_index; i < end_op_index; ++i) {
    auto *op = all_ops[i];
    VLOG(3) << "create OpNode by " << op->Type();
    ir::Node *node = CreateOpNode(op);
    node->SetDescOrder(desc_order);
    ++desc_order;
    // For input args, reuse the node for the same var name if one was
    // created before. Otherwise, create a new one.
    for (auto &each_var_name : op->InputArgumentNames()) {
      not_visited_vars.erase(each_var_name);
      ir::Node *var = nullptr;
      if (var_nodes.find(each_var_name) != var_nodes.end()) {
        var = var_nodes.at(each_var_name).back();
      } else if (name_to_desc_block_id.count(each_var_name) != 0) {
        auto desc_and_block_id = name_to_desc_block_id.at(each_var_name);
        var = CreateVarNode(desc_and_block_id.first, desc_and_block_id.second);
        var_nodes[each_var_name].push_back(var);
      } else {
        // Operation input vars can be optional (dispensable), which means
        // the operation doesn't really need them at runtime. In this
        // case, the missing var is treated as ready at the beginning.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
        var_nodes[each_var_name].push_back(var);
      }
      node->inputs.push_back(var);
      var->outputs.push_back(node);
    }
    // For output args, always create a new var.
    std::unordered_set<std::string> out_arg_set;
    for (auto &each_var_name : op->OutputArgumentNames()) {
      not_visited_vars.erase(each_var_name);
      if (each_var_name != kEmptyVarName) {
        PADDLE_ENFORCE_EQ(out_arg_set.count(each_var_name), 0,
                          platform::errors::InvalidArgument(
                              "The input Program is invalid. Variable %s occurs"
                              " in output of %s multiple times.",
                              each_var_name, op->Type()));
        out_arg_set.insert(each_var_name);
      }

      ir::Node *var = nullptr;
      if (name_to_desc_block_id.count(each_var_name) != 0) {
        auto desc_and_block_id = name_to_desc_block_id.at(each_var_name);
        var = CreateVarNode(desc_and_block_id.first, desc_and_block_id.second);
      } else {
        // Operation output vars can be @EMPTY@. For example, while_grad
        // can have multiple @EMPTY@ outputs with no VarDesc.
        // TODO(panyx0718): Add a test.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
      }
      var_nodes[each_var_name].push_back(var);
      node->outputs.push_back(var);
      var->inputs.push_back(node);
    }
  }

  if (end_op_index < static_cast<int64_t>(all_ops.size()) ||
      start_op_index > 0) {
    is_partial_ = true;
  }
  if (!is_partial_) {
    for (auto &pair : not_visited_vars) {
      const auto &var_name = pair.first;
      auto *var_desc = pair.second;
      if (var_name != kEmptyVarName) {
        VLOG(10) << "Create isolated var node " << var_name;
        var_nodes[var_name].push_back(CreateVarNode(var_desc));
      }
    }
  }

  Set<const std::vector<OpDesc *>>(
      details::kStaleProgramOpDescs,
      new std::vector<OpDesc *>(all_ops.begin() + start_op_index,
                                all_ops.begin() + end_op_index));
  VLOG(3)
      << "kStaleProgramOpDescs.size: "
      << Get<const std::vector<OpDesc *>>(details::kStaleProgramOpDescs).size();
  return var_nodes;
}

void Graph::ResolveHazard(
    const std::map<std::string, std::vector<ir::Node *>> &var_nodes) {
  /**
   * We should handle write-after-read (WAR) and write-after-write (WAW)
   * hazards here, because some operators of the program may be executed
   * in parallel. To keep the program running in the right order, we add
   * explicit WAR and WAW dependencies.
   *
   *
   * https://en.wikipedia.org/wiki/Hazard_(computer_architecture)#Write_after_read_(WAR)
   */
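
  // Illustrative example (not from the source): suppose a block contains
  //   op1: reads  x   (a reader of version x0)
  //   op2: writes x   (the op generating version x1)
  // A parallel executor could schedule op2 before op1 finishes (WAR), so the
  // loop below inserts a control-dependency var: op1 -> dep_var -> op2. WAW
  // is handled likewise, between the ops generating consecutive versions.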

  for (auto &var : var_nodes) {
    auto &versions = var.second;
    if (versions.size() <= 1) continue;

    auto it_new = versions.rbegin();
    auto it_old = versions.rbegin();
    ++it_old;
    for (; it_old != versions.rend(); it_new = it_old, ++it_old) {
      VLOG(3) << "deal with var: " << (*it_new)->Name();
      ir::Node *write_op =
          (*it_new)->inputs.empty() ? nullptr : (*it_new)->inputs[0];
      const auto &read_ops = (*it_old)->outputs;

      PADDLE_ENFORCE_NOT_NULL(
          write_op, platform::errors::NotFound(
                        "The generate operator of variable %s is null.",
                        (*it_new)->Name()));

      // Add write after write dependence
      ir::Node *upstream_op =
          (*it_old)->inputs.empty() ? nullptr : (*it_old)->inputs[0];
      // TODO(zcd): Add a test.
      if (upstream_op && upstream_op != write_op) {
        ir::Node *dep_var = CreateControlDepVar();
        write_op->inputs.push_back(dep_var);
        upstream_op->outputs.push_back(dep_var);
        VLOG(10) << "add dep_var:" << dep_var->Name();
        dep_var->outputs.push_back(write_op);
        dep_var->inputs.push_back(upstream_op);
      }

      for (auto *read_op : read_ops) {
        // Manually add a dependency var from read_op to write_op.
        if (read_op == write_op) {
          // The read and the write are performed by the same op.
          continue;
        }
        // The two ops might already be connected via other vars.
        bool has_dep = false;
        for (ir::Node *r_out : read_op->outputs) {
          for (ir::Node *w_in : write_op->inputs) {
            if (r_out == w_in) {
              has_dep = true;
              break;
            }
          }
        }
        if (has_dep) continue;

        ir::Node *dep_var = CreateControlDepVar();
        VLOG(10) << "add dep_var:" << dep_var->Name();
        read_op->outputs.push_back(dep_var);
        dep_var->inputs.push_back(read_op);
        write_op->inputs.push_back(dep_var);
        dep_var->outputs.push_back(write_op);
      }
    }
  }
}

std::shared_ptr<Graph> Graph::Clone() {
  PADDLE_ENFORCE_EQ(
      this->IsMainGraph(), true,
      platform::errors::InvalidArgument(
          "This graph is a sub_graph, and can't be cloned individually"));
  if (FLAGS_convert_all_blocks) {
    auto cloned_graph = std::make_shared<Graph>(this->program_);
    cloned_graph->ReleaseSubGraphs();
    for (size_t idx = 0; idx < this->program_.Size(); ++idx) {
      cloned_graph->AddSubGraph(this->CloneSubGraph(idx));
    }
    return cloned_graph;
  } else {
    auto cloned_graph = std::make_shared<Graph>(this->program_);
    cloned_graph->ReleaseNodes();
    cloned_graph->num_node_created_ = 0;
    cloned_graph->block_id_ = this->block_id_;
    std::unordered_map<ir::Node *, ir::Node *> origin_to_cloned;
    for (auto *n : this->node_set_) {
      PADDLE_ENFORCE_NOT_NULL(n, platform::errors::InvalidArgument(
                                     "The node to be cloned is nullptr."));
      ir::Node *cloned_node = nullptr;
      if (n->IsCtrlVar()) {
        cloned_node = cloned_graph->CreateControlDepVar();
      } else if (!n->var_desc_ && !n->op_desc_) {  // empty node
        cloned_node = cloned_graph->CreateEmptyNode(n->Name(), n->NodeType());
      } else if (n->IsVar()) {
        cloned_node = cloned_graph->CreateVarNode(n->Var());
      } else if (n->IsOp()) {
        cloned_node = cloned_graph->CreateOpNode(n->Op());
      }
      PADDLE_ENFORCE_NOT_NULL(
          cloned_node,
          platform::errors::InvalidArgument(
              "Failed to clone new node from original node in graph."));
      origin_to_cloned[n] = cloned_node;
    }
    for (auto *n : this->node_set_) {
      for (auto it = n->inputs.begin(); it != n->inputs.end(); it++) {
        origin_to_cloned[n]->inputs.push_back(origin_to_cloned[*it]);
      }
      for (auto it = n->outputs.begin(); it != n->outputs.end(); it++) {
        origin_to_cloned[n]->outputs.push_back(origin_to_cloned[*it]);
      }
    }
    return cloned_graph;
  }
}
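
// Illustrative usage (hypothetical caller, not part of this file): Clone()
// may only be called on the main graph, which clones every sub-graph along
// with it.
//
//   std::shared_ptr<ir::Graph> copy = graph.Clone();  // requires
//                                                     // graph.IsMainGraph()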

std::unique_ptr<Graph> Graph::CloneSubGraph(const size_t idx) {
  PADDLE_ENFORCE_EQ(
      this->IsMainGraph(), true,
      platform::errors::InvalidArgument("This graph is not main_graph"));
  PADDLE_ENFORCE_LT(
      idx, this->sub_graphs_.size(),
      platform::errors::InvalidArgument("Invalid sub_graph index"));
  std::unique_ptr<Graph> cloned_sub_graph =
      std::make_unique<Graph>(this->program_.Block(idx), this);
  cloned_sub_graph->ReleaseNodes();
  cloned_sub_graph->num_node_created_ = 0;
  cloned_sub_graph->block_id_ = idx;
  std::unordered_map<ir::Node *, ir::Node *> origin_to_cloned;
  for (auto *n : this->sub_graphs_.at(idx)->Nodes()) {
    PADDLE_ENFORCE_NOT_NULL(n, platform::errors::InvalidArgument(
                                   "The node to be cloned is nullptr."));
    ir::Node *cloned_node = nullptr;
    if (n->IsCtrlVar()) {
      cloned_node = cloned_sub_graph->CreateControlDepVar();
    } else if (!n->var_desc_ && !n->op_desc_) {  // empty node
      cloned_node = cloned_sub_graph->CreateEmptyNode(n->Name(), n->NodeType());
    } else if (n->IsVar()) {
      cloned_node = cloned_sub_graph->CreateVarNode(n->Var());
    } else if (n->IsOp()) {
      cloned_node = cloned_sub_graph->CreateOpNode(n->Op());
    }
    PADDLE_ENFORCE_NOT_NULL(
        cloned_node,
        platform::errors::InvalidArgument(
            "Failed to clone new node from original node in graph."));
    origin_to_cloned[n] = cloned_node;
  }
  for (auto *n : this->sub_graphs_.at(idx)->Nodes()) {
    for (auto it = n->inputs.begin(); it != n->inputs.end(); it++) {
      origin_to_cloned[n]->inputs.push_back(origin_to_cloned[*it]);
    }
    for (auto it = n->outputs.begin(); it != n->outputs.end(); it++) {
      origin_to_cloned[n]->outputs.push_back(origin_to_cloned[*it]);
    }
  }
  return cloned_sub_graph;
}

bool IsControlDepVar(const ir::Node &var) {
  return var.Name().find(ir::Node::kControlDepVarName) != std::string::npos;
}
}  // namespace ir
}  // namespace framework
}  // namespace paddle