/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/ir/graph.h"

#include <memory>

#include "paddle/fluid/framework/operator.h"

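// When FLAGS_convert_all_blocks is on, a Graph built from a ProgramDesc
// becomes a "main graph" that owns one sub-graph per block; when it is off,
// only block 0 is converted (see the Graph constructors below).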
PADDLE_DEFINE_EXPORTED_bool(convert_all_blocks,
                            true,
                            "Convert all blocks in program into SSA graphs");

namespace paddle {
namespace framework {
namespace ir {

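// Convenience constructor: build a graph covering every op of block 0.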
Graph::Graph(const ProgramDesc &program)
    : Graph(program, 0, program.Block(0).AllOps().size()) {}

Graph::Graph(const ProgramDesc &program,
             const int64_t start_op_index,
             const int64_t end_op_index)
    : program_(program), main_graph_(nullptr) {
  PADDLE_ENFORCE_GE(start_op_index,
                    0,
                    platform::errors::InvalidArgument(
                        "Required start_op_index >= 0, but received "
                        "start_op_index = %d",
                        start_op_index));
  PADDLE_ENFORCE_GE(end_op_index,
                    start_op_index,
                    platform::errors::InvalidArgument(
                        "Required end_op_index >= start_op_index, but received "
                        "end_op_index: %d < start_op_index: %d",
                        end_op_index,
                        start_op_index));
  PADDLE_ENFORCE_GE(
      program_.Size(),
      1,
      platform::errors::InvalidArgument("Can't construct a graph from this "
                                        "program, it doesn't have a block"));

  const int64_t block_op_size = program_.Block(0).AllOps().size();
  PADDLE_ENFORCE_LE(end_op_index,
                    block_op_size,
                    platform::errors::InvalidArgument(
                        "Required end_op_index <= block_op_size, but received "
                        "end_op_index: %d > block_op_size: %d",
                        end_op_index,
                        block_op_size));
  if (FLAGS_convert_all_blocks) {
    // NOTE(levi): start_op_index and end_op_index only apply to the first
    // sub_graph.
    std::unique_ptr<Graph> first_sub_graph = std::make_unique<Graph>(
        program_.Block(0), this, start_op_index, end_op_index);
    first_sub_graph->block_id_ = 0;
    sub_graphs_.push_back(std::move(first_sub_graph));
    for (size_t idx = 1; idx < program_.Size(); ++idx) {
      std::unique_ptr<Graph> sub_graph =
          std::make_unique<Graph>(program_.Block(idx), this);
      sub_graph->block_id_ = idx;
      sub_graphs_.push_back(std::move(sub_graph));
    }
  } else {
    auto var_nodes = InitFromProgram(program_, start_op_index, end_op_index);
  }
}
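
// A minimal usage sketch (assuming a ProgramDesc named "program" is in
// scope; the name is illustrative):
//   ir::Graph whole(program);         // all ops of block 0
//   ir::Graph prefix(program, 0, 5);  // only ops [0, 5) of block 0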

Graph::Graph(const BlockDesc &block, const Graph *main_graph)
    : Graph(block, main_graph, 0, block.AllOps().size()) {}

Graph::Graph(const BlockDesc &block,
             const Graph *main_graph,
             const int64_t start_op_index,
             const int64_t end_op_index)
    : main_graph_(main_graph) {
  auto var_nodes = InitFromBlock(block, start_op_index, end_op_index);
}

// TODO(levi): delete this interface once we can convert all
// blocks into sub_graphs.
std::map<std::string, std::vector<ir::Node *>> Graph::InitFromProgram(
    const ProgramDesc &program,
    const int64_t start_op_index,
    const int64_t end_op_index) {
  VLOG(3) << "block in program:" << program_.Size();
  return InitFromBlock(program.Block(0), start_op_index, end_op_index);
}

std::map<std::string, std::vector<ir::Node *>> Graph::InitFromBlock(
    const BlockDesc &block,
    const int64_t start_op_index,
    const int64_t end_op_index) {
  std::unordered_map<std::string, std::pair<VarDesc *, int>>
      name_to_desc_block_id;

  block_id_ = block.ID();
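  // Collect every VarDesc visible from this block: vars declared here, in
  // any ancestor block, and in the matching forward block (relevant when
  // this block is a backward block).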
  const BlockDesc *block_var_visible = &block;
  while (block_var_visible != nullptr) {
    for (auto *var : block_var_visible->AllVars()) {
      name_to_desc_block_id.emplace(
          var->Name(), std::make_pair(var, block_var_visible->ID()));
    }
    const BlockDesc *forward_block = block_var_visible->ForwardBlock();
    if (forward_block != nullptr) {
      for (auto *var : forward_block->AllVars()) {
        name_to_desc_block_id.emplace(var->Name(),
                                      std::make_pair(var, forward_block->ID()));
      }
    }
    block_var_visible = block_var_visible->ParentBlock();
  }
  // Var nodes for each var name; a var may have multiple versions in SSA.
  std::map<std::string, std::vector<ir::Node *>> var_nodes;
  std::unordered_map<std::string, VarDesc *> not_visited_vars;
  for (auto *var : block.AllVars()) {
    not_visited_vars.emplace(var->Name(), var);
  }

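  // desc_order records each op's position in the original block, so passes
  // can still recover the original program order from the graph.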
  int desc_order = 0;
  auto all_ops = block.AllOps();
  PADDLE_ENFORCE_LE(
      end_op_index,
      all_ops.size(),
      platform::errors::InvalidArgument(
          "Required end_op_index <= %d, but received end_op_index = %d",
          all_ops.size(),
          end_op_index));

  for (auto i = start_op_index; i < end_op_index; ++i) {
    auto *op = all_ops[i];
    VLOG(3) << "create OpNode by " << op->Type();
    ir::Node *node = CreateOpNode(op);
    node->SetDescOrder(desc_order);
    ++desc_order;
    // For input args, reuse the node of the same var name if one was
    // created before; otherwise, create a new one.
    for (auto &each_var_name : op->InputArgumentNames(true)) {
      not_visited_vars.erase(each_var_name);
      ir::Node *var = nullptr;
      if (var_nodes.find(each_var_name) != var_nodes.end()) {
        var = var_nodes.at(each_var_name).back();
      } else if (name_to_desc_block_id.count(each_var_name) != 0) {
        auto desc_and_block_id = name_to_desc_block_id.at(each_var_name);
        var = CreateVarNode(desc_and_block_id.first, desc_and_block_id.second);
        var_nodes[each_var_name].push_back(var);
      } else {
        // Operation input vars can be optional (dispensable), which means
        // the operation doesn't really need the var at runtime. In this
        // case, the non-existent var is treated as ready at the beginning.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
        var_nodes[each_var_name].push_back(var);
      }
      node->inputs.push_back(var);
      var->outputs.push_back(node);
    }
    // For output args, always create a new var.
    std::unordered_set<std::string> out_arg_set;
    for (auto &each_var_name : op->OutputArgumentNames()) {
      not_visited_vars.erase(each_var_name);
      if (each_var_name != kEmptyVarName) {
        PADDLE_ENFORCE_EQ(out_arg_set.count(each_var_name),
                          0,
                          platform::errors::InvalidArgument(
                              "The input Program is invalid. Variable %s occurs"
                              " in output of %s multiple times.",
                              each_var_name,
                              op->Type()));
        out_arg_set.insert(each_var_name);
      }

      ir::Node *var = nullptr;
      if (name_to_desc_block_id.count(each_var_name) != 0) {
        auto desc_and_block_id = name_to_desc_block_id.at(each_var_name);
        var = CreateVarNode(desc_and_block_id.first, desc_and_block_id.second);
      } else {
        // Operation output vars can be @EMPTY@. For example, while_grad
        // can have multiple @EMPTY@ outputs with no VarDesc.
        // TODO(panyx0718): Add a test.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
      }
      var_nodes[each_var_name].push_back(var);
      node->outputs.push_back(var);
      var->inputs.push_back(node);
    }
  }

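  // A graph built from a strict sub-range of ops is "partial". Isolated
  // (never read or written) vars are materialized only for full graphs,
  // since a partial graph may exclude the ops that touch them.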
  if (end_op_index < static_cast<int64_t>(all_ops.size()) ||
      start_op_index > 0) {
    is_partial_ = true;
  }
  if (!is_partial_) {
    for (auto &pair : not_visited_vars) {
      const auto &var_name = pair.first;
      auto *var_desc = pair.second;
      if (var_name != kEmptyVarName) {
        VLOG(10) << "Create isolated var node " << var_name;
        var_nodes[var_name].push_back(CreateVarNode(var_desc));
      }
    }
  }

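  // Stash the source OpDescs on the graph as an attribute; they can be read
  // back with Get<const std::vector<OpDesc *>>(details::kStaleProgramOpDescs),
  // as the VLOG below does.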
  Set<const std::vector<OpDesc *>>(
      details::kStaleProgramOpDescs,
      new std::vector<OpDesc *>(all_ops.begin() + start_op_index,
                                all_ops.begin() + end_op_index));
  VLOG(3)
      << "kStaleProgramOpDescs.size: "
      << Get<const std::vector<OpDesc *>>(details::kStaleProgramOpDescs).size();
  return var_nodes;
}

void Graph::ResolveHazard(
    const std::map<std::string, std::vector<ir::Node *>> &var_nodes) {
  /**
   * We handle write-after-read (WAR) and write-after-write (WAW) hazards
   * here, because some operators of the program can be executed in
   * parallel. To keep the program running in the right order, we add
   * explicit WAR and WAW dependencies.
   *
   * https://en.wikipedia.org/wiki/Hazard_(computer_architecture)#Write_after_read_(WAR)
   */
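  // Example: if op A reads x (version v0) and op B later writes x (creating
  // version v1), a control-dep var c is inserted so that A -> c -> B,
  // forcing A to finish before B overwrites x.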

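  // Walk adjacent version pairs from newest to oldest: *it_new is the later
  // SSA version of a var, *it_old the version just before it.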
  for (auto &var : var_nodes) {
    auto &versions = var.second;
    if (versions.size() <= 1) continue;

    auto it_new = versions.rbegin();
    auto it_old = versions.rbegin();
    ++it_old;
    for (; it_old != versions.rend(); it_new = it_old, ++it_old) {
      VLOG(3) << "deal with var: " << (*it_new)->Name();
      ir::Node *write_op =
          (*it_new)->inputs.empty() ? nullptr : (*it_new)->inputs[0];
      const auto &read_ops = (*it_old)->outputs;

      PADDLE_ENFORCE_NOT_NULL(
          write_op,
          platform::errors::NotFound(
              "The generate operator of variable %s is null.",
              (*it_new)->Name()));

      // Add a write-after-write (WAW) dependence.
      ir::Node *upstream_op =
          (*it_old)->inputs.empty() ? nullptr : (*it_old)->inputs[0];
      // TODO(zcd): Add a test.
      if (upstream_op && upstream_op != write_op) {
        ir::Node *dep_var = CreateControlDepVar();
        write_op->inputs.push_back(dep_var);
        upstream_op->outputs.push_back(dep_var);
        VLOG(10) << "add dep_var:" << dep_var->Name();
        dep_var->outputs.push_back(write_op);
        dep_var->inputs.push_back(upstream_op);
      }

      for (auto *read_op : read_ops) {
        // Manually add a dependency var from read_op to write_op.
        if (read_op == write_op) {
          // Read Write is the same op.
          continue;
        }
        // The two ops might already be connected via other vars.
        bool has_dep = false;
        for (ir::Node *r_out : read_op->outputs) {
          for (ir::Node *w_in : write_op->inputs) {
            if (r_out == w_in) {
              has_dep = true;
              break;
            }
          }
        }
        if (has_dep) continue;

        ir::Node *dep_var = CreateControlDepVar();
        VLOG(10) << "add dep_var:" << dep_var->Name();
        read_op->outputs.push_back(dep_var);
        dep_var->inputs.push_back(read_op);
        write_op->inputs.push_back(dep_var);
        dep_var->outputs.push_back(write_op);
      }
    }
  }
}

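// Deep-copies a main graph: every node is recreated and the input/output
// edges are rebuilt between the cloned nodes, so mutating the clone never
// touches the original graph.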
std::shared_ptr<Graph> Graph::Clone() {
  PADDLE_ENFORCE_EQ(
      this->IsMainGraph(),
      true,
      platform::errors::InvalidArgument(
          "This graph is a sub_graph, and can't be cloned individually"));
  if (FLAGS_convert_all_blocks) {
    auto cloned_graph = std::make_shared<Graph>(this->program_);
    cloned_graph->ReleaseSubGraphs();
    for (size_t idx = 0; idx < this->program_.Size(); ++idx) {
      cloned_graph->AddSubGraph(this->CloneSubGraph(idx));
    }
    return cloned_graph;
  } else {
    auto cloned_graph = std::make_shared<Graph>(this->program_);
    cloned_graph->ReleaseNodes();
    cloned_graph->num_node_created_ = 0;
    cloned_graph->block_id_ = this->block_id_;
    std::unordered_map<ir::Node *, ir::Node *> origin_to_cloned;
    for (auto *n : this->node_set_) {
      PADDLE_ENFORCE_NOT_NULL(n,
                              platform::errors::InvalidArgument(
                                  "The node to be cloned is nullptr."));
      ir::Node *cloned_node = nullptr;
      if (n->IsCtrlVar()) {
        cloned_node = cloned_graph->CreateControlDepVar();
      } else if (!n->var_desc_ && !n->op_desc_) {  // empty node
        cloned_node = cloned_graph->CreateEmptyNode(n->Name(), n->NodeType());
      } else if (n->IsVar()) {
        cloned_node = cloned_graph->CreateVarNode(n->Var());
      } else if (n->IsOp()) {
        cloned_node = cloned_graph->CreateOpNode(n->Op());
      }
      PADDLE_ENFORCE_NOT_NULL(
          cloned_node,
          platform::errors::InvalidArgument(
              "Failed to clone new node from original node in graph."));
      origin_to_cloned[n] = cloned_node;
    }
    for (auto *n : this->node_set_) {
      for (auto *in : n->inputs) {
        origin_to_cloned[n]->inputs.push_back(origin_to_cloned[in]);
      }
      for (auto *out : n->outputs) {
        origin_to_cloned[n]->outputs.push_back(origin_to_cloned[out]);
      }
    }
    return cloned_graph;
  }
}

std::unique_ptr<Graph> Graph::CloneSubGraph(const size_t idx) {
  PADDLE_ENFORCE_EQ(
      this->IsMainGraph(),
      true,
      platform::errors::InvalidArgument("This graph is not main_graph"));
  PADDLE_ENFORCE_LT(
      idx,
      this->sub_graphs_.size(),
      platform::errors::InvalidArgument("Invalid sub_graph index"));
  std::unique_ptr<Graph> cloned_sub_graph =
      std::make_unique<Graph>(this->program_.Block(idx), this);
  cloned_sub_graph->ReleaseNodes();
  cloned_sub_graph->num_node_created_ = 0;
  cloned_sub_graph->block_id_ = idx;
  std::unordered_map<ir::Node *, ir::Node *> origin_to_cloned;
  for (auto *n : this->sub_graphs_.at(idx)->Nodes()) {
    PADDLE_ENFORCE_NOT_NULL(
        n,
        platform::errors::InvalidArgument("The node to be cloned is nullptr."));
    ir::Node *cloned_node = nullptr;
    if (n->IsCtrlVar()) {
      cloned_node = cloned_sub_graph->CreateControlDepVar();
    } else if (!n->var_desc_ && !n->op_desc_) {  // empty node
      cloned_node = cloned_sub_graph->CreateEmptyNode(n->Name(), n->NodeType());
    } else if (n->IsVar()) {
      cloned_node = cloned_sub_graph->CreateVarNode(n->Var());
    } else if (n->IsOp()) {
      cloned_node = cloned_sub_graph->CreateOpNode(n->Op());
    }
    PADDLE_ENFORCE_NOT_NULL(
        cloned_node,
        platform::errors::InvalidArgument(
            "Failed to clone new node from original node in graph."));
    origin_to_cloned[n] = cloned_node;
  }
  for (auto *n : this->sub_graphs_.at(idx)->Nodes()) {
    for (auto *in : n->inputs) {
      origin_to_cloned[n]->inputs.push_back(origin_to_cloned[in]);
    }
    for (auto *out : n->outputs) {
      origin_to_cloned[n]->outputs.push_back(origin_to_cloned[out]);
    }
  }
  return cloned_sub_graph;
}

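// Control-dependency vars carry no data; they are recognized purely by the
// marker ir::Node::kControlDepVarName embedded in their generated name.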
bool IsControlDepVar(const ir::Node &var) {
  return var.Name().find(ir::Node::kControlDepVarName) != std::string::npos;
}
}  // namespace ir
}  // namespace framework
}  // namespace paddle