/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <algorithm>
#include <memory>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "paddle/fluid/framework/ir/graph.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/var_desc.h"

namespace paddle {
namespace framework {
namespace ir {

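// Builds the graph from block 0 of the program, then inserts control
// dependency vars to serialize hazardous accesses (see ResolveHazard below).
//
// A minimal usage sketch (LoadProgramDesc is a hypothetical helper):
//   ProgramDesc program = LoadProgramDesc("model.pb");
//   ir::Graph graph(program);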
Graph::Graph(const ProgramDesc &program) : program_(program) {
  auto var_nodes = InitFromProgram(program_);
  ResolveHazard(var_nodes);
}

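// Creates one op node per op in block 0 and SSA-style var nodes: every
// write to a var name appends a new version to var_nodes[name]. The
// name -> versions map is returned so ResolveHazard can walk it.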
std::map<std::string, std::vector<ir::Node *>> Graph::InitFromProgram(
    const ProgramDesc &program) {
  VLOG(3) << "block in program:" << program_.Size();
  std::unordered_map<std::string, VarDesc *> all_vars;
  // Var nodes for each var name; a name may have multiple versions in SSA.
  std::map<std::string, std::vector<ir::Node *>> var_nodes;
  for (auto *var : program.Block(0).AllVars()) {
    all_vars.emplace(var->Name(), var);
  }

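  // Start from all vars of the block; names are erased below as ops
  // reference them, so whatever remains is used by no operator.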
  auto not_visited_vars = all_vars;

  for (auto *op : program.Block(0).AllOps()) {
    ir::Node *node = CreateOpNode(op);
    // For input args, reuse the same var name if it was created before.
    // Otherwise, create a new one.
    for (auto &each_var_name : op->InputArgumentNames()) {
      not_visited_vars.erase(each_var_name);
      ir::Node *var = nullptr;
      if (var_nodes.find(each_var_name) != var_nodes.end()) {
        var = var_nodes.at(each_var_name).back();
      } else if (all_vars.count(each_var_name) != 0) {
        var = CreateVarNode(all_vars.at(each_var_name));
        var_nodes[each_var_name].push_back(var);
      } else {
        // An operator input var can be optional (dispensable), which means
        // the operator does not really need the var at runtime. In that
        // case, the non-existent var is considered ready at the beginning.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
        var_nodes[each_var_name].push_back(var);
      }
      node->inputs.push_back(var);
      var->outputs.push_back(node);
    }
    // For output args, always create a new var.
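    // A valid Program must not list the same variable twice in one op's
    // outputs; out_arg_set detects such duplicates.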
    std::unordered_set<std::string> out_arg_set;
    for (auto &each_var_name : op->OutputArgumentNames()) {
      not_visited_vars.erase(each_var_name);
      if (each_var_name != kEmptyVarName) {
        PADDLE_ENFORCE_EQ(out_arg_set.count(each_var_name), 0,
                          platform::errors::InvalidArgument(
                              "The input Program is invalid. Variable %s occurs"
                              " in output of %s multiple times.",
                              each_var_name, op->Type()));
        out_arg_set.insert(each_var_name);
      }

      ir::Node *var = nullptr;
      if (all_vars.count(each_var_name) != 0) {
        var = CreateVarNode(all_vars.at(each_var_name));
      } else {
        // Operator output vars can be @EMPTY@. For example, while_grad
        // can have multiple @EMPTY@ outputs with no VarDesc.
        // TODO(panyx0718): Add a test.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
      }
      var_nodes[each_var_name].push_back(var);
      node->outputs.push_back(var);
      var->inputs.push_back(node);
    }
  }

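  // Give each var that no op reads or writes an isolated node, so it is
  // still represented in the graph.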
  for (auto &pair : not_visited_vars) {
    const auto &var_name = pair.first;
    auto *var_desc = pair.second;
    if (var_name != kEmptyVarName) {
      VLOG(10) << "Create isolated var node " << var_name;
      var_nodes[var_name].push_back(CreateVarNode(var_desc));
    }
  }

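  // Stash the op descs of the original (stale) program as a graph
  // attribute; the graph takes ownership of the heap-allocated vector.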
  Set<const std::vector<OpDesc *>>(
      details::kStaleProgramOpDescs,
      new std::vector<OpDesc *>(program.Block(0).AllOps()));
  return var_nodes;
}

void Graph::ResolveHazard(
    const std::map<std::string, std::vector<ir::Node *>> &var_nodes) {
  /**
   * We must handle write-after-read (WAR) and write-after-write (WAW)
   * hazards here, because some operators of the program may execute in
   * parallel. To keep the program running in the correct order, we add
   * explicit dependencies for WAR and WAW.
   *
   * https://en.wikipedia.org/wiki/Hazard_(computer_architecture)#Write_after_read_(WAR)
   */

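  // Walk each var's version list from newest to oldest; it_new is the
  // later version and it_old the version written immediately before it.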
  for (auto &var : var_nodes) {
    auto &versions = var.second;
    if (versions.size() <= 1) continue;

    auto it_new = versions.rbegin();
    auto it_old = versions.rbegin();
    ++it_old;
    for (; it_old != versions.rend(); it_new = it_old, ++it_old) {
      VLOG(3) << "deal with var: " << (*it_new)->Name();
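      // A var version has at most one writer (the op that produced it,
      // stored as its first input); its readers are its outputs.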
      ir::Node *write_op =
          (*it_new)->inputs.empty() ? nullptr : (*it_new)->inputs[0];
      const auto &read_ops = (*it_old)->outputs;

      PADDLE_ENFORCE_NOT_NULL(
          write_op, platform::errors::NotFound(
                        "The generate operator of variable %s is null.",
                        (*it_new)->Name()));

      // Add a write-after-write (WAW) dependency.
      ir::Node *upstream_op =
          (*it_old)->inputs.empty() ? nullptr : (*it_old)->inputs[0];
      // TODO(zcd): Add a test.
      if (upstream_op && upstream_op != write_op) {
        ir::Node *dep_var = CreateControlDepVar();
        write_op->inputs.push_back(dep_var);
        upstream_op->outputs.push_back(dep_var);
        VLOG(10) << "add dep_var:" << dep_var->Name();
        dep_var->outputs.push_back(write_op);
        dep_var->inputs.push_back(upstream_op);
      }

      for (auto *read_op : read_ops) {
        // Manually add a dependency var from read_op to write_op.
        if (read_op == write_op) {
          // Read Write is the same op.
          continue;
        }
        // The two ops might already be connected via other vars.
        bool has_dep = false;
        for (ir::Node *r_out : read_op->outputs) {
          for (ir::Node *w_in : write_op->inputs) {
            if (r_out == w_in) {
              has_dep = true;
              break;
            }
          }
        }
        if (has_dep) continue;

        ir::Node *dep_var = CreateControlDepVar();
        VLOG(10) << "add dep_var:" << dep_var->Name();
        read_op->outputs.push_back(dep_var);
        dep_var->inputs.push_back(read_op);
        write_op->inputs.push_back(dep_var);
        dep_var->outputs.push_back(write_op);
      }
    }
  }
}

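// Deep-copies the graph in two passes: first clone every node and record
// the origin -> clone mapping, then rebuild all input/output edges of the
// clones through that mapping.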
std::shared_ptr<Graph> Graph::Clone() {
  auto cloned_graph = std::make_shared<Graph>(this->program_);
  cloned_graph->ReleaseNodes();
  cloned_graph->num_node_created_ = 0;
  std::unordered_map<ir::Node *, ir::Node *> origin_to_cloned;
  for (auto *n : this->node_set_) {
    PADDLE_ENFORCE_NOT_NULL(n, platform::errors::InvalidArgument(
                                   "The node to be cloned is nullptr."));
    ir::Node *cloned_node = nullptr;
    if (n->IsCtrlVar()) {
      cloned_node = cloned_graph->CreateControlDepVar();
    } else if (!n->var_desc_ && !n->op_desc_) {  // empty node
      cloned_node = cloned_graph->CreateEmptyNode(n->Name(), n->NodeType());
    } else if (n->IsVar()) {
      cloned_node = cloned_graph->CreateVarNode(n->Var());
    } else if (n->IsOp()) {
      cloned_node = cloned_graph->CreateOpNode(n->Op());
    }
    PADDLE_ENFORCE_NOT_NULL(
        cloned_node,
        platform::errors::InvalidArgument(
            "Failed to clone new node from original node in graph."));
    origin_to_cloned[n] = cloned_node;
  }
  for (auto *n : this->node_set_) {
    for (auto it = n->inputs.begin(); it != n->inputs.end(); it++) {
      origin_to_cloned[n]->inputs.push_back(origin_to_cloned[*it]);
    }
    for (auto it = n->outputs.begin(); it != n->outputs.end(); it++) {
      origin_to_cloned[n]->outputs.push_back(origin_to_cloned[*it]);
    }
  }
  return cloned_graph;
}

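// A control dependency var is recognized purely by the reserved
// kControlDepVarName marker embedded in its name.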
bool IsControlDepVar(const ir::Node &var) {
  return var.Name().find(ir::Node::kControlDepVarName) != std::string::npos;
}
}  // namespace ir
}  // namespace framework
}  // namespace paddle