/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <algorithm>
#include <map>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "paddle/fluid/framework/ir/graph.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/var_desc.h"

DEFINE_bool(enforce_when_check_program, true,
            "Check whether the program is correct or not. We will log "
            "errors rather than throwing exceptions if this flag is turned "
            "off.");

namespace paddle {
namespace framework {
namespace ir {
namespace {

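// Validates the op-role ordering of block 0: backward ops must not appear
// before forward ops, and optimize ops must follow backward ops. Depending
// on FLAGS_enforce_when_check_program, a violation either throws or is
// logged as an error.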
void CheckProgram(const ProgramDesc &program) {
#define _INT(role) static_cast<int>(role)

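  // Records which op roles have been seen so far while scanning block 0 in
  // program order.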
  std::map<int, bool> visit;
  for (OpDesc *op : program.Block(0).AllOps()) {
    // For backward compatibility, some programs don't have the role attribute set.
    if (!op->HasAttr(OpProtoAndCheckerMaker::OpRoleAttrName())) continue;
    int role_id =
        boost::get<int>(op->GetAttr(OpProtoAndCheckerMaker::OpRoleAttrName()));
    visit[role_id] = true;
    switch (role_id) {
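      // Role ids are bit flags, so combined roles such as kBackward | kLoss
      // get their own cases.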
      case _INT(OpRole::kForward):
        if (visit.find(_INT(OpRole::kBackward)) != visit.end()) {
          LOG(ERROR) << "Cannot add backward operator before forward operator "
                     << op->Type();
        }
        break;
      case _INT(OpRole::kBackward):
      case _INT(OpRole::kBackward) | _INT(OpRole::kLoss):
        if (FLAGS_enforce_when_check_program) {
          PADDLE_ENFORCE(
              visit.find(_INT(OpRole::kOptimize)) == visit.end(),
              "Cannot add backward operator %s after optimize operator.",
              op->Type());
        } else {
          if (visit.find(_INT(OpRole::kOptimize)) != visit.end()) {
            LOG(ERROR) << "Cannot add backward operator " << op->Type()
                       << " after optimize operator.";
          }
        }
        break;
      case _INT(OpRole::kForward) | _INT(OpRole::kLoss):
        if (FLAGS_enforce_when_check_program) {
          PADDLE_ENFORCE(visit.find(_INT(OpRole::kBackward) |
                                    _INT(OpRole::kLoss)) == visit.end(),
                         "Cannot add backward|loss operator before "
                         "forward|loss operator %s.",
                         op->Type());
          PADDLE_ENFORCE(
              visit.find(_INT(OpRole::kOptimize)) == visit.end(),
              "Cannot add forward|loss operator %s after optimize operator.",
              op->Type());
        } else {
          if (visit.find(_INT(OpRole::kBackward) | _INT(OpRole::kLoss)) !=
              visit.end()) {
            LOG(ERROR) << "Cannot add backward|loss operator before "
                       << "forward|loss operator " << op->Type();
          }

          if (visit.find(_INT(OpRole::kOptimize)) != visit.end()) {
            LOG(ERROR) << "Cannot add forward|loss operator " << op->Type()
                       << " after optimize operator.";
          }
        }
        break;
      case _INT(OpRole::kOptimize):
      case _INT(OpRole::kOptimize) | _INT(OpRole::kLRSched):
        if (FLAGS_enforce_when_check_program) {
          PADDLE_ENFORCE(visit.find(_INT(OpRole::kBackward)) != visit.end(),
                         "Optimize operators %s must follow backward operator.",
                         op->Type());
        } else {
          if (visit.find(_INT(OpRole::kBackward)) == visit.end()) {
            LOG(ERROR) << "Optimize operator " << op->Type()
                       << " must follow backward operator.";
          }
        }
        break;
      case _INT(OpRole::kLRSched):
      case _INT(OpRole::kDist):
      case _INT(OpRole::kRPC):
      case _INT(OpRole::kNotSpecified):
        break;
      default:
        LOG(FATAL) << "Unknown operator role. Don't add new role because "
                      "you don't know what you are doing.";
    }
  }

#undef _INT
}
}  // namespace

Graph::Graph(const ProgramDesc &program) : program_(program) {
  CheckProgram(program_);
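  // Build op/var nodes from block 0, then add control dependencies to
  // resolve WAR/WAW hazards between ops touching the same variable.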
  auto var_nodes = InitFromProgram(program_);
  ResolveHazard(var_nodes);
}

std::map<std::string, std::vector<ir::Node *>> Graph::InitFromProgram(
    const ProgramDesc &program) {
  VLOG(3) << "blocks in program: " << program_.Size();
  std::unordered_map<std::string, VarDesc *> all_vars;
  // var nodes for each var name, will have multiple versions in SSA
  std::map<std::string, std::vector<ir::Node *>> var_nodes;
  for (auto *var : program.Block(0).AllVars()) {
    all_vars.emplace(var->Name(), var);
  }

  for (auto *op : program.Block(0).AllOps()) {
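    // One graph node per operator; its input/output var nodes are wired up
    // below, appending a new var node version on every write.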
    ir::Node *node = CreateOpNode(op);
    // For input args, reuse the same var name if it was created before.
    // Otherwise, create a new one.
    for (auto &each_var_name : op->InputArgumentNames()) {
      ir::Node *var = nullptr;
      if (var_nodes.find(each_var_name) != var_nodes.end()) {
        var = var_nodes.at(each_var_name).back();
      } else if (all_vars.count(each_var_name) != 0) {
        var = CreateVarNode(all_vars.at(each_var_name));
        var_nodes[each_var_name].push_back(var);
      } else {
        // Operation input vars can be optional (dispensable), which means
        // the operation doesn't really need the var at runtime. In this
        // case, the non-existent var is treated as ready at the beginning.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
        var_nodes[each_var_name].push_back(var);
      }
      node->inputs.push_back(var);
      var->outputs.push_back(node);
    }
    // For output args, always create a new var.
    for (auto &each_var_name : op->OutputArgumentNames()) {
      ir::Node *var = nullptr;
      if (all_vars.count(each_var_name) != 0) {
        var = CreateVarNode(all_vars.at(each_var_name));
      } else {
        // Operation output vars can be @EMPTY@. For example, while_grad
        // can have multiple @EMPTY@ outputs with no VarDesc.
        // TODO(panyx0718): Add a test.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
      }
      var_nodes[each_var_name].push_back(var);
      node->outputs.push_back(var);
      var->inputs.push_back(node);
    }
  }
  return var_nodes;
}

void Graph::ResolveHazard(
    const std::map<std::string, std::vector<ir::Node *>> &var_nodes) {
  /**
   * We handle write-after-read (WAR) and write-after-write (WAW) hazards
   * here, because some operators of the program can be executed in parallel.
   * To make the program run in the right order, we add explicit dependencies
   * for WAR and WAW hazards.
   *
   * https://en.wikipedia.org/wiki/Hazard_(computer_architecture)#Write_after_read_(WAR)
   */
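  // For example, given "b = f(a); c = g(b); b = h(a)", the second write to b
  // (by h) must be ordered after g's read of b (WAR) and after f's write to
  // b (WAW); the control-dependency var nodes created below enforce this.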

  for (auto &var : var_nodes) {
    auto &versions = var.second;
    if (versions.size() <= 1) continue;

    auto it_new = versions.rbegin();
    auto it_old = versions.rbegin();
    ++it_old;
    for (; it_old != versions.rend(); it_new = it_old, ++it_old) {
      VLOG(3) << "deal with var: " << (*it_new)->Name();
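      // write_op produced this (newer) version of the var; read_ops are the
      // ops that consumed the previous version.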
      ir::Node *write_op =
          (*it_new)->inputs.empty() ? nullptr : (*it_new)->inputs[0];
      const auto &read_ops = (*it_old)->outputs;

      PADDLE_ENFORCE(
          write_op,
          string::Sprintf("The write_op of var %s should not be empty.",
                          (*it_new)->Name()));

      // Add write after write dependence
      ir::Node *upstream_op =
          (*it_old)->inputs.empty() ? nullptr : (*it_old)->inputs[0];
      // TODO(zcd): Add a test.
      if (upstream_op && upstream_op != write_op) {
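        // Wire upstream_op -> dep_var -> write_op so the newer write waits
        // for the older write (WAW).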
        ir::Node *dep_var = CreateControlDepVar();
        write_op->inputs.push_back(dep_var);
        upstream_op->outputs.push_back(dep_var);
        dep_var->outputs.push_back(write_op);
        dep_var->inputs.push_back(upstream_op);
      }

      for (auto *read_op : read_ops) {
        // Manually add a dependency var from read_op to write_op;
        if (read_op == write_op) {
          // Read Write is the same op.
          continue;
        }
        // 2 ops might have been connected via other vars.
        bool has_dep = false;
        for (ir::Node *r_out : read_op->outputs) {
          for (ir::Node *w_in : write_op->inputs) {
            if (r_out == w_in) {
              has_dep = true;
              break;
            }
          }
        }
        if (has_dep) continue;

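        // Wire read_op -> dep_var -> write_op so the write waits for the
        // read (WAR).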
        ir::Node *dep_var = CreateControlDepVar();
        read_op->outputs.push_back(dep_var);
        dep_var->inputs.push_back(read_op);
        write_op->inputs.push_back(dep_var);
        dep_var->outputs.push_back(write_op);
      }
    }
  }
}

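// Control-dependency var nodes created by CreateControlDepVar() carry
// ir::Node::kControlDepVarName in their names, so a substring check is
// enough to identify them.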
bool IsControlDepVar(const ir::Node &var) {
  return var.Name().find(ir::Node::kControlDepVarName) != std::string::npos;
}
}  // namespace ir
}  // namespace framework
}  // namespace paddle