/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <algorithm>
#include <map>
#include <string>
#include <unordered_set>
#include <vector>

#include "paddle/fluid/framework/ir/graph.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/var_desc.h"

DEFINE_bool(enforce_when_check_program, true,
            "Check whether the program is correct. If this flag is turned "
            "off, errors are logged instead of being thrown as exceptions.");

namespace paddle {
namespace framework {
namespace ir {
namespace {

void CheckProgram(const ProgramDesc &program) {
#define _INT(role) static_cast<int>(role)

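  // Walk block 0 in program order, recording every role seen so far in
  // `visit`; each case below rejects an op whose role cannot legally appear
  // after the roles already visited (forward -> backward -> optimize).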
  std::map<int, bool> visit;
  for (OpDesc *op : program.Block(0).AllOps()) {
    // For backward compatibility, some programs don't have the role
    // attribute added; skip them.
    if (!op->HasAttr(OpProtoAndCheckerMaker::OpRoleAttrName())) continue;
    int role_id =
        boost::get<int>(op->GetAttr(OpProtoAndCheckerMaker::OpRoleAttrName()));
    visit[role_id] = true;
    switch (role_id) {
      case _INT(OpRole::kForward):
        if (visit.find(_INT(OpRole::kBackward)) != visit.end()) {
          LOG(ERROR) << "Cannot add backward operator before forward operator "
                     << op->Type() << ".";
        }
        break;
      case _INT(OpRole::kBackward):
      case _INT(OpRole::kBackward) | _INT(OpRole::kLoss):
        if (FLAGS_enforce_when_check_program) {
          PADDLE_ENFORCE(
              visit.find(_INT(OpRole::kOptimize)) == visit.end(),
              "Cannot add backward operator %s after optimize operator.",
              op->Type());
        } else {
          if (visit.find(_INT(OpRole::kOptimize)) != visit.end()) {
            LOG(ERROR) << "Cannot add backward operator " << op->Type()
                       << " after optimize operator.";
          }
        }
        break;
      case _INT(OpRole::kForward) | _INT(OpRole::kLoss):
        if (FLAGS_enforce_when_check_program) {
          PADDLE_ENFORCE(visit.find(_INT(OpRole::kBackward) |
                                    _INT(OpRole::kLoss)) == visit.end(),
                         "Cannot add backward|loss operator before "
                         "forward|loss operator %s.",
                         op->Type());
          PADDLE_ENFORCE(
              visit.find(_INT(OpRole::kOptimize)) == visit.end(),
              "Cannot add forward|loss operator %s after optimize operator.",
              op->Type());
        } else {
          if (visit.find(_INT(OpRole::kBackward) | _INT(OpRole::kLoss)) !=
              visit.end()) {
            LOG(ERROR) << "Cannot add backward|loss operator before "
                       << "forward|loss operator " << op->Type() << ".";
          }

          if (visit.find(_INT(OpRole::kOptimize)) != visit.end()) {
            LOG(ERROR) << "Cannot add forward|loss operator " << op->Type()
                       << " after optimize operator.";
          }
        }
        break;
      case _INT(OpRole::kOptimize):
      case _INT(OpRole::kOptimize) | _INT(OpRole::kLRSched):
        if (FLAGS_enforce_when_check_program) {
          PADDLE_ENFORCE(visit.find(_INT(OpRole::kBackward)) != visit.end(),
                         "Optimize operator %s must follow a backward operator.",
                         op->Type());
        } else {
          if (visit.find(_INT(OpRole::kBackward)) == visit.end()) {
            LOG(ERROR) << "Optimize operator " << op->Type()
                       << " must follow a backward operator.";
          }
        }
        break;
      case _INT(OpRole::kLRSched):
      case _INT(OpRole::kDist):
      case _INT(OpRole::kRPC):
      case _INT(OpRole::kNotSpecified):
        break;
      default:
        LOG(FATAL) << "Unknown operator role. Don't add new role because "
                      "you don't know what you are doing.";
    }
  }

#undef _INT
}
}  // namespace

Graph::Graph(const ProgramDesc &program) : program_(program) {
  CheckProgram(program_);
  auto var_nodes = InitFromProgram(program_);
  ResolveHazard(var_nodes);
}
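
// A minimal usage sketch (the helper that builds the ProgramDesc is
// hypothetical): the constructor above runs the role check, builds the SSA
// var nodes, and resolves read/write hazards in one shot.
//   ProgramDesc program = BuildProgramSomehow();  // hypothetical helper
//   ir::Graph graph(program);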

std::map<std::string, std::vector<ir::Node *>> Graph::InitFromProgram(
    const ProgramDesc &program) {
  VLOG(3) << "blocks in program: " << program_.Size();
  std::unordered_map<std::string, VarDesc *> all_vars;
  // Var nodes for each var name; a name can have multiple versions in SSA.
  std::map<std::string, std::vector<ir::Node *>> var_nodes;
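  // For example, if block 0 contains "b = op1(a)" followed by "b = op2(b)",
  // var_nodes["b"] ends up holding two versions: op1's output and op2's.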
  for (auto *var : program.Block(0).AllVars()) {
    all_vars.emplace(var->Name(), var);
  }

  for (auto *op : program.Block(0).AllOps()) {
    ir::Node *node = CreateOpNode(op);
    // For input args, reuse the same var name if it was created before.
    // Otherwise, create a new one.
    for (auto &each_var_name : op->InputArgumentNames()) {
      ir::Node *var = nullptr;
      if (var_nodes.find(each_var_name) != var_nodes.end()) {
        var = var_nodes.at(each_var_name).back();
      } else if (all_vars.count(each_var_name) != 0) {
        var = CreateVarNode(all_vars.at(each_var_name));
        var_nodes[each_var_name].push_back(var);
      } else {
        // An operation's input var can be optional (dispensable), meaning
        // the operation doesn't really need the var at runtime. In this
        // case, the missing var is considered ready from the beginning.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
        var_nodes[each_var_name].push_back(var);
      }
      node->inputs.push_back(var);
      var->outputs.push_back(node);
    }
    // For output args, always create a new var.
    for (auto &each_var_name : op->OutputArgumentNames()) {
      ir::Node *var = nullptr;
      if (all_vars.count(each_var_name) != 0) {
        var = CreateVarNode(all_vars.at(each_var_name));
      } else {
        // Operation output vars can be @EMPTY@. For example, while_grad
        // can have multiple @EMPTY@ outputs with no VarDesc.
        // TODO(panyx0718): Add a test.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
      }
      var_nodes[each_var_name].push_back(var);
      node->outputs.push_back(var);
      var->inputs.push_back(node);
    }
  }
  return var_nodes;
}

void Graph::ResolveHazard(
    const std::map<std::string, std::vector<ir::Node *>> &var_nodes) {
  /**
   * We must handle write-after-read (WAR) and write-after-write (WAW)
   * hazards here, because some operators in the program can be executed
   * in parallel. To make the program run in the right order, we add
   * explicit WAR and WAW dependencies.
   *
   * https://en.wikipedia.org/wiki/Hazard_(computer_architecture)#Write_after_read_(WAR)
   */
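
  // Illustration (op names hypothetical): if op_r reads version v0 of a var
  // and op_w later writes version v1, the loop below inserts a control-dep
  // var so op_w waits for op_r (WAR): op_r -> dep_var -> op_w. Two
  // consecutive writers are chained the same way (WAW).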

  for (auto &var : var_nodes) {
    auto &versions = var.second;
    if (versions.size() <= 1) continue;

    auto it_new = versions.rbegin();
    auto it_old = versions.rbegin();
    ++it_old;
    for (; it_old != versions.rend(); it_new = it_old, ++it_old) {
      VLOG(3) << "deal with var: " << (*it_new)->Name();
      ir::Node *write_op =
          (*it_new)->inputs.empty() ? nullptr : (*it_new)->inputs[0];
      const auto &read_ops = (*it_old)->outputs;

      PADDLE_ENFORCE(write_op, "The write_op should not be null.");

      // Add write after write dependence
      ir::Node *upstream_op =
          (*it_old)->inputs.empty() ? nullptr : (*it_old)->inputs[0];
      // TODO(zcd): Add a test.
      if (upstream_op && upstream_op != write_op) {
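        // Wire upstream_op -> dep_var -> write_op so the later write waits
        // for the earlier one to finish.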
        ir::Node *dep_var = CreateControlDepVar();
        write_op->inputs.push_back(dep_var);
        upstream_op->outputs.push_back(dep_var);
        dep_var->outputs.push_back(write_op);
        dep_var->inputs.push_back(upstream_op);
      }

      for (auto *read_op : read_ops) {
        // Manually add a dependency var from read_op to write_op.
        if (read_op == write_op) {
          // The read and the write are performed by the same op.
          continue;
        }
        // The two ops might already be connected via other vars.
        bool has_dep = false;
        for (ir::Node *r_out : read_op->outputs) {
          for (ir::Node *w_in : write_op->inputs) {
            if (r_out == w_in) {
              has_dep = true;
              break;
            }
          }
        }
        if (has_dep) continue;

        ir::Node *dep_var = CreateControlDepVar();
        read_op->outputs.push_back(dep_var);
        dep_var->inputs.push_back(read_op);
        write_op->inputs.push_back(dep_var);
        dep_var->outputs.push_back(write_op);
      }
    }
  }
}

bool IsControlDepVar(const ir::Node &var) {
  return var.Name().find(ir::Node::kControlDepVarName) != std::string::npos;
}
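
// Note: nodes produced by CreateControlDepVar() carry
// ir::Node::kControlDepVarName in their generated name, which is what the
// substring match above relies on.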
}  // namespace ir
}  // namespace framework
}  // namespace paddle