/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <algorithm>
#include <map>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "paddle/fluid/framework/ir/graph.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/var_desc.h"

namespace paddle {
namespace framework {
namespace ir {
namespace {

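// Sanity-checks the op-role ordering of a ProgramDesc: forward ops must not
// appear after backward ops, backward ops must not appear after optimize
// ops, and optimize ops may only appear after some backward op.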
void CheckProgram(const ProgramDesc &program) {
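  // Records which op roles have been seen so far while scanning in order.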
  std::map<int, bool> visit;
#define _INT(role) static_cast<int>(role)

  for (size_t i = 0; i < program.Size(); ++i) {
    for (OpDesc *op : program.Block(i).AllOps()) {
      // For backward compatibility, some programs don't have the role
      // attribute added.
      if (!op->HasAttr(OpProtoAndCheckerMaker::OpRoleAttrName())) continue;
      int role_id = boost::get<int>(
          op->GetAttr(OpProtoAndCheckerMaker::OpRoleAttrName()));
      visit[role_id] = true;
      switch (role_id) {
        case _INT(OpRole::kForward):
          PADDLE_ENFORCE(
              visit.find(_INT(OpRole::kBackward)) == visit.end(),
              "Cannot add forward operator before backward operator.");
          break;
        case _INT(OpRole::kBackward):
        case _INT(OpRole::kBackward) | _INT(OpRole::kLoss):
          PADDLE_ENFORCE(
              visit.find(_INT(OpRole::kOptimize)) == visit.end(),
              "Cannot add backward operator before optimize operator.");
          break;
        case _INT(OpRole::kForward) | _INT(OpRole::kLoss):
          PADDLE_ENFORCE(visit.find(_INT(OpRole::kBackward) |
                                    _INT(OpRole::kLoss)) == visit.end(),
                         "Cannot add backward|loss operator before "
                         "forward|loss operator.");
          PADDLE_ENFORCE(
              visit.find(_INT(OpRole::kOptimize)) == visit.end(),
              "Cannot add backward operator before optimize operator.");
          break;
        case _INT(OpRole::kOptimize):
        case _INT(OpRole::kOptimize) | _INT(OpRole::kLRSched):
          PADDLE_ENFORCE(visit.find(_INT(OpRole::kBackward)) != visit.end(),
                         "Optimize operators must follow backward operator.");
          break;
        case _INT(OpRole::kLRSched):
        case _INT(OpRole::kDist):
        case _INT(OpRole::kRPC):
        case _INT(OpRole::kNotSpecified):
          break;
        default:
          LOG(FATAL) << "Unknown operator role. Don't add new role because "
                        "you don't know what you are doing.";
      }
    }
  }
#undef _INT
}
}  // namespace

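// Builds an SSA-style graph from a ProgramDesc: verifies the op-role
// ordering, renumbers nodes from zero, creates op/var nodes from block 0,
// and inserts control dependencies to resolve WAR/WAW hazards.
//
// Typical usage (a sketch; how the ProgramDesc is obtained is up to the
// caller):
//   ProgramDesc program = /* ... */;
//   ir::Graph graph(program);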
Graph::Graph(const ProgramDesc &program) : program_(program) {
  CheckProgram(program_);
  // Make the node ids start from 0.
  Node::ResetId();
  auto var_nodes = InitFromProgram(program_);
  ResolveHazard(var_nodes);
}

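// Creates an op node for every OpDesc in block 0 and var nodes for their
// arguments. Input args reuse the latest node created for a name; output
// args always get a fresh node, yielding SSA-like var versions.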
std::map<std::string, std::vector<ir::Node *>> Graph::InitFromProgram(
    const ProgramDesc &program) {
  VLOG(3) << "blocks in program: " << program_.Size();
  std::unordered_map<std::string, VarDesc *> all_vars;
  // Var nodes for each var name; a name may have multiple versions in SSA.
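  // (e.g. if "x" is written by two ops, var_nodes["x"] holds two nodes, one
  // per version).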
  std::map<std::string, std::vector<ir::Node *>> var_nodes;
  for (auto *var : program.Block(0).AllVars()) {
    all_vars.emplace(var->Name(), var);
  }

  for (auto *op : program.Block(0).AllOps()) {
    ir::Node *node = CreateOpNode(op);
    // For input args, reuse the node most recently created for the var name,
    // if any. Otherwise, create a new one.
    for (auto &each_var_name : op->InputArgumentNames()) {
      ir::Node *var = nullptr;
      if (var_nodes.find(each_var_name) != var_nodes.end()) {
        var = var_nodes.at(each_var_name).back();
      } else if (all_vars.count(each_var_name) != 0) {
        var = CreateVarNode(all_vars.at(each_var_name));
        var_nodes[each_var_name].push_back(var);
      } else {
        // An operator's input var can be optional (dispensable), which means
        // the operator doesn't really need the var at runtime. In that
        // case, the non-existent var is treated as ready from the beginning.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
        var_nodes[each_var_name].push_back(var);
      }
      node->inputs.push_back(var);
      var->outputs.push_back(node);
    }
    // For output args, always create a new var node (a new SSA version).
    for (auto &each_var_name : op->OutputArgumentNames()) {
      ir::Node *var = nullptr;
      if (all_vars.count(each_var_name) != 0) {
        var = CreateVarNode(all_vars.at(each_var_name));
      } else {
        // Operator output vars can be @EMPTY@. For example, while_grad
        // can have multiple @EMPTY@ outputs with no VarDesc.
        // TODO(panyx0718): Add a test.
        var = CreateEmptyNode(each_var_name, ir::Node::Type::kVariable);
      }
      var_nodes[each_var_name].push_back(var);
      node->outputs.push_back(var);
      var->inputs.push_back(node);
    }
  }
  return var_nodes;
}

void Graph::ResolveHazard(
    const std::map<std::string, std::vector<ir::Node *>> &var_nodes) {
  /**
   * We should handle write-after-read (WAR) and write-after-write (WAW)
   * hazards here, because some operators of the program can be executed
   * in parallel. To make the program run in the right order, we add
   * explicit dependencies for WAR and WAW.
   *
   * https://en.wikipedia.org/wiki/Hazard_(computer_architecture)#Write_after_read_(WAR)
   */
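  // Example: if read_op consumes version k of a var and write_op later
  // produces version k+1, a control-dependency var is inserted below so that
  // write_op cannot start before read_op finishes.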

  for (auto &var : var_nodes) {
    auto &versions = var.second;
    if (versions.size() <= 1) continue;

    auto it_new = versions.rbegin();
    auto it_old = versions.rbegin();
    ++it_old;
    for (; it_old != versions.rend(); it_new = it_old, ++it_old) {
      VLOG(3) << "resolving hazards for var: " << (*it_new)->Name();
      ir::Node *write_op =
          (*it_new)->inputs.empty() ? nullptr : (*it_new)->inputs[0];
      const auto &read_ops = (*it_old)->outputs;

      PADDLE_ENFORCE(write_op, "The write_op should not be null.");

      // Add a write-after-write (WAW) dependence.
      ir::Node *upstream_op =
          (*it_old)->inputs.empty() ? nullptr : (*it_old)->inputs[0];
      // TODO(zcd): Add a test.
      if (upstream_op && upstream_op != write_op) {
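        // Inserted edges: upstream_op -> dep_var -> write_op, serializing
        // the two writes.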
        ir::Node *dep_var = CreateControlDepVar();
        write_op->inputs.push_back(dep_var);
        upstream_op->outputs.push_back(dep_var);
        dep_var->outputs.push_back(write_op);
        dep_var->inputs.push_back(upstream_op);
      }

      for (auto *read_op : read_ops) {
        // Manually add a dependency var from read_op to write_op.
        if (read_op == write_op) {
          // The read and the write are the same op.
          continue;
        }
        // The two ops might already be connected via other vars.
        bool has_dep = false;
        for (ir::Node *r_out : read_op->outputs) {
          for (ir::Node *w_in : write_op->inputs) {
            if (r_out == w_in) {
              has_dep = true;
              break;
            }
          }
        }
        if (has_dep) continue;
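        // Inserted edges: read_op -> dep_var -> write_op, so the write waits
        // for the read.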

        ir::Node *dep_var = CreateControlDepVar();
        read_op->outputs.push_back(dep_var);
        dep_var->inputs.push_back(read_op);
        write_op->inputs.push_back(dep_var);
        dep_var->outputs.push_back(write_op);
      }
    }
  }
}

bool IsControlDepVar(const ir::Node &var) {
  return var.Name().find(ir::Node::kControlDepVarName) != std::string::npos;
}
}  // namespace ir
}  // namespace framework
}  // namespace paddle