// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <array>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "paddle/fluid/framework/ir/graph_helper.h"
#include "paddle/fluid/framework/ir/graph_pattern_detector.h"
#include "paddle/fluid/framework/ir/graph_traits.h"
#include "paddle/fluid/framework/ir/graph_viz_pass.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/fluid/string/pretty_log.h"
#include "paddle/fluid/string/printf.h"
namespace paddle {
namespace framework {
namespace ir {

using string::PrettyLogEndl;
using string::PrettyLog;
using string::Style;

size_t PDPattern::id_ = 0UL;

PDNode *PDPattern::NewNode(const std::string &name) {
  if (!name.empty()) {
    PADDLE_ENFORCE_EQ(node_map_.count(name), 0,
                      "PDNode's name should be unique; got duplicate [%s]",
                      name);
  }

  nodes_.emplace_back(new PDNode(this, name));
  auto *cur = nodes_.back().get();
  node_map_[name] = cur;
  return cur;
}

PDNode *PDPattern::NewNode(PDNode::teller_t &&teller, const std::string &name) {
  if (!name.empty()) {
    PADDLE_ENFORCE_EQ(node_map_.count(name), 0,
                      "PDNode's name should be unique; got duplicate [%s]",
                      name);
  }

  nodes_.emplace_back(new PDNode(std::move(teller), this, name));
  auto *cur = nodes_.back().get();
  node_map_[name] = cur;
  return cur;
}

PDNode *PDPattern::RetrieveNode(const std::string &id) const {
  auto it = node_map_.find(id);
  if (it == node_map_.end()) {
    return nullptr;
  }

  return it->second;
}

void PDPattern::AddEdge(PDNode *a, PDNode *b) {
  PADDLE_ENFORCE(a);
  PADDLE_ENFORCE(b);
  PADDLE_ENFORCE(a != b, "can't connect a node to itself.");
  edges_.emplace_back(a, b);
}

void GraphPatternDetector::operator()(Graph *graph,
                                      GraphPatternDetector::handle_t handler) {
  if (!MarkPDNodesInGraph(*graph)) {
    return;
  }

  auto subgraphs = DetectPatterns();
  UniquePatterns(&subgraphs);
  RemoveOverlappedMatch(&subgraphs);
  ValidateByNodeRole(&subgraphs);

  if (subgraphs.empty()) return;
  PrettyLogEndl(Style::detail(), "---  detect %d subgraphs", subgraphs.size());
  int id = 0;
  for (auto &g : subgraphs) {
    VLOG(3) << "optimizing #" << id++ << " subgraph";
    handler(g, graph);
  }
}
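
// Illustrative usage sketch (not part of this file): a pass builds a pattern,
// then runs the detector with a handler that rewrites each matched subgraph.
// `mutable_pattern()` is assumed to be the accessor declared in the header.
//
//   GraphPatternDetector gpd;
//   auto *pdpattern = gpd.mutable_pattern();
//   auto *relu_op = pdpattern->NewNode("x/relu")->assert_is_op("relu");
//   auto *relu_out =
//       pdpattern->NewNode("x/relu_out")->assert_is_op_output("relu");
//   relu_op->LinksTo({relu_out});
//   gpd(graph, [&](const GraphPatternDetector::subgraph_t &subgraph,
//                  Graph *g) {
//     Node *out = subgraph.at(relu_out);  // PDNode* -> matched Node*
//     // ... inspect or rewrite g here ...
//   });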

bool GraphPatternDetector::MarkPDNodesInGraph(const ir::Graph &graph) {
  VLOG(3) << "mark pdnodes in graph";
  if (graph.Nodes().empty()) return false;

  for (auto &node : GraphTraits::DFS(graph)) {
    for (const auto &pdnode : pattern_.nodes()) {
      if (pdnode->Tell(&node)) {
        VLOG(4) << "pdnode " << pdnode->name() << " marked";
        pdnodes2nodes_[pdnode.get()].insert(&node);
      }
    }
  }
  // Check whether some PDNode has no matching Node, which would allow an
  // early stop.
  for (auto &pdnode : pattern_.nodes()) {
    if (!pdnodes2nodes_.count(pdnode.get())) {
      VLOG(4) << pdnode->name() << " can't find matched Node, early stop";
      // return false;
    }
  }
  for (auto &item : pdnodes2nodes_) {
    for (auto &n : item.second) {
      GetMarkedNodes(const_cast<Graph *>(&graph)).insert(n);
    }
  }
  VLOG(3) << pdnodes2nodes_.size() << " nodes marked";

  return !pdnodes2nodes_.empty();
}

// Intermediate Nodes may only link to nodes inside the pattern; otherwise the
// subgraph will be dropped.
void GraphPatternDetector::ValidateByNodeRole(
    std::vector<GraphPatternDetector::subgraph_t> *subgraphs) {
  subgraphs->erase(
      std::remove_if(
          subgraphs->begin(), subgraphs->end(),
          [](const GraphPatternDetector::subgraph_t &subgraph) -> bool {
            // Collect the inputs and outputs.
            std::unordered_set<Node *> ios;
            for (auto &item : subgraph) {
              if (!item.first->IsIntermediate()) {
                ios.insert(item.second);
              }
            }
            for (auto &item : subgraph) {
              if (item.first->IsIntermediate()) {
                for (auto *x : item.second->inputs) {
                  if (!ios.count(x)) {
                    return true;
                  }
                }
                for (auto *x : item.second->outputs) {
                  if (!ios.count(x)) {
                    return true;
                  }
                }
              }
            }
            return false;
          }),
      subgraphs->end());
}

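// A partially built match: `roles` maps each PDNode of the pattern to the
// concrete graph Node bound to it, while `nodes_` records which graph Nodes
// are already taken so that no Node is bound to two different roles.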
struct HitGroup {
  std::unordered_map<PDNode *, Node *> roles;

  bool Match(Node *node, PDNode *pat) {
    if (nodes_.count(node)) {
      if (!roles.count(pat)) return false;
      return roles[pat] == node;
    }
    return !roles.count(pat) || roles.at(pat) == node;
  }

  void Register(Node *node, PDNode *pat) {
    roles[pat] = node;
    nodes_.insert(node);
  }

 private:
  std::unordered_set<Node *> nodes_;
};

// Tell whether Node a links to b.
bool IsNodesLink(Node *a, Node *b) {
  for (auto *node : a->outputs) {
    if (b == node) {
      return true;
    }
  }
  return false;
}

std::vector<GraphPatternDetector::subgraph_t>
GraphPatternDetector::DetectPatterns() {
  // Init empty subgraphs.
  std::vector<GraphPatternDetector::subgraph_t> result;
  std::vector<HitGroup> init_groups;
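  // Double buffer of partial matches: one slot holds the groups produced by
  // the previous edge, the other collects the groups extended by the current
  // edge.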
  std::array<std::vector<HitGroup>, 2> bi_records;
  // PADDLE_ENFORCE(!pattern_.edges().empty(), "At least one edge is needed");
  auto *first_pnode = pattern_.edges().empty() ? pattern().nodes().front().get()
                                               : pattern_.edges().front().first;
  if (!pdnodes2nodes_.count(first_pnode)) return result;
  for (auto *node : pdnodes2nodes_[first_pnode]) {
    HitGroup group;
    group.roles[first_pnode] = node;
    init_groups.emplace_back(group);
  }

  int step = 0;
  bi_records[0] = std::move(init_groups);

  // Extend the partial subgraphs by following the connection relations
  // defined by the edges between PDNodes.
  for (const auto &edge : pattern_.edges()) {
    VLOG(4) << "check " << edge.first->name() << " -> " << edge.second->name();
    // TODO(Superjomn) Fix bug here: the groups might be duplicated here.
    // Each edge relates two PDNodes, i.e. two roles. Detect the pairs of
    // Nodes that can fill these two roles and are connected in the graph.
    auto &pre_groups = bi_records[step % 2];
    auto &cur_groups = bi_records[1 - (step++ % 2)];
    cur_groups.clear();
    if (pre_groups.empty()) break;
    // source -> target
    for (Node *source : pdnodes2nodes_[edge.first]) {
      for (Node *target : pdnodes2nodes_[edge.second]) {
        VLOG(8) << "check " << source->id() << " -- " << target->id();
        // TODO(Superjomn) add some prune strategies.
        for (const auto &group : pre_groups) {
          HitGroup new_group = group;
          if (IsNodesLink(source, target) &&
              new_group.Match(source, edge.first)) {
            new_group.Register(source, edge.first);
            if (new_group.Match(target, edge.second)) {
              new_group.Register(target, edge.second);
              cur_groups.push_back(new_group);
              // TODO(Superjomn) need to deduplicate cur_groups
            }
          }
        }
      }
    }
    VLOG(3) << "step " << step << " get records: " << cur_groups.size();
    for (auto &group : cur_groups) {
      for (auto &item : group.roles) {
        VLOG(4) << "node " << item.second->id() << " as " << item.first->name();
      }
      VLOG(4) << "=========================================================";
    }
  }

  for (auto &group : bi_records[step % 2]) {
    GraphPatternDetector::subgraph_t subgraph;
    for (auto &role : group.roles) {
      subgraph.emplace(role.first, role.second);
    }
    result.emplace_back(subgraph);
  }
  return result;
}

void GraphPatternDetector::UniquePatterns(
    std::vector<GraphPatternDetector::subgraph_t> *subgraphs) {
  if (subgraphs->empty()) return;
  std::vector<GraphPatternDetector::subgraph_t> result;

  std::unordered_set<size_t> set;
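  // Fingerprint each subgraph by XOR-combining the hashes of its
  // (PDNode*, Node*) bindings; identical bindings produce identical keys.
  // In principle the combined hashes can collide.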
  for (auto &g : *subgraphs) {
    size_t key = 0;
    for (auto &item : g) {
      key ^= std::hash<void *>{}(item.first);
      key ^= std::hash<void *>{}(item.second);
    }
    if (!set.count(key)) {
      result.emplace_back(g);
      set.insert(key);
    }
  }
  *subgraphs = result;
}

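// Greedily keep matches in detection order: a subgraph is discarded when any
// of its intermediate nodes is already claimed by a previously kept match.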
void GraphPatternDetector::RemoveOverlappedMatch(
    std::vector<subgraph_t> *subgraphs) {
  std::vector<subgraph_t> result;
  std::unordered_set<Node *> node_set;

  for (const auto &subgraph : *subgraphs) {
    bool valid = true;
    for (auto &item : subgraph) {
      if (item.first->IsIntermediate() && node_set.count(item.second)) {
        valid = false;
        break;
      }
    }
    if (valid) {
      for (auto &item : subgraph) {
        node_set.insert(item.second);
      }
      result.push_back(subgraph);
    }
  }
  *subgraphs = result;
}

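// Render the pattern as a Graphviz DOT string, mainly for debugging.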
std::string PDPattern::DotString() const {
  using inference::analysis::Dot;
  Dot dot;
  int id = 0;
  // Create Nodes
  std::unordered_map<PDNode *, std::string> node2dot;
  for (const auto &node : nodes()) {
    std::string node_id = "Node" + std::to_string(id++);
    dot.AddNode(node_id, {}, node->name());
    node2dot[node.get()] = node_id;
  }
  // Create Edges
  for (const auto &edge : edges()) {
    if (!node2dot.count(edge.first) || !node2dot.count(edge.second)) {
      LOG(ERROR) << "no node " << edge.first << " " << edge.second;
      continue;
    }
    auto &src = node2dot.at(edge.first);
    auto &trg = node2dot.at(edge.second);
    dot.AddEdge(src, trg, {});
  }
  return dot.Build();
}

PDNode &PDNode::LinksTo(const std::vector<PDNode *> &others) {
  // extend outlinks.
  for (PDNode *x : others) {
    pattern_->AddEdge(this, x);
  }
  return *this;
}

PDNode &PDNode::LinksFrom(const std::vector<PDNode *> &others) {
  // extend inlinks.
  for (PDNode *x : others) {
    pattern_->AddEdge(x, this);
  }
  return *this;
}

PDNode *PDNode::assert_is_op() {
  asserts_.emplace_back([](Node *x) { return x && x->IsOp(); });
  return this;
}

PDNode *PDNode::assert_is_op(const std::string &op_type) {
  asserts_.emplace_back([op_type](Node *x) {
    return x && x->IsOp() && x->Op()->Type() == op_type;
  });
  return this;
}

PDNode *PDNode::assert_is_var() {
  asserts_.emplace_back([](Node *x) { return x && x->IsVar(); });
  return this;
}

PDNode *PDNode::assert_is_not_ctrl_var() {
  asserts_.emplace_back([](Node *x) { return x && !x->IsCtrlVar(); });
  return this;
}

PDNode *PDNode::assert_var_not_persistable() {
  assert_is_var();
  asserts_.emplace_back([](Node *x) { return !x->Var()->Persistable(); });
  return this;
}

PDNode *PDNode::assert_is_persistable_var() {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) { return x->Var()->Persistable(); });
  return this;
}

PDNode *PDNode::assert_is_op_nth_input(const std::string &op_type,
                                       const std::string &argument, int nth) {
  assert_is_var();
  assert_is_op_input(op_type);
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op->IsOp() && op->Op()->Type() == op_type &&
          IsNthInput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_nth_output(const std::string &op_type,
                                        const std::string &argument, int nth) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op->IsOp() && op->Op()->Type() == op_type &&
          IsNthOutput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_only_input_of_op(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type &&
          op->inputs.size() == 1) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_only_output_of_op(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type &&
          op->outputs.size() == 1) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_output(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_output(const std::string &op_type,
                                    const std::string &argument) {
  assert_is_var();
  assert_is_op_nth_output(op_type, argument, 0);
  return this;
}
PDNode *PDNode::assert_is_op_input(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_input(const std::string &op_type,
                                   const std::string &argument) {
  assert_is_var();
  assert_is_op_nth_input(op_type, argument, 0);
  return this;
}

PDNode *PDNode::assert_op_has_n_inputs(const std::string &op_type, size_t n) {
  assert_is_op(op_type);
  asserts_.emplace_back([=](Node *x) { return x->inputs.size() == n; });
  return this;
}

PDNode *PDNode::assert_op_has_n_outputs(const std::string &op_type, size_t n) {
  assert_is_op(op_type);
  asserts_.emplace_back([=](Node *x) { return x->outputs.size() == n; });
  return this;
}

PDNode *PDNode::assert_more(PDNode::teller_t &&teller) {
  asserts_.emplace_back(std::move(teller));
  return this;
}

PDNode *PDNode::assert_is_ops(const std::unordered_set<std::string> &op_types) {
  asserts_.emplace_back([op_types](Node *x) {
    return x && x->IsOp() && op_types.count(x->Op()->Type());
  });
  return this;
}

PDNode *PDNode::assert_is_ops_nth_input(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument, int nth) {
  assert_is_var();
  assert_is_ops_input(op_types);
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op->IsOp() && op_types.count(op->Op()->Type()) &&
          IsNthInput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_ops_nth_output(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument, int nth) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op->IsOp() && op_types.count(op->Op()->Type()) &&
          IsNthOutput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_ops_output(
    const std::unordered_set<std::string> &op_types) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op && op->IsOp() && op->Op() && op_types.count(op->Op()->Type())) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_ops_output(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument) {
  assert_is_var();
  assert_is_ops_nth_output(op_types, argument, 0);
  return this;
}

PDNode *PDNode::assert_is_ops_input(
    const std::unordered_set<std::string> &op_types) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op && op->IsOp() && op->Op() && op_types.count(op->Op()->Type())) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_ops_input(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument) {
  assert_is_var();
  assert_is_ops_nth_input(op_types, argument, 0);
  return this;
}

bool VarLinksToOp(Node *node, const std::string &op_type) {
  for (auto *out : node->outputs) {
    if (out->IsOp() && out->Op()->Type() == op_type) {
      return true;
    }
  }
  return false;
}

bool IsNthInput(Node *var, Node *op, const std::string &argument, size_t nth) {
  PADDLE_ENFORCE(var->IsVar());
  PADDLE_ENFORCE(op->IsOp());
  if (op->Op()->Input(argument).size() <= nth) return false;
  return var->Name() == op->Op()->Input(argument)[nth];
}

bool IsNthOutput(Node *var, Node *op, const std::string &argument, size_t nth) {
  PADDLE_ENFORCE(var->IsVar());
  PADDLE_ENFORCE(op->IsOp());
  if (op->Op()->Output(argument).size() <= nth) return false;
  return var->Name() == op->Op()->Output(argument)[nth];
}

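// Remove `nodes` from `graph`, then scrub the dangling input/output links
// that the removed nodes leave behind in the remaining nodes.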
void GraphSafeRemoveNodes(Graph *graph,
                          const std::unordered_set<const Node *> &nodes) {
  for (auto *node : nodes) {
    graph->RemoveNode(const_cast<Node *>(node));
  }

  for (auto *node : graph->Nodes()) {
    for (auto it = node->inputs.begin(); it != node->inputs.end();) {
      if (nodes.count(*it)) {
        it = const_cast<Node *>(node)->inputs.erase(it);
      } else {
        ++it;
      }
    }
    for (auto it = node->outputs.begin(); it != node->outputs.end();) {
      if (nodes.count(*it)) {
        it = const_cast<Node *>(node)->outputs.erase(it);
      } else {
        ++it;
      }
    }
  }
}

bool VarLinksFromOp(Node *node, const std::string &op_type) {
  for (auto *in : node->inputs) {
    if (in->IsOp() && in->Op()->Type() == op_type) {
      return true;
    }
  }
  return false;
}

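// Shape of the conv+relu pattern built below; conv_out is intermediate and is
// removed from the IR after the fuse:
//
//   conv_input ---\
//                  conv2d --> conv_out --> relu --> relu_out
//   conv_weight --/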
PDNode *patterns::ConvReLU::operator()(
    paddle::framework::ir::PDNode *conv_input) {
  // Create Operators
  conv_input->assert_is_op_input("conv2d", "Input");
  auto *conv_op = pattern->NewNode(conv_repr())->assert_is_op("conv2d");
  auto *relu_op = pattern->NewNode(relu_repr())->assert_is_op("relu");
  // Create variables
  // Filter
  auto *conv_weight_var = pattern->NewNode(conv_weight_repr())
                              ->AsInput()
                              ->assert_is_persistable_var()
                              ->assert_is_op_input("conv2d", "Filter");
  // intermediate variable, will be removed in the IR after fuse.
  auto *conv_out_var = pattern->NewNode(conv_out_repr())
                           ->AsIntermediate()
                           ->assert_is_only_output_of_op("conv2d")
                           ->assert_is_op_input("relu");
  // output
  auto *relu_out_var = pattern->NewNode(relu_out_repr())
                           ->AsOutput()
                           ->assert_is_op_output("relu");

  conv_op->LinksFrom({conv_input, conv_weight_var}).LinksTo({conv_out_var});
  relu_op->LinksFrom({conv_out_var}).LinksTo({relu_out_var});
  return relu_out_var;
}

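// Shape of the FC pattern built below; the elementwise_add branch exists only
// when with_bias is true:
//
//   x, w --> mul --> mul_out [--> elementwise_add(+ bias) --> Out]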
PDNode *patterns::FC::operator()(paddle::framework::ir::PDNode *x,
                                 bool with_bias) {
  // Create shared nodes.
  x->assert_is_op_input("mul", "X");
  auto *mul = pattern->NewNode(mul_repr())->assert_is_op("mul");

  auto *mul_w_var = pattern->NewNode(w_repr())
                        ->AsInput()
                        ->assert_is_persistable_var()
                        ->assert_is_op_input("mul", "Y");

  auto *mul_out_var =
      pattern->NewNode(mul_out_repr())->assert_is_op_output("mul");

  if (!with_bias) {  // not with bias
    // Add links.
    mul->LinksFrom({x, mul_w_var}).LinksTo({mul_out_var});
    return mul_out_var;

  } else {  // with bias
    mul_out_var->AsIntermediate()->assert_is_op_input("elementwise_add");
    // Create operators.
    auto *elementwise_add = pattern->NewNode(elementwise_add_repr())
                                ->assert_is_op("elementwise_add");
    // Create variables.
    auto *bias = pattern->NewNode(bias_repr())
                     ->assert_is_op_input("elementwise_add")
                     ->AsInput();

    auto *fc_out = pattern->NewNode(Out_repr())
                       ->AsOutput()
                       ->assert_is_op_output("elementwise_add");

    mul->LinksFrom({mul_w_var, x}).LinksTo({mul_out_var});
    elementwise_add->LinksFrom({mul_out_var, bias}).LinksTo({fc_out});
    return fc_out;
  }
}

PDNode *patterns::Embedding::operator()(PDNode *x) {
  x->assert_is_op_input("lookup_table", "Ids");
  auto *lookup_table_op =
      pattern->NewNode(lookup_table_repr())->assert_is_op("lookup_table");
#define NEW_NODE(arg__, io__)                    \
  auto *arg__ = pattern->NewNode(arg__##_repr()) \
                    ->assert_is_op_##io__("lookup_table", #arg__);

  NEW_NODE(W, input);

  NEW_NODE(Out, output);
#undef NEW_NODE

  lookup_table_op->LinksFrom({x, W});
  lookup_table_op->LinksTo({Out});
  return Out;
}

PDNode *patterns::LSTM::operator()(PDNode *x) {
  x->assert_is_op_input("lstm", "Input");
  auto *lstm_op = pattern->NewNode(lstm_repr())->assert_is_op("lstm");
#define NEW_NODE(arg__, io__) \
  auto *arg__ =               \
      pattern->NewNode(arg__##_repr())->assert_is_op_##io__("lstm", #arg__);

  // Currently, the H0 and C0 are optional
  // TODO(Superjomn) upgrade the fuse framework to support optional.
  // NEW_NODE(H0, input);
  // NEW_NODE(C0, input);
  NEW_NODE(Weight, input);
  NEW_NODE(Bias, input);

  NEW_NODE(Hidden, output);
  NEW_NODE(Cell, output);
  NEW_NODE(BatchGate, output);
  NEW_NODE(BatchCellPreAct, output);
#undef NEW_NODE

  lstm_op->LinksFrom({x, Weight, Bias});
  lstm_op->LinksTo({Hidden, Cell, BatchGate, BatchCellPreAct});
  return Hidden;
}

PDNode *patterns::GRU::operator()(PDNode *x) {
  x->assert_is_op_input("gru", "Input");
  auto *gru_op = pattern->NewNode(gru_repr())->assert_is_op("gru");
#define NEW_NODE(arg__, io__) \
  auto *arg__ =               \
      pattern->NewNode(arg__##_repr())->assert_is_op_##io__("gru", #arg__);

  NEW_NODE(Weight, input);
  // TODO(Superjomn): upgrade the fuse framework to support optional.
  // H0 and bias are optional
  NEW_NODE(Bias, input);  // also optional
  // NEW_NODE(H0, input);

  NEW_NODE(Hidden, output);
  // below are intermediate
  NEW_NODE(BatchGate, output);
  NEW_NODE(BatchResetHiddenPrev, output);
  NEW_NODE(BatchHidden, output);
#undef NEW_NODE

  BatchGate->AsIntermediate();
  BatchResetHiddenPrev->AsIntermediate();
  BatchHidden->AsIntermediate();

  gru_op->LinksFrom({x, Weight, Bias});
  gru_op->LinksTo({Hidden, BatchGate, BatchResetHiddenPrev, BatchHidden});
  return Hidden;
}

PDNode *patterns::ActElewiseAdd::operator()(
    paddle::framework::ir::PDNode *in_var,
    std::unordered_set<std::string> act_types) {
  in_var->assert_is_ops_input(act_types, "X");

  auto *act = pattern->NewNode(act_repr())->assert_is_ops(act_types);
  auto *act_out_var = pattern->NewNode(act_out_repr())
                          ->assert_is_not_ctrl_var()
                          ->assert_is_ops_output(act_types);
  act_out_var->AsIntermediate()->assert_is_op_input("elementwise_add");

  auto *ele_x_var = pattern->NewNode(ele_x_repr())
                        ->assert_is_not_ctrl_var()
                        ->assert_is_op_input("elementwise_add")
                        ->AsInput();
  auto *elementwise_add =
      pattern->NewNode(ele_add_repr())->assert_is_op("elementwise_add");

  auto *elewise_add_out = pattern->NewNode(elewise_add_out_repr())
                              ->AsOutput()
                              ->assert_is_op_output("elementwise_add", "Out");

  act->LinksFrom({in_var}).LinksTo({act_out_var});
  elementwise_add->LinksFrom({act_out_var, ele_x_var})
      .LinksTo({elewise_add_out});

  return elewise_add_out;
}

PDNode *patterns::ElewiseAddAct::operator()(
    paddle::framework::ir::PDNode *ele_x_var,
    std::unordered_set<std::string> act_types) {
  auto *ele_y_var = pattern->NewNode(ele_y_repr())
                        ->assert_is_op_input("elementwise_add", "Y");

  auto *ele_add =
      pattern->NewNode(ele_add_repr())->assert_is_op("elementwise_add");

  auto *ele_out_var = pattern->NewNode(elewise_add_out_repr())
                          ->assert_is_op_output("elementwise_add", "Out");

  ele_out_var->AsIntermediate()->assert_is_ops_input(act_types);

  auto *act = pattern->NewNode(act_repr())->assert_is_ops(act_types);

  auto *act_out_var =
      pattern->NewNode(act_out_repr())->assert_is_ops_output(act_types, "Out");

  ele_add->LinksFrom({ele_x_var, ele_y_var}).LinksTo({ele_out_var});
  act->LinksFrom({ele_out_var}).LinksTo({act_out_var});

  return act_out_var;
}

PDNode *patterns::ElewiseAddActInplaceGrad::operator()(
    paddle::framework::ir::PDNode *d_act_out_var,
    std::unordered_set<std::string> act_types) {
  // act_grad: in["Out", "Out@GRAD"], out["X@GRAD"]
  // ele_add_grad: in["Y", "Out@GRAD"], out["X@GRAD", "Y@GRAD"]
  auto *act_grad = pattern->NewNode(act_grad_repr())->assert_is_ops(act_types);

  auto *act_out_var =
      pattern->NewNode(act_out_repr())->assert_is_ops_input(act_types, "Out");

  auto *d_intermediate_var =
      pattern->NewNode(d_itermediate_out_repr())
          ->assert_is_ops_output(act_types, GradVarName("X"));

  act_grad->LinksFrom({d_act_out_var, act_out_var})
      .LinksTo({d_intermediate_var});

  auto *ele_y_var = pattern->NewNode(ele_y_repr())
                        ->assert_is_not_ctrl_var()
                        ->assert_is_op_input("elementwise_add_grad", "Y");

  auto *ele_add_grad = pattern->NewNode(ele_add_grad_repr())
                           ->assert_is_op("elementwise_add_grad");

  auto *d_ele_x_var =
      pattern->NewNode(d_ele_x_repr())
          ->assert_is_not_ctrl_var()
          ->assert_is_op_output("elementwise_add_grad", GradVarName("X"));

  auto *d_ele_y_var =
      pattern->NewNode(d_ele_y_repr())
          ->assert_is_not_ctrl_var()
          ->assert_is_op_output("elementwise_add_grad", GradVarName("Y"));

  ele_add_grad->LinksFrom({d_intermediate_var, ele_y_var})
      .LinksTo({d_ele_x_var, d_ele_y_var});

  return ele_add_grad;
}

}  // namespace ir
}  // namespace framework
}  // namespace paddle