// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <array>
#include <string>
#include <vector>

#include "paddle/fluid/framework/ir/graph_helper.h"
#include "paddle/fluid/framework/ir/graph_pattern_detector.h"
#include "paddle/fluid/framework/ir/graph_traits.h"
#include "paddle/fluid/framework/ir/graph_viz_pass.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/fluid/string/pretty_log.h"
#include "paddle/fluid/string/printf.h"
namespace paddle {
namespace framework {
namespace ir {

using string::PrettyLogEndl;
using string::PrettyLog;
using string::Style;

size_t PDPattern::id_ = 0UL;

PDNode *PDPattern::NewNode(const std::string &name) {
  if (!name.empty()) {
    PADDLE_ENFORCE_EQ(node_map_.count(name), 0,
                      "PDNode's name should be unique, get duplicate [%s]",
                      name);
  }

  nodes_.emplace_back(new PDNode(this, name));
  auto *cur = nodes_.back().get();
  node_map_[name] = cur;
  return cur;
}

PDNode *PDPattern::NewNode(PDNode::teller_t &&teller, const std::string &name) {
  if (!name.empty()) {
    PADDLE_ENFORCE_EQ(node_map_.count(name), 0,
                      "PDNode's name should be unique, get duplicate [%s]",
                      name);
  }

  nodes_.emplace_back(new PDNode(std::move(teller), this, name));
  auto *cur = nodes_.back().get();
  node_map_[name] = cur;
  return cur;
}

PDNode *PDPattern::RetrieveNode(const std::string &id) const {
  auto it = node_map_.find(id);
  if (it == node_map_.end()) {
    return nullptr;
  }

  return it->second;
}

void PDPattern::AddEdge(PDNode *a, PDNode *b) {
  PADDLE_ENFORCE(a);
  PADDLE_ENFORCE(b);
  PADDLE_ENFORCE(a != b, "Can't connect a node to itself.");
  edges_.emplace_back(a, b);
}

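// Entry point of the detector: mark candidate nodes, enumerate matching
// subgraphs, prune duplicates and overlapping matches, and finally invoke the
// handler once per surviving subgraph.
//
// Typical usage from a fuse pass (a sketch, assuming the usual pass set-up):
//   GraphPatternDetector gpd;
//   // build PDNodes on gpd.mutable_pattern(), e.g. via a helper in the
//   // patterns namespace, then run:
//   gpd(graph, [&](const GraphPatternDetector::subgraph_t &subgraph,
//                  Graph *g) { /* rewrite the matched nodes */ });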
void GraphPatternDetector::operator()(Graph *graph,
                                      GraphPatternDetector::handle_t handler) {
  if (!MarkPDNodesInGraph(*graph)) {
    return;
  }

  auto subgraphs = DetectPatterns();
  UniquePatterns(&subgraphs);
  RemoveOverlappedMatch(&subgraphs);
  ValidateByNodeRole(&subgraphs);

  if (subgraphs.empty()) return;
  PrettyLogEndl(Style::detail(), "---  detect %d subgraphs", subgraphs.size());
  int id = 0;
  for (auto &g : subgraphs) {
    VLOG(3) << "optimizing #" << id++ << " subgraph";
    handler(g, graph);
  }
}

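// Traverse the graph once and record, for every PDNode of the pattern, the
// set of graph Nodes that satisfy its teller and asserts. Returns false when
// nothing could be marked.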
bool GraphPatternDetector::MarkPDNodesInGraph(const ir::Graph &graph) {
  VLOG(3) << "mark pdnodes in graph";
  if (graph.Nodes().empty()) return false;

  for (auto &node : GraphTraits::DFS(graph)) {
    for (const auto &pdnode : pattern_.nodes()) {
      if (pdnode->Tell(&node)) {
        VLOG(4) << "pdnode " << pdnode->name() << " marked";
        pdnodes2nodes_[pdnode.get()].insert(&node);
      }
    }
  }
  // Check whether we can stop early because some PDNode has no matched Node.
  for (auto &pdnode : pattern_.nodes()) {
    if (!pdnodes2nodes_.count(pdnode.get())) {
      VLOG(4) << pdnode->name() << " can't find matched Node, early stop";
      // return false;
    }
  }
  for (auto &item : pdnodes2nodes_) {
    for (auto &n : item.second) {
      GetMarkedNodes(const_cast<Graph *>(&graph)).insert(n);
    }
  }
  VLOG(3) << pdnodes2nodes_.size() << " nodes marked";

  return !pdnodes2nodes_.empty();
}

// The intermediate Nodes can only link to nodes inside the pattern; otherwise
// the subgraph will be dropped.
void GraphPatternDetector::ValidateByNodeRole(
    std::vector<GraphPatternDetector::subgraph_t> *subgraphs) {
  std::vector<GraphPatternDetector::subgraph_t> result;

  subgraphs->erase(
      std::remove_if(
          subgraphs->begin(), subgraphs->end(),
          [](const GraphPatternDetector::subgraph_t &subgraph) -> bool {
            // Collect the inputs and outputs.
            std::unordered_set<Node *> ios;
            for (auto &item : subgraph) {
              if (!item.first->IsIntermediate()) {
                ios.insert(item.second);
              }
            }
            for (auto &item : subgraph) {
              if (item.first->IsIntermediate()) {
                for (auto *x : item.second->inputs) {
                  if (!ios.count(x)) {
                    return true;
                  }
                }
                for (auto *x : item.second->outputs) {
                  if (!ios.count(x)) {
                    return true;
                  }
                }
              }
            }
            return false;
          }),
      subgraphs->end());
}

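// A HitGroup is one partial match: a mapping from pattern nodes (PDNode) to
// graph nodes, together with a record of the graph nodes already used so that
// a single graph node never fills two different roles.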
struct HitGroup {
  std::unordered_map<PDNode *, Node *> roles;

  bool Match(Node *node, PDNode *pat) {
    if (nodes_.count(node)) {
      if (!roles.count(pat)) return false;
      return roles[pat] == node;
    }
    return !roles.count(pat) || roles.at(pat) == node;
  }

  void Register(Node *node, PDNode *pat) {
    roles[pat] = node;
    nodes_.insert(node);
  }

 private:
  std::unordered_set<Node *> nodes_;
};

// Tell whether Node a links to b.
bool IsNodesLink(Node *a, Node *b) {
  for (auto *node : a->outputs) {
    if (b == node) {
      return true;
    }
  }
  return false;
}

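// Enumerate candidate subgraphs by walking the pattern's edge list and growing
// partial matches (HitGroups) one edge at a time, starting from the nodes
// marked for the first PDNode.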
std::vector<GraphPatternDetector::subgraph_t>
GraphPatternDetector::DetectPatterns() {
  // Init empty subgraphs.
  std::vector<GraphPatternDetector::subgraph_t> result;
  std::vector<HitGroup> init_groups;
  std::array<std::vector<HitGroup>, 2> bi_records;
  // PADDLE_ENFORCE(!pattern_.edges().empty(), "At least one edge is needed");
  auto *first_pnode = pattern_.edges().empty() ? pattern().nodes().front().get()
                                               : pattern_.edges().front().first;
  if (!pdnodes2nodes_.count(first_pnode)) return result;
  for (auto *node : pdnodes2nodes_[first_pnode]) {
    HitGroup group;
    group.roles[first_pnode] = node;
    init_groups.emplace_back(group);
  }

  int step = 0;
  bi_records[0] = std::move(init_groups);

  // Extend a PDNode to subgraphs by deducing the connection relations defined
  // in edges of PDNodes.
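  // bi_records ping-pongs between two buffers: the groups matched up to the
  // previous edge and the groups extended by the current edge. After the last
  // edge, bi_records[step % 2] holds the complete matches.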
  for (const auto &edge : pattern_.edges()) {
    VLOG(4) << "check " << edge.first->name() << " -> " << edge.second->name();
    // TODO(Superjomn) Fix bug here: the groups might be duplicated.
    // Each edge connects two PDNodes, i.e. two roles. Detect a pair of graph
    // Nodes that can fill these two roles and are actually connected.
    auto &pre_groups = bi_records[step % 2];
    auto &cur_groups = bi_records[1 - (step++ % 2)];
    cur_groups.clear();
    if (pre_groups.empty()) break;
    // source -> target
    for (Node *source : pdnodes2nodes_[edge.first]) {
      for (Node *target : pdnodes2nodes_[edge.second]) {
        VLOG(8) << "check " << source->id() << " -- " << target->id();
        // TODO(Superjomn) add some prune strategies.
        for (const auto &group : pre_groups) {
          HitGroup new_group = group;
          if (IsNodesLink(source, target) &&
              new_group.Match(source, edge.first)) {
            new_group.Register(source, edge.first);
            if (new_group.Match(target, edge.second)) {
              new_group.Register(target, edge.second);
              cur_groups.push_back(new_group);
              // TODO(Superjomn) need to unique
            }
          }
        }
      }
    }
    VLOG(3) << "step " << step << " get records: " << cur_groups.size();
    for (auto &group : cur_groups) {
      for (auto &item : group.roles) {
        VLOG(4) << "node " << item.second->id() << " as " << item.first->name();
      }
      VLOG(4) << "=========================================================";
    }
  }

  for (auto &group : bi_records[step % 2]) {
    GraphPatternDetector::subgraph_t subgraph;
    for (auto &role : group.roles) {
      subgraph.emplace(role.first, role.second);
    }
    result.emplace_back(subgraph);
  }
  return result;
}

bool GraphItemCMP(const std::pair<PDNode *, Node *> &a,
                  const std::pair<PDNode *, Node *> &b) {
  if (a.first != b.first) {
    return a.first < b.first;
  } else {
    return a.second < b.second;
  }
}

// TODO(Superjomn) enhance the function as it marks some unique subgraphs as
// duplicates
// see https://github.com/PaddlePaddle/Paddle/issues/13550
void GraphPatternDetector::UniquePatterns(
    std::vector<GraphPatternDetector::subgraph_t> *subgraphs) {
  if (subgraphs->empty()) return;
  std::vector<GraphPatternDetector::subgraph_t> result;

  std::unordered_set<size_t> set;
  std::hash<std::string> hasher;
  for (auto &g : *subgraphs) {
    // Sort the items in the sub-graph, and transform to a string key.
    std::vector<std::pair<PDNode *, Node *>> sorted_keys(g.begin(), g.end());
    std::sort(sorted_keys.begin(), sorted_keys.end(), GraphItemCMP);
    std::stringstream ss;
    for (auto &item : sorted_keys) {
      ss << item.first << ":" << item.second;
    }
    auto key = hasher(ss.str());
    if (!set.count(key)) {
      result.emplace_back(g);
      set.insert(key);
    }
  }
  *subgraphs = result;
}

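// Greedily keep subgraphs in order; a candidate is rejected when any of its
// intermediate nodes has already been claimed by a previously kept subgraph.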
void GraphPatternDetector::RemoveOverlappedMatch(
    std::vector<subgraph_t> *subgraphs) {
  std::vector<subgraph_t> result;
  std::unordered_set<Node *> node_set;

  for (const auto &subgraph : *subgraphs) {
    bool valid = true;
    for (auto &item : subgraph) {
      if (item.first->IsIntermediate() && node_set.count(item.second)) {
        valid = false;
        break;
      }
    }
    if (valid) {
      for (auto &item : subgraph) {
        node_set.insert(item.second);
      }
      result.push_back(subgraph);
    }
  }
  *subgraphs = result;
}

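// Render the pattern as a Graphviz DOT string, mainly useful for debugging
// what a pass is trying to match.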
std::string PDPattern::DotString() const {
  using inference::analysis::Dot;
  Dot dot;
  int id = 0;
  // Create Nodes
  std::unordered_map<PDNode *, std::string> node2dot;
  for (const auto &node : nodes()) {
    std::string node_id = "Node" + std::to_string(id++);
    dot.AddNode(node_id, {}, node->name());
    node2dot[node.get()] = node_id;
  }
  // Create Edges
  for (const auto &edge : edges()) {
    if (!node2dot.count(edge.first) || !node2dot.count(edge.second)) {
      LOG(ERROR) << "no node " << edge.first << " " << edge.second;
      continue;
    }
    auto &src = node2dot.at(edge.first);
    auto &trg = node2dot.at(edge.second);
    dot.AddEdge(src, trg, {});
  }
  return dot.Build();
}

PDNode &PDNode::LinksTo(const std::vector<PDNode *> &others) {
  // extend outlinks.
  for (PDNode *x : others) {
    pattern_->AddEdge(this, x);
  }
  return *this;
}

PDNode &PDNode::LinksFrom(const std::vector<PDNode *> &others) {
  // extend inlinks.
  for (PDNode *x : others) {
    pattern_->AddEdge(x, this);
  }
  return *this;
}

PDNode *PDNode::assert_is_op() {
  asserts_.emplace_back([](Node *x) { return x && x->IsOp(); });
  return this;
}

PDNode *PDNode::assert_is_op(const std::string &op_type) {
  asserts_.emplace_back([op_type](Node *x) {
    return x && x->IsOp() && x->Op()->Type() == op_type;
  });
  return this;
}

PDNode *PDNode::assert_is_var() {
  asserts_.emplace_back([](Node *x) { return x && x->IsVar(); });
  return this;
}

PDNode *PDNode::assert_is_not_ctrl_var() {
  asserts_.emplace_back([](Node *x) { return x && !x->IsCtrlVar(); });
  return this;
}

PDNode *PDNode::assert_var_not_persistable() {
  assert_is_var();
  asserts_.emplace_back([](Node *x) { return !x->Var()->Persistable(); });
  return this;
}

PDNode *PDNode::assert_is_persistable_var() {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) { return x->Var()->Persistable(); });
  return this;
}

PDNode *PDNode::assert_is_op_nth_input(const std::string &op_type,
                                       const std::string &argument, int nth) {
  assert_is_var();
  assert_is_op_input(op_type);
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op->IsOp() && op->Op()->Type() == op_type &&
          IsNthInput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_nth_output(const std::string &op_type,
                                        const std::string &argument, int nth) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op->IsOp() && op->Op()->Type() == op_type &&
          IsNthOutput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_only_input_of_op(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type &&
          op->inputs.size() == 1) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_only_output_of_op(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type &&
          op->outputs.size() == 1) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_output(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_output(const std::string &op_type,
                                    const std::string &argument) {
  assert_is_var();
  assert_is_op_nth_output(op_type, argument, 0);
  return this;
}
PDNode *PDNode::assert_is_op_input(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_input(const std::string &op_type,
                                   const std::string &argument) {
  assert_is_var();
  assert_is_op_nth_input(op_type, argument, 0);
  return this;
}

PDNode *PDNode::assert_op_has_n_inputs(const std::string &op_type, size_t n) {
  assert_is_op(op_type);
  asserts_.emplace_back([=](Node *x) { return x->inputs.size() == n; });
  return this;
}

PDNode *PDNode::assert_op_has_n_outputs(const std::string &op_type, size_t n) {
  assert_is_op(op_type);
  asserts_.emplace_back([=](Node *x) { return x->outputs.size() == n; });
  return this;
}

PDNode *PDNode::assert_more(PDNode::teller_t &&teller) {
  asserts_.emplace_back(std::move(teller));
  return this;
}

PDNode *PDNode::assert_is_ops(const std::unordered_set<std::string> &op_types) {
  asserts_.emplace_back([op_types](Node *x) {
    return x && x->IsOp() && op_types.count(x->Op()->Type());
  });
  return this;
}

PDNode *PDNode::assert_is_ops_nth_input(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument, int nth) {
  assert_is_var();
  assert_is_ops_input(op_types);
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op->IsOp() && op_types.count(op->Op()->Type()) &&
          IsNthInput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_ops_nth_output(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument, int nth) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op->IsOp() && op_types.count(op->Op()->Type()) &&
          IsNthOutput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}
PDNode *PDNode::assert_is_ops_output(
    const std::unordered_set<std::string> &op_types) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op && op->IsOp() && op->Op() && op_types.count(op->Op()->Type())) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_ops_output(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument) {
  assert_is_var();
  assert_is_ops_nth_output(op_types, argument, 0);
  return this;
}

PDNode *PDNode::assert_is_ops_input(
    const std::unordered_set<std::string> &op_types) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op && op->IsOp() && op->Op() && op_types.count(op->Op()->Type())) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_ops_input(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument) {
  assert_is_var();
  assert_is_ops_nth_input(op_types, argument, 0);
  return this;
}

bool VarLinksToOp(Node *node, const std::string &op_type) {
  for (auto *out : node->outputs) {
    if (out->IsOp() && out->Op()->Type() == op_type) {
      return true;
    }
  }
  return false;
}

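// IsNthInput/IsNthOutput: whether `var` is exactly the nth element of `op`'s
// input/output slot named `argument`.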
bool IsNthInput(Node *var, Node *op, const std::string &argument, size_t nth) {
  PADDLE_ENFORCE(var->IsVar());
  PADDLE_ENFORCE(op->IsOp());
  if (op->Op()->Input(argument).size() <= nth) return false;
  return var->Name() == op->Op()->Input(argument)[nth];
}

bool IsNthOutput(Node *var, Node *op, const std::string &argument, size_t nth) {
  PADDLE_ENFORCE(var->IsVar());
  PADDLE_ENFORCE(op->IsOp());
  if (op->Op()->Output(argument).size() <= nth) return false;
  return var->Name() == op->Op()->Output(argument)[nth];
}

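// Remove the given nodes from the graph, then erase every dangling reference
// to them from the input/output lists of the remaining nodes.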
void GraphSafeRemoveNodes(Graph *graph,
                          const std::unordered_set<const Node *> &nodes) {
  for (auto *node : nodes) {
    graph->RemoveNode(const_cast<Node *>(node));
  }

  for (auto *node : graph->Nodes()) {
    for (auto it = node->inputs.begin(); it != node->inputs.end();) {
      if (nodes.count(*it)) {
        it = const_cast<Node *>(node)->inputs.erase(it);
      } else {
        it++;
      }
    }
    for (auto it = node->outputs.begin(); it != node->outputs.end();) {
      if (nodes.count(*it)) {
        it = const_cast<Node *>(node)->outputs.erase(it);
      } else {
        it++;
      }
    }
  }
}

bool VarLinksFromOp(Node *node, const std::string &op_type) {
  for (auto *out : node->inputs) {
    if (out->IsOp() && out->Op()->Type() == op_type) {
      return true;
    }
  }
  return false;
}

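// conv2d (+ optional elementwise_add) + batch_norm pattern; returns the
// batch_norm output variable node.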
PDNode *patterns::ConvBN::operator()(paddle::framework::ir::PDNode *conv_input,
                                     bool with_eltwise_add) {
  // Create Operators
  conv_input->assert_is_op_input("conv2d", "Input");
  auto *conv_op = pattern->NewNode(conv_repr())->assert_is_op("conv2d");

  PDNode *eltwise_op = nullptr;
  if (with_eltwise_add) {
    eltwise_op =
        pattern->NewNode(eltwise_repr())->assert_is_op("elementwise_add");
  }
  auto *batch_norm_op =
      pattern->NewNode(batch_norm_repr())->assert_is_op("batch_norm");
  // Create variables
  // Conv Filter
  auto *conv_weight_var = pattern->NewNode(conv_weight_repr())
                              ->AsInput()
                              ->assert_is_persistable_var()
                              ->assert_is_op_input("conv2d", "Filter");

  auto *conv_out_var = pattern->NewNode(conv_out_repr())
                           ->AsIntermediate()
                           ->assert_is_only_output_of_op("conv2d");

  PDNode *eltwise_y_in_var = nullptr;
  PDNode *eltwise_out_var = nullptr;
  if (with_eltwise_add) {
    // Conv output as Bias input
    conv_out_var->assert_is_op_input("elementwise_add", "X");
    // Bias
    eltwise_y_in_var = pattern->NewNode(eltwise_y_in_repr())
                           ->assert_is_op_input("elementwise_add", "Y")
                           ->AsInput();
    eltwise_out_var = pattern->NewNode(eltwise_out_repr())
                          ->AsIntermediate()
                          ->assert_is_only_output_of_op("elementwise_add");
  } else {
    // Conv output as BN input
    conv_out_var->assert_is_op_input("batch_norm", "X");
  }

  // BN Scale
  auto *bn_scale_var = pattern->NewNode(bn_scale_repr())
                           ->AsInput()
                           ->assert_is_persistable_var()
                           ->assert_is_op_input("batch_norm", "Scale");
  // BN Bias
  auto *bn_bias_var = pattern->NewNode(bn_bias_repr())
                          ->AsInput()
                          ->assert_is_persistable_var()
                          ->assert_is_op_input("batch_norm", "Bias");
  // BN Mean
  auto *bn_mean_var = pattern->NewNode(bn_mean_repr())
                          ->AsInput()
                          ->assert_is_persistable_var()
                          ->assert_is_op_input("batch_norm", "Mean");
  // BN Variance
  auto *bn_variance_var = pattern->NewNode(bn_variance_repr())
                              ->AsInput()
                              ->assert_is_persistable_var()
                              ->assert_is_op_input("batch_norm", "Variance");

  // BN output
  auto *bn_out_var = pattern->NewNode(bn_out_repr())
                         ->AsOutput()
                         ->assert_is_op_output("batch_norm");

  auto *bn_mean_out_var = pattern->NewNode(bn_mean_out_repr())
                              ->AsOutput()
                              ->assert_is_op_output("batch_norm", "MeanOut");

  auto *bn_variance_out_var =
      pattern->NewNode(bn_variance_out_repr())
          ->AsOutput()
          ->assert_is_op_output("batch_norm", "VarianceOut");

  auto *bn_saved_mean_var =
      pattern->NewNode(bn_saved_mean_repr())
          ->AsOutput()
          ->assert_is_op_output("batch_norm", "SavedMean");

  auto *bn_saved_variance_var =
      pattern->NewNode(bn_saved_variance_repr())
          ->AsOutput()
          ->assert_is_op_output("batch_norm", "SavedVariance");

  conv_op->LinksFrom({conv_input, conv_weight_var}).LinksTo({conv_out_var});

  if (with_eltwise_add) {
    eltwise_op->LinksFrom({conv_out_var, eltwise_y_in_var})
        .LinksTo({eltwise_out_var});
    batch_norm_op
        ->LinksFrom({eltwise_out_var, bn_scale_var, bn_bias_var, bn_mean_var,
                     bn_variance_var})
        .LinksTo({bn_out_var, bn_mean_out_var, bn_variance_out_var,
                  bn_saved_mean_var, bn_saved_variance_var});
  } else {
    batch_norm_op
        ->LinksFrom({conv_out_var, bn_scale_var, bn_bias_var, bn_mean_var,
                     bn_variance_var})
        .LinksTo({bn_out_var, bn_mean_out_var, bn_variance_out_var,
                  bn_saved_mean_var, bn_saved_variance_var});
  }
  return bn_out_var;
}

PDNode *patterns::ConvReLU::operator()(
    paddle::framework::ir::PDNode *conv_input) {
  // Create Operators
  conv_input->assert_is_op_input("conv2d", "Input");
  auto *conv_op = pattern->NewNode(conv_repr())->assert_is_op("conv2d");
  auto *relu_op = pattern->NewNode(relu_repr())->assert_is_op("relu");
  // Create variables
  // Filter
  auto *conv_weight_var = pattern->NewNode(conv_weight_repr())
                              ->AsInput()
                              ->assert_is_persistable_var()
                              ->assert_is_op_input("conv2d", "Filter");
  // intermediate variable, will be removed in the IR after fuse.
  auto *conv_out_var = pattern->NewNode(conv_out_repr())
                           ->AsIntermediate()
                           ->assert_is_only_output_of_op("conv2d")
                           ->assert_is_op_input("relu");
  // output
  auto *relu_out_var = pattern->NewNode(relu_out_repr())
                           ->AsOutput()
                           ->assert_is_op_output("relu");

  conv_op->LinksFrom({conv_input, conv_weight_var}).LinksTo({conv_out_var});
  relu_op->LinksFrom({conv_out_var}).LinksTo({relu_out_var});
  return relu_out_var;
}

PDNode *patterns::SeqConvEltAddRelu::operator()(
    paddle::framework::ir::PDNode *seqconv_input) {
  // Create Operators
  seqconv_input->assert_is_op_input("sequence_conv", "X");
  auto *seqconv_op = pattern->NewNode(seqconv_repr())
                         ->assert_is_op("sequence_conv")
                         ->assert_op_attr<bool>("paddingTrainable", false)
                         ->assert_op_attr<int>("contextStride", 1);

  auto *eltadd_op =
      pattern->NewNode(eltadd_repr())->assert_is_op("elementwise_add");
  auto *relu_op = pattern->NewNode(relu_repr())->assert_is_op("relu");
  // Create variables
  // Filter
  auto *seqconv_weight_var =
      pattern->NewNode(seqconv_weight_repr())
          ->AsInput()
          ->assert_is_persistable_var()
          ->assert_is_op_input("sequence_conv", "Filter");
  // Bias
  auto *eltadd_bias_var = pattern->NewNode(eltadd_bias_repr())
                              ->AsInput()
                              ->assert_is_op_input("elementwise_add");
  // intermediate variable, will be removed in the IR after fuse.
  auto *seqconv_out_var = pattern->NewNode(seqconv_out_repr())
                              ->AsIntermediate()
                              ->assert_is_only_output_of_op("sequence_conv")
                              ->assert_is_op_input("elementwise_add");
  auto *eltadd_out_var = pattern->NewNode(eltadd_out_repr())
                             ->AsIntermediate()
                             ->assert_is_only_output_of_op("elementwise_add")
                             ->assert_is_only_input_of_op("relu");
  // output
  auto *relu_out_var = pattern->NewNode(relu_out_repr())
                           ->AsOutput()
                           ->assert_is_op_output("relu");

  seqconv_op->LinksFrom({seqconv_input, seqconv_weight_var})
      .LinksTo({seqconv_out_var});
  eltadd_op->LinksFrom({seqconv_out_var, eltadd_bias_var})
      .LinksTo({eltadd_out_var});
  relu_op->LinksFrom({eltadd_out_var}).LinksTo({relu_out_var});
  return relu_out_var;
}

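// FC pattern: mul(X, W), optionally followed by elementwise_add with a bias.
// Returns the mul output when with_bias is false, otherwise the
// elementwise_add output.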
PDNode *patterns::FC::operator()(paddle::framework::ir::PDNode *x,
                                 bool with_bias) {
  // Create shared nodes.
  x->assert_is_op_input("mul", "X");
  auto *mul = pattern->NewNode(mul_repr())->assert_is_op("mul");

  auto *mul_w_var = pattern->NewNode(w_repr())
                        ->AsInput()
                        ->assert_is_persistable_var()
                        ->assert_is_op_input("mul", "Y");

  auto *mul_out_var =
      pattern->NewNode(mul_out_repr())->assert_is_op_output("mul");

  if (!with_bias) {  // not with bias
    // Add links.
    mul->LinksFrom({x, mul_w_var}).LinksTo({mul_out_var});
    return mul_out_var;

  } else {  // with bias
    mul_out_var->AsIntermediate()->assert_is_op_input("elementwise_add");
    // Create operators.
    auto *elementwise_add = pattern->NewNode(elementwise_add_repr())
                                ->assert_is_op("elementwise_add");
    // Create variables.
    auto *bias = pattern->NewNode(bias_repr())
                     ->assert_is_op_input("elementwise_add")
                     ->AsInput();

    auto *fc_out = pattern->NewNode(Out_repr())
                       ->AsOutput()
                       ->assert_is_op_output("elementwise_add");

    mul->LinksFrom({mul_w_var, x}).LinksTo({mul_out_var});
    elementwise_add->LinksFrom({mul_out_var, bias}).LinksTo({fc_out});
    return fc_out;
  }
}

PDNode *patterns::Embedding::operator()(PDNode *x) {
  x->assert_is_op_input("lookup_table", "Ids");
  auto *lookup_table_op =
      pattern->NewNode(lookup_table_repr())->assert_is_op("lookup_table");
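// Helper macro: declare a PDNode named `arg__` and assert it is the
// corresponding input/output argument of the lookup_table op.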
#define NEW_NODE(arg__, io__)                    \
  auto *arg__ = pattern->NewNode(arg__##_repr()) \
                    ->assert_is_op_##io__("lookup_table", #arg__);

  NEW_NODE(W, input);

  NEW_NODE(Out, output);
#undef NEW_NODE

  lookup_table_op->LinksFrom({x, W});
  lookup_table_op->LinksTo({Out});
  return Out;
}

PDNode *patterns::LSTM::operator()(PDNode *x) {
  x->assert_is_op_input("lstm", "Input");
  auto *lstm_op = pattern->NewNode(lstm_repr())->assert_is_op("lstm");
#define NEW_NODE(arg__, io__) \
  auto *arg__ =               \
      pattern->NewNode(arg__##_repr())->assert_is_op_##io__("lstm", #arg__);

  // Currently, the H0 and C0 are optional
  // TODO(Superjomn) upgrade the fuse framework to support optional.
  // NEW_NODE(H0, input);
  // NEW_NODE(C0, input);
  NEW_NODE(Weight, input);
  NEW_NODE(Bias, input);

  NEW_NODE(Hidden, output);
  NEW_NODE(Cell, output);
  NEW_NODE(BatchGate, output);
  NEW_NODE(BatchCellPreAct, output);
#undef NEW_NODE

  lstm_op->LinksFrom({x, Weight, Bias});
  lstm_op->LinksTo({Hidden, Cell, BatchGate, BatchCellPreAct});
  return Hidden;
}

PDNode *patterns::GRU::operator()(PDNode *x) {
  x->assert_is_op_input("gru", "Input");
  auto *gru_op = pattern->NewNode(gru_repr())->assert_is_op("gru");
#define NEW_NODE(arg__, io__) \
  auto *arg__ =               \
      pattern->NewNode(arg__##_repr())->assert_is_op_##io__("gru", #arg__);

  NEW_NODE(Weight, input);
  // TODO(Superjomn): upgrade the fuse framework to support optional.
  // H0 and bias are optional
  NEW_NODE(Bias, input);  // also optional
  // NEW_NODE(H0, input);

  NEW_NODE(Hidden, output);
  // below are intermediate
  NEW_NODE(BatchGate, output);
  NEW_NODE(BatchResetHiddenPrev, output);
  NEW_NODE(BatchHidden, output);
#undef NEW_NODE

  BatchGate->AsIntermediate();
  BatchResetHiddenPrev->AsIntermediate();
  BatchHidden->AsIntermediate();

  gru_op->LinksFrom({x, Weight, Bias});
  gru_op->LinksTo({Hidden, BatchGate, BatchResetHiddenPrev, BatchHidden});
  return Hidden;
}

PDNode *patterns::ActElewiseAdd::operator()(
    paddle::framework::ir::PDNode *in_var,
    std::unordered_set<std::string> act_types) {
  in_var->assert_is_ops_input(act_types, "X");

  auto *act = pattern->NewNode(act_repr())->assert_is_ops(act_types);
  auto *act_out_var = pattern->NewNode(act_out_repr())
                          ->assert_is_not_ctrl_var()
                          ->assert_is_ops_output(act_types);
  act_out_var->AsIntermediate()->assert_is_op_input("elementwise_add");

  auto *ele_x_var = pattern->NewNode(ele_x_repr())
                        ->assert_is_not_ctrl_var()
                        ->assert_is_op_input("elementwise_add")
                        ->AsInput();
  auto *elementwise_add =
      pattern->NewNode(ele_add_repr())->assert_is_op("elementwise_add");

  auto *elewise_add_out = pattern->NewNode(elewise_add_out_repr())
                              ->AsOutput()
                              ->assert_is_op_output("elementwise_add", "Out");

  act->LinksFrom({in_var}).LinksTo({act_out_var});
  elementwise_add->LinksFrom({act_out_var, ele_x_var})
      .LinksTo({elewise_add_out});

  return elewise_add_out;
}

PDNode *patterns::ElewiseAddAct::operator()(
    paddle::framework::ir::PDNode *ele_x_var,
    std::unordered_set<std::string> act_types) {
  auto *ele_y_var = pattern->NewNode(ele_y_repr())
                        ->assert_is_op_input("elementwise_add", "Y");

  auto *ele_add =
      pattern->NewNode(ele_add_repr())->assert_is_op("elementwise_add");

  auto *ele_out_var = pattern->NewNode(elewise_add_out_repr())
                          ->assert_is_op_output("elementwise_add", "Out");

  ele_out_var->AsIntermediate()->assert_is_ops_input(act_types);

  auto *act = pattern->NewNode(act_repr())->assert_is_ops(act_types);

  auto *act_out_var =
      pattern->NewNode(act_out_repr())->assert_is_ops_output(act_types, "Out");

  ele_add->LinksFrom({ele_x_var, ele_y_var}).LinksTo({ele_out_var});
  act->LinksFrom({ele_out_var}).LinksTo({act_out_var});

  return act_out_var;
}

PDNode *patterns::ElewiseAddActInplaceGrad::operator()(
    paddle::framework::ir::PDNode *d_act_out_var,
    std::unordered_set<std::string> act_types) {
  // act_grad: in["Out", "Out@GRAD"], out["X@GRAD"]
  // ele_add_grad: in["Y", "Out@GRAD"], out["X@GRAD", "Y@GRAD"]
  auto *act_grad = pattern->NewNode(act_grad_repr())->assert_is_ops(act_types);

  auto *act_out_var =
      pattern->NewNode(act_out_repr())->assert_is_ops_input(act_types, "Out");

  auto *d_intermediate_var =
      pattern->NewNode(d_itermediate_out_repr())
          ->assert_is_ops_output(act_types, GradVarName("X"));

  act_grad->LinksFrom({d_act_out_var, act_out_var})
      .LinksTo({d_intermediate_var});

  auto *ele_y_var = pattern->NewNode(ele_y_repr())
                        ->assert_is_not_ctrl_var()
                        ->assert_is_op_input("elementwise_add_grad", "Y");

  auto *ele_add_grad = pattern->NewNode(ele_add_grad_repr())
                           ->assert_is_op("elementwise_add_grad");

  auto *d_ele_x_var =
      pattern->NewNode(d_ele_x_repr())
          ->assert_is_not_ctrl_var()
          ->assert_is_op_output("elementwise_add_grad", GradVarName("X"));

  auto *d_ele_y_var =
      pattern->NewNode(d_ele_y_repr())
          ->assert_is_not_ctrl_var()
          ->assert_is_op_output("elementwise_add_grad", GradVarName("Y"));

  ele_add_grad->LinksFrom({d_intermediate_var, ele_y_var})
      .LinksTo({d_ele_x_var, d_ele_y_var});

  return ele_add_grad;
}

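// conv2d + elementwise_add(bias) pattern; the conv output is an intermediate
// node and the elementwise_add output is returned.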
PDNode *patterns::ConvBias::operator()(
    paddle::framework::ir::PDNode *conv_input) {
  // Create Operators
  conv_input->assert_is_op_input("conv2d", "Input");
  auto *conv_op = pattern->NewNode(conv_repr())->assert_is_op("conv2d");
  auto *eltwise_op =
      pattern->NewNode(eltwise_repr())->assert_is_op("elementwise_add");
  // Create variables
  // Filter
  auto *conv_weight_var = pattern->NewNode(conv_weight_repr())
                              ->AsInput()
                              ->assert_is_persistable_var()
                              ->assert_is_op_input("conv2d", "Filter");
  // intermediate variable, will be removed in the IR after fuse.
  auto *conv_out_var = pattern->NewNode(conv_out_repr())
                           ->AsIntermediate()
                           ->assert_is_only_output_of_op("conv2d")
                           ->assert_is_op_input("elementwise_add");
  // Bias stored in elementwise_add
  auto *eltwise_bias_var = pattern->NewNode(eltwise_bias_repr())
                               ->AsInput()
                               ->assert_is_persistable_var()
                               ->assert_is_op_input("elementwise_add", "Y");
  // output
  auto *eltwise_out_var = pattern->NewNode(eltwise_out_repr())
                              ->AsOutput()
                              ->assert_is_op_output("elementwise_add");
  conv_op->LinksFrom({conv_input, conv_weight_var}).LinksTo({conv_out_var});
  eltwise_op->LinksFrom({conv_out_var, eltwise_bias_var})
      .LinksTo({eltwise_out_var});
  return eltwise_out_var;
}

PDNode *patterns::Conv::operator()() {
  auto conv_op = pattern->NewNode(conv_op_repr())->assert_is_op("conv2d");

  auto input_var = pattern->NewNode(conv_input_repr())
                       ->AsInput()
                       ->assert_is_op_input("conv2d", "Input");

  auto filter_var = pattern->NewNode(conv_filter_repr())
                        ->AsInput()
                        ->assert_is_op_input("conv2d", "Filter");

  auto output_var = pattern->NewNode(conv_output_repr())
                        ->AsOutput()
                        ->assert_is_op_output("conv2d", "Output");

  conv_op->LinksFrom({input_var, filter_var});
  conv_op->LinksTo({output_var});

  return output_var;
}

PDNode *patterns::ElementwiseAdd::operator()(PDNode *x_var) {
  auto elementwise_add_op = pattern->NewNode(elementwise_add_op_repr())
                                ->assert_is_op("elementwise_add");

  x_var->assert_is_op_input("elementwise_add", "X");

  auto y_var = pattern->NewNode(elementwise_add_x_repr())
                   ->AsInput()
                   ->assert_is_op_input("elementwise_add", "Y");

  auto out_var = pattern->NewNode(elementwise_add_out_repr())
                     ->AsOutput()
                     ->assert_is_op_output("elementwise_add", "Out");

  elementwise_add_op->LinksFrom({x_var, y_var});
  elementwise_add_op->LinksTo({out_var});

  return out_var;
}
}  // namespace ir
}  // namespace framework
}  // namespace paddle