// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <algorithm>
#include <array>
#include <sstream>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "paddle/fluid/framework/ir/graph_helper.h"
#include "paddle/fluid/framework/ir/graph_pattern_detector.h"
#include "paddle/fluid/framework/ir/graph_traits.h"
#include "paddle/fluid/framework/ir/graph_viz_pass.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/fluid/string/pretty_log.h"
#include "paddle/fluid/string/printf.h"
namespace paddle {
namespace framework {
namespace ir {

using string::PrettyLogEndl;
using string::PrettyLog;
using string::Style;

size_t PDPattern::id_ = 0UL;

PDNode *PDPattern::NewNode(const std::string &name) {
  if (!name.empty()) {
    PADDLE_ENFORCE_EQ(node_map_.count(name), 0,
                      "PDNode's name should be unique, but got duplicate [%s]",
                      name);
  }

  nodes_.emplace_back(new PDNode(this, name));
  auto *cur = nodes_.back().get();
  node_map_[name] = cur;
  return cur;
}

PDNode *PDPattern::NewNode(PDNode::teller_t &&teller, const std::string &name) {
  if (!name.empty()) {
    PADDLE_ENFORCE_EQ(node_map_.count(name), 0,
                      "PDNode's name should be unique, but got duplicate [%s]",
                      name);
  }

  nodes_.emplace_back(new PDNode(std::move(teller), this, name));
  auto *cur = nodes_.back().get();
  node_map_[name] = cur;
  return cur;
}

PDNode *PDPattern::RetrieveNode(const std::string &id) const {
  auto it = node_map_.find(id);
  if (it == node_map_.end()) {
    return nullptr;
  }

  return it->second;
}

void PDPattern::AddEdge(PDNode *a, PDNode *b) {
  PADDLE_ENFORCE(a);
  PADDLE_ENFORCE(b);
  PADDLE_ENFORCE(a != b, "can't connect a node to itself.");
  edges_.emplace_back(a, b);
}

void GraphPatternDetector::operator()(Graph *graph,
                                      GraphPatternDetector::handle_t handler) {
  if (!MarkPDNodesInGraph(*graph)) {
    return;
  }

  auto subgraphs = DetectPatterns();
  UniquePatterns(&subgraphs);
  RemoveOverlappedMatch(&subgraphs);
  ValidateByNodeRole(&subgraphs);

  if (subgraphs.empty()) return;
  PrettyLogEndl(Style::detail(), "---  detect %d subgraphs", subgraphs.size());
  int id = 0;
  for (auto &g : subgraphs) {
    VLOG(30) << "optimizing #" << id++ << " subgraph";
    handler(g, graph);
  }
}

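// A typical fuse pass drives the detector roughly as below. This is an
// illustrative sketch only: the node key "example/x" and the handler body are
// hypothetical, and mutable_pattern() / patterns::FC come from the
// accompanying header.
//
//   GraphPatternDetector gpd;
//   auto *x = gpd.mutable_pattern()
//                 ->NewNode("example/x")
//                 ->AsInput()
//                 ->assert_is_op_input("mul", "X");
//   patterns::FC fc_pattern(gpd.mutable_pattern(), "example");
//   fc_pattern(x, true /*with_bias*/);
//
//   gpd(graph, [](const GraphPatternDetector::subgraph_t &subgraph,
//                 Graph *g) {
//     // Rewrite the matched subgraph here, e.g. create a fused op and
//     // remove the replaced nodes with GraphSafeRemoveNodes().
//   });
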
bool GraphPatternDetector::MarkPDNodesInGraph(const ir::Graph &graph) {
  VLOG(30) << "mark pdnodes in graph";
  if (graph.Nodes().empty()) return false;

  for (auto &node : GraphTraits::DFS(graph)) {
    for (const auto &pdnode : pattern_.nodes()) {
      if (pdnode->Tell(&node)) {
        VLOG(40) << "pdnode " << pdnode->name() << " marked";
        pdnodes2nodes_[pdnode.get()].insert(&node);
      }
    }
  }
  // Check for early stop: if some PDNode cannot find any matched Node, the
  // whole pattern can never be hit.
  for (auto &pdnode : pattern_.nodes()) {
    if (!pdnodes2nodes_.count(pdnode.get())) {
      VLOG(40) << pdnode->name() << " can't find matched Node, early stop";
      // return false;
    }
  }
  for (auto &item : pdnodes2nodes_) {
    for (auto &n : item.second) {
      GetMarkedNodes(const_cast<Graph *>(&graph)).insert(n);
    }
  }
  VLOG(30) << pdnodes2nodes_.size() << " nodes marked";

  return !pdnodes2nodes_.empty();
}

// The intermediate Nodes may only link to nodes inside the pattern; otherwise
// the subgraph is dropped.
void GraphPatternDetector::ValidateByNodeRole(
    std::vector<GraphPatternDetector::subgraph_t> *subgraphs) {
  subgraphs->erase(
      std::remove_if(
          subgraphs->begin(), subgraphs->end(),
          [](const GraphPatternDetector::subgraph_t &subgraph) -> bool {
            // Collect the inputs and outputs.
            std::unordered_set<Node *> ios;
            for (auto &item : subgraph) {
              if (!item.first->IsIntermediate()) {
                ios.insert(item.second);
              }
            }
            for (auto &item : subgraph) {
              if (item.first->IsIntermediate()) {
                for (auto *x : item.second->inputs) {
                  if (!ios.count(x)) {
                    return true;
                  }
                }
                for (auto *x : item.second->outputs) {
                  if (!ios.count(x)) {
                    return true;
                  }
                }
              }
            }
            return false;
          }),
      subgraphs->end());
}

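// Bookkeeping for one candidate match: `roles` maps each pattern PDNode to
// the concrete graph Node bound to it, while `nodes_` ensures no graph Node
// is bound to two different roles within the same group.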
struct HitGroup {
  std::unordered_map<PDNode *, Node *> roles;

  bool Match(Node *node, PDNode *pat) {
    if (nodes_.count(node)) {
      if (!roles.count(pat)) return false;
      return roles[pat] == node;
    }
    return !roles.count(pat) || roles.at(pat) == node;
  }

  void Register(Node *node, PDNode *pat) {
    roles[pat] = node;
    nodes_.insert(node);
  }

 private:
  std::unordered_set<Node *> nodes_;
};

// Tell whether Node a links to b.
bool IsNodesLink(Node *a, Node *b) {
  for (auto *node : a->outputs) {
    if (b == node) {
      return true;
    }
  }
  return false;
}

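// Matching proceeds edge by edge: bi_records[step % 2] holds the partial
// matches found so far, and every pattern edge extends each partial match
// with every connected (source, target) Node pair that fits the two roles.
// This behaves like a relational join, so the number of candidate groups can
// grow multiplicatively in the worst case.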
std::vector<GraphPatternDetector::subgraph_t>
GraphPatternDetector::DetectPatterns() {
  // Init empty subgraphs.
  std::vector<GraphPatternDetector::subgraph_t> result;
  std::vector<HitGroup> init_groups;
  std::array<std::vector<HitGroup>, 2> bi_records;
  // PADDLE_ENFORCE(!pattern_.edges().empty(), "At least one edge is needed");
  auto *first_pnode = pattern_.edges().empty() ? pattern().nodes().front().get()
                                               : pattern_.edges().front().first;
  if (!pdnodes2nodes_.count(first_pnode)) return result;
  for (auto *node : pdnodes2nodes_[first_pnode]) {
    HitGroup group;
    group.roles[first_pnode] = node;
    init_groups.emplace_back(group);
  }

  int step = 0;
  bi_records[0] = std::move(init_groups);

  // Extend the partial matches edge by edge, deducing the connection
  // relations defined by the edges between PDNodes.
  for (const auto &edge : pattern_.edges()) {
    VLOG(40) << "check " << edge.first->name() << " -> " << edge.second->name();
    // TODO(Superjomn) Fix bug here: the groups might be duplicated here.
    // Each edge links two PDNodes, which play two roles. Detect pairs of
    // Nodes that match these two roles and are connected in the graph.
    auto &pre_groups = bi_records[step % 2];
    auto &cur_groups = bi_records[1 - (step++ % 2)];
    cur_groups.clear();
    if (pre_groups.empty()) break;
    // source -> target
    for (Node *source : pdnodes2nodes_[edge.first]) {
      for (Node *target : pdnodes2nodes_[edge.second]) {
        VLOG(80) << "check " << source->id() << " -- " << target->id();
        // TODO(Superjomn) add some prune strategies.
        for (const auto &group : pre_groups) {
          HitGroup new_group = group;
          if (IsNodesLink(source, target) &&
              new_group.Match(source, edge.first)) {
            new_group.Register(source, edge.first);
            if (new_group.Match(target, edge.second)) {
              new_group.Register(target, edge.second);
              cur_groups.push_back(new_group);
              // TODO(Superjomn) need to unique
            }
          }
        }
      }
    }
    VLOG(30) << "step " << step << " get records: " << cur_groups.size();
    for (auto &group : cur_groups) {
      for (auto &item : group.roles) {
        VLOG(40) << "node " << item.second->id() << " as "
                 << item.first->name();
      }
      VLOG(40) << "=========================================================";
    }
  }

  for (auto &group : bi_records[step % 2]) {
    GraphPatternDetector::subgraph_t subgraph;
    for (auto &role : group.roles) {
      subgraph.emplace(role.first, role.second);
    }
    result.emplace_back(subgraph);
  }
  return result;
}

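// Stable ordering for subgraph items (by PDNode pointer, then Node pointer),
// used to canonicalize a subgraph before hashing it in UniquePatterns().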
bool GraphItemCMP(const std::pair<PDNode *, Node *> &a,
                  const std::pair<PDNode *, Node *> &b) {
  if (a.first != b.first) {
    return a.first < b.first;
  } else {
    return a.second < b.second;
  }
}

// TODO(Superjomn) enhance the function as it may mark unique subgraphs as
// duplicates,
// see https://github.com/PaddlePaddle/Paddle/issues/13550
void GraphPatternDetector::UniquePatterns(
    std::vector<GraphPatternDetector::subgraph_t> *subgraphs) {
  if (subgraphs->empty()) return;
  std::vector<GraphPatternDetector::subgraph_t> result;

  std::unordered_set<size_t> set;
  std::hash<std::string> hasher;
  for (auto &g : *subgraphs) {
    // Sort the items in the sub-graph, and transform to a string key.
    std::vector<std::pair<PDNode *, Node *>> sorted_keys(g.begin(), g.end());
    std::sort(sorted_keys.begin(), sorted_keys.end(), GraphItemCMP);
    std::stringstream ss;
    for (auto &item : sorted_keys) {
      ss << item.first << ":" << item.second;
    }
    auto key = hasher(ss.str());
    if (!set.count(key)) {
      result.emplace_back(g);
      set.insert(key);
    }
  }
  *subgraphs = result;
}

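// Greedy, first-come-first-served: a subgraph is kept only if none of its
// intermediate nodes has been claimed by an earlier kept subgraph, so the
// outcome depends on the order of *subgraphs.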
void GraphPatternDetector::RemoveOverlappedMatch(
    std::vector<subgraph_t> *subgraphs) {
  std::vector<subgraph_t> result;
  std::unordered_set<Node *> node_set;

  for (const auto &subgraph : *subgraphs) {
    bool valid = true;
    for (auto &item : subgraph) {
      if (item.first->IsIntermediate() && node_set.count(item.second)) {
        valid = false;
        break;
      }
    }
    if (valid) {
      for (auto &item : subgraph) {
        node_set.insert(item.second);
      }
      result.push_back(subgraph);
    }
  }
  *subgraphs = result;
}

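// Render the pattern as a Graphviz DOT string, one node per PDNode and one
// edge per pattern edge; useful when debugging a pattern definition.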
std::string PDPattern::DotString() const {
  using inference::analysis::Dot;
  Dot dot;
  int id = 0;
  // Create Nodes
  std::unordered_map<PDNode *, std::string> node2dot;
  for (const auto &node : nodes()) {
    std::string node_id = "Node" + std::to_string(id++);
    dot.AddNode(node_id, {}, node->name());
    node2dot[node.get()] = node_id;
  }
  // Create Edges
  for (const auto &edge : edges()) {
    if (!node2dot.count(edge.first) || !node2dot.count(edge.second)) {
      LOG(ERROR) << "no node " << edge.first << " " << edge.second;
      continue;
    }
    auto &src = node2dot.at(edge.first);
    auto &trg = node2dot.at(edge.second);
    dot.AddEdge(src, trg, {});
  }
  return dot.Build();
}

PDNode &PDNode::LinksTo(const std::vector<PDNode *> &others) {
  // extend outlinks.
  for (PDNode *x : others) {
    pattern_->AddEdge(this, x);
  }
  return *this;
}

PDNode &PDNode::LinksFrom(const std::vector<PDNode *> &others) {
  // extend inlinks.
  for (PDNode *x : others) {
    pattern_->AddEdge(x, this);
  }
  return *this;
}

PDNode *PDNode::assert_is_op() {
  asserts_.emplace_back([](Node *x) { return x && x->IsOp(); });
  return this;
}

PDNode *PDNode::assert_is_op(const std::string &op_type) {
  asserts_.emplace_back([op_type](Node *x) {
    return x && x->IsOp() && x->Op()->Type() == op_type;
  });
  return this;
}

PDNode *PDNode::assert_is_var() {
  asserts_.emplace_back([](Node *x) { return x && x->IsVar(); });
  return this;
}

PDNode *PDNode::assert_is_not_ctrl_var() {
  asserts_.emplace_back([](Node *x) { return x && !x->IsCtrlVar(); });
  return this;
}

PDNode *PDNode::assert_var_not_persistable() {
  assert_is_var();
  asserts_.emplace_back([](Node *x) { return !x->Var()->Persistable(); });
  return this;
}

PDNode *PDNode::assert_is_persistable_var() {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) { return x->Var()->Persistable(); });
  return this;
}

PDNode *PDNode::assert_is_op_nth_input(const std::string &op_type,
                                       const std::string &argument, int nth) {
  assert_is_var();
  assert_is_op_input(op_type);
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op->IsOp() && op->Op()->Type() == op_type &&
          IsNthInput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_nth_output(const std::string &op_type,
                                        const std::string &argument, int nth) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op->IsOp() && op->Op()->Type() == op_type &&
          IsNthOutput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_only_input_of_op(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type &&
          op->inputs.size() == 1) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_only_output_of_op(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type &&
          op->outputs.size() == 1) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_output(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_output(const std::string &op_type,
                                    const std::string &argument) {
  assert_is_var();
  assert_is_op_nth_output(op_type, argument, 0);
  return this;
}
PDNode *PDNode::assert_is_op_input(const std::string &op_type) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op && op->IsOp() && op->Op() && op->Op()->Type() == op_type) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_op_input(const std::string &op_type,
                                   const std::string &argument) {
  assert_is_var();
  assert_is_op_nth_input(op_type, argument, 0);
  return this;
}

PDNode *PDNode::assert_op_has_n_inputs(const std::string &op_type, size_t n) {
  assert_is_op(op_type);
  asserts_.emplace_back([=](Node *x) { return x->inputs.size() == n; });
  return this;
}

PDNode *PDNode::assert_op_has_n_outputs(const std::string &op_type, size_t n) {
  assert_is_op(op_type);
  asserts_.emplace_back([=](Node *x) { return x->outputs.size() == n; });
  return this;
}

PDNode *PDNode::assert_more(PDNode::teller_t &&teller) {
  asserts_.emplace_back(std::move(teller));
  return this;
}

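// The assertions above compose by chaining; a hypothetical example (node key
// and argument names are illustrative only):
//
//   auto *w = pattern->NewNode("example/w")
//                 ->AsInput()
//                 ->assert_is_persistable_var()
//                 ->assert_is_op_input("mul", "Y");
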
PDNode *PDNode::assert_is_ops(const std::unordered_set<std::string> &op_types) {
  asserts_.emplace_back([op_types](Node *x) {
    return x && x->IsOp() && op_types.count(x->Op()->Type());
  });
  return this;
}

PDNode *PDNode::assert_is_ops_nth_input(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument, int nth) {
  assert_is_var();
  assert_is_ops_input(op_types);
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op->IsOp() && op_types.count(op->Op()->Type()) &&
          IsNthInput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_ops_nth_output(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument, int nth) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op->IsOp() && op_types.count(op->Op()->Type()) &&
          IsNthOutput(x, op, argument, nth))
        return true;
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_ops_output(
    const std::unordered_set<std::string> &op_types) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->inputs) {
      if (op && op->IsOp() && op->Op() && op_types.count(op->Op()->Type())) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_ops_output(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument) {
  assert_is_var();
  assert_is_ops_nth_output(op_types, argument, 0);
  return this;
}

PDNode *PDNode::assert_is_ops_input(
    const std::unordered_set<std::string> &op_types) {
  assert_is_var();
  asserts_.emplace_back([=](Node *x) {
    for (auto *op : x->outputs) {
      if (op && op->IsOp() && op->Op() && op_types.count(op->Op()->Type())) {
        return true;
      }
    }
    return false;
  });
  return this;
}

PDNode *PDNode::assert_is_ops_input(
    const std::unordered_set<std::string> &op_types,
    const std::string &argument) {
  assert_is_var();
  assert_is_ops_nth_input(op_types, argument, 0);
  return this;
}

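// Tells whether the variable node `node` feeds at least one op of `op_type`.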
bool VarLinksToOp(Node *node, const std::string &op_type) {
  for (auto *out : node->outputs) {
    if (out->IsOp() && out->Op()->Type() == op_type) {
      return true;
    }
  }
  return false;
}

bool IsNthInput(Node *var, Node *op, const std::string &argument, size_t nth) {
  PADDLE_ENFORCE(var->IsVar());
  PADDLE_ENFORCE(op->IsOp());
  if (op->Op()->Input(argument).size() <= nth) return false;
  return var->Name() == op->Op()->Input(argument)[nth];
}

bool IsNthOutput(Node *var, Node *op, const std::string &argument, size_t nth) {
  PADDLE_ENFORCE(var->IsVar());
  PADDLE_ENFORCE(op->IsOp());
  if (op->Op()->Output(argument).size() <= nth) return false;
  return var->Name() == op->Op()->Output(argument)[nth];
}

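// Remove the given nodes from the graph, then scrub dangling input/output
// pointers from every remaining node so the graph stays consistent.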
void GraphSafeRemoveNodes(Graph *graph,
                          const std::unordered_set<const Node *> &nodes) {
  for (auto *node : nodes) {
    graph->RemoveNode(const_cast<Node *>(node));
  }

  for (auto *node : graph->Nodes()) {
    for (auto it = node->inputs.begin(); it != node->inputs.end();) {
      if (nodes.count(*it)) {
        it = const_cast<Node *>(node)->inputs.erase(it);
      } else {
        it++;
      }
    }
    for (auto it = node->outputs.begin(); it != node->outputs.end();) {
      if (nodes.count(*it)) {
        it = const_cast<Node *>(node)->outputs.erase(it);
      } else {
        it++;
      }
    }
  }
}

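// Tells whether the variable node `node` is produced by an op of `op_type`.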
bool VarLinksFromOp(Node *node, const std::string &op_type) {
  for (auto *in : node->inputs) {
    if (in->IsOp() && in->Op()->Type() == op_type) {
      return true;
    }
  }
  return false;
}

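// Pattern: conv2d [+ elementwise_add] + batch_norm. Returns the batch_norm
// output variable; the conv (and eltwise) outputs are marked AsIntermediate
// so the fuse pass can remove them.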
PDNode *patterns::ConvBN::operator()(paddle::framework::ir::PDNode *conv_input,
                                     bool with_eltwise_add) {
  // Create Operators
  conv_input->assert_is_op_input("conv2d", "Input");
  auto *conv_op = pattern->NewNode(conv_repr())->assert_is_op("conv2d");

  PDNode *eltwise_op = nullptr;
  if (with_eltwise_add) {
    eltwise_op =
        pattern->NewNode(eltwise_repr())->assert_is_op("elementwise_add");
  }
  auto *batch_norm_op =
      pattern->NewNode(batch_norm_repr())->assert_is_op("batch_norm");
  // Create variables
  // Conv Filter
  auto *conv_weight_var = pattern->NewNode(conv_weight_repr())
                              ->AsInput()
                              ->assert_is_persistable_var()
                              ->assert_is_op_input("conv2d", "Filter");

  auto *conv_out_var = pattern->NewNode(conv_out_repr())
                           ->AsIntermediate()
                           ->assert_is_only_output_of_op("conv2d");

  PDNode *eltwise_y_in_var = nullptr;
  PDNode *eltwise_out_var = nullptr;
  if (with_eltwise_add) {
    // Conv output as Bias input
    conv_out_var->assert_is_op_input("elementwise_add", "X");
    // Bias
    eltwise_y_in_var = pattern->NewNode(eltwise_y_in_repr())
                           ->assert_is_op_input("elementwise_add", "Y")
                           ->AsInput();
    eltwise_out_var = pattern->NewNode(eltwise_out_repr())
                          ->AsIntermediate()
                          ->assert_is_only_output_of_op("elementwise_add");
  } else {
    // Conv output as BN input
    conv_out_var->assert_is_op_input("batch_norm", "X");
  }

  // BN Scale
  auto *bn_scale_var = pattern->NewNode(bn_scale_repr())
                           ->AsInput()
                           ->assert_is_persistable_var()
                           ->assert_is_op_input("batch_norm", "Scale");
  // BN Bias
  auto *bn_bias_var = pattern->NewNode(bn_bias_repr())
                          ->AsInput()
                          ->assert_is_persistable_var()
                          ->assert_is_op_input("batch_norm", "Bias");
  // BN Mean
  auto *bn_mean_var = pattern->NewNode(bn_mean_repr())
                          ->AsInput()
                          ->assert_is_persistable_var()
                          ->assert_is_op_input("batch_norm", "Mean");
  // BN Variance
  auto *bn_variance_var = pattern->NewNode(bn_variance_repr())
                              ->AsInput()
                              ->assert_is_persistable_var()
                              ->assert_is_op_input("batch_norm", "Variance");

  // BN output
  auto *bn_out_var = pattern->NewNode(bn_out_repr())
                         ->AsOutput()
                         ->assert_is_op_output("batch_norm");

  auto *bn_mean_out_var = pattern->NewNode(bn_mean_out_repr())
                              ->AsOutput()
                              ->assert_is_op_output("batch_norm", "MeanOut");

  auto *bn_variance_out_var =
      pattern->NewNode(bn_variance_out_repr())
          ->AsOutput()
          ->assert_is_op_output("batch_norm", "VarianceOut");

  auto *bn_saved_mean_var =
      pattern->NewNode(bn_saved_mean_repr())
          ->AsOutput()
          ->assert_is_op_output("batch_norm", "SavedMean");

  auto *bn_saved_variance_var =
      pattern->NewNode(bn_saved_variance_repr())
          ->AsOutput()
          ->assert_is_op_output("batch_norm", "SavedVariance");

  conv_op->LinksFrom({conv_input, conv_weight_var}).LinksTo({conv_out_var});

  if (with_eltwise_add) {
    eltwise_op->LinksFrom({conv_out_var, eltwise_y_in_var})
        .LinksTo({eltwise_out_var});
    batch_norm_op
        ->LinksFrom({eltwise_out_var, bn_scale_var, bn_bias_var, bn_mean_var,
                     bn_variance_var})
        .LinksTo({bn_out_var, bn_mean_out_var, bn_variance_out_var,
                  bn_saved_mean_var, bn_saved_variance_var});
  } else {
    batch_norm_op
        ->LinksFrom({conv_out_var, bn_scale_var, bn_bias_var, bn_mean_var,
                     bn_variance_var})
        .LinksTo({bn_out_var, bn_mean_out_var, bn_variance_out_var,
                  bn_saved_mean_var, bn_saved_variance_var});
  }
  return bn_out_var;
}

PDNode *patterns::ConvReLU::operator()(
    paddle::framework::ir::PDNode *conv_input) {
  // Create Operators
  conv_input->assert_is_op_input("conv2d", "Input");
  auto *conv_op = pattern->NewNode(conv_repr())->assert_is_op("conv2d");
  auto *relu_op = pattern->NewNode(relu_repr())->assert_is_op("relu");
  // Create variables
  // Filter
  auto *conv_weight_var = pattern->NewNode(conv_weight_repr())
                              ->AsInput()
                              ->assert_is_persistable_var()
                              ->assert_is_op_input("conv2d", "Filter");
  // intermediate variable, will be removed in the IR after fuse.
  auto *conv_out_var = pattern->NewNode(conv_out_repr())
                           ->AsIntermediate()
                           ->assert_is_only_output_of_op("conv2d")
                           ->assert_is_op_input("relu");
  // output
  auto *relu_out_var = pattern->NewNode(relu_out_repr())
                           ->AsOutput()
                           ->assert_is_op_output("relu");

  conv_op->LinksFrom({conv_input, conv_weight_var}).LinksTo({conv_out_var});
  relu_op->LinksFrom({conv_out_var}).LinksTo({relu_out_var});
  return relu_out_var;
}

PDNode *patterns::SeqConvEltAddRelu::operator()(
    paddle::framework::ir::PDNode *seqconv_input) {
  // Create Operators
  seqconv_input->assert_is_op_input("sequence_conv", "X");
  auto *seqconv_op = pattern->NewNode(seqconv_repr())
                         ->assert_is_op("sequence_conv")
                         ->assert_op_attr<bool>("paddingTrainable", false)
                         ->assert_op_attr<int>("contextStride", 1);

  auto *eltadd_op =
      pattern->NewNode(eltadd_repr())->assert_is_op("elementwise_add");
  auto *relu_op = pattern->NewNode(relu_repr())->assert_is_op("relu");
  // Create variables
  // Filter
  auto *seqconv_weight_var =
      pattern->NewNode(seqconv_weight_repr())
          ->AsInput()
          ->assert_is_persistable_var()
          ->assert_is_op_input("sequence_conv", "Filter");
  // Bias
  auto *eltadd_bias_var = pattern->NewNode(eltadd_bias_repr())
                              ->AsInput()
                              ->assert_is_op_input("elementwise_add");
  // intermediate variable, will be removed in the IR after fuse.
  auto *seqconv_out_var = pattern->NewNode(seqconv_out_repr())
                              ->AsIntermediate()
                              ->assert_is_only_output_of_op("sequence_conv")
                              ->assert_is_op_input("elementwise_add");
  auto *eltadd_out_var = pattern->NewNode(eltadd_out_repr())
                             ->AsIntermediate()
                             ->assert_is_only_output_of_op("elementwise_add")
                             ->assert_is_only_input_of_op("relu");
  // output
  auto *relu_out_var = pattern->NewNode(relu_out_repr())
                           ->AsOutput()
                           ->assert_is_op_output("relu");

  seqconv_op->LinksFrom({seqconv_input, seqconv_weight_var})
      .LinksTo({seqconv_out_var});
  eltadd_op->LinksFrom({seqconv_out_var, eltadd_bias_var})
      .LinksTo({eltadd_out_var});
  relu_op->LinksFrom({eltadd_out_var}).LinksTo({relu_out_var});
  return relu_out_var;
}

PDNode *patterns::FC::operator()(paddle::framework::ir::PDNode *x,
                                 bool with_bias) {
  // Create shared nodes.
  x->assert_is_op_input("mul", "X");
  auto *mul = pattern->NewNode(mul_repr())->assert_is_op("mul");

  auto *mul_w_var = pattern->NewNode(w_repr())
                        ->AsInput()
                        ->assert_is_persistable_var()
                        ->assert_is_op_input("mul", "Y");

  auto *mul_out_var =
      pattern->NewNode(mul_out_repr())->assert_is_op_output("mul");

  if (!with_bias) {  // not with bias
    // Add links.
    mul->LinksFrom({x, mul_w_var}).LinksTo({mul_out_var});
    return mul_out_var;

  } else {  // with bias
    mul_out_var->AsIntermediate()->assert_is_op_input("elementwise_add");
    // Create operators.
    auto *elementwise_add = pattern->NewNode(elementwise_add_repr())
                                ->assert_is_op("elementwise_add");
    // Create variables.
    auto *bias = pattern->NewNode(bias_repr())
                     ->assert_is_op_input("elementwise_add")
                     ->AsInput();

    auto *fc_out = pattern->NewNode(Out_repr())
                       ->AsOutput()
                       ->assert_is_op_output("elementwise_add");

    mul->LinksFrom({mul_w_var, x}).LinksTo({mul_out_var});
    elementwise_add->LinksFrom({mul_out_var, bias}).LinksTo({fc_out});
    return fc_out;
  }
}

PDNode *patterns::Embedding::operator()(PDNode *x) {
  x->assert_is_op_input("lookup_table", "Ids");
  auto *lookup_table_op =
      pattern->NewNode(lookup_table_repr())->assert_is_op("lookup_table");
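// NEW_NODE(arg__, io__) declares a local pattern node `arg__` asserted to be
// the lookup_table argument named after arg__ (the repr and assert method
// names are token-pasted, the argument name is stringized).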
#define NEW_NODE(arg__, io__)                    \
  auto *arg__ = pattern->NewNode(arg__##_repr()) \
                    ->assert_is_op_##io__("lookup_table", #arg__);

  NEW_NODE(W, input);

  NEW_NODE(Out, output);
#undef NEW_NODE

  lookup_table_op->LinksFrom({x, W});
  lookup_table_op->LinksTo({Out});
  return Out;
}

PDNode *patterns::LSTM::operator()(PDNode *x) {
  x->assert_is_op_input("lstm", "Input");
  auto *lstm_op = pattern->NewNode(lstm_repr())->assert_is_op("lstm");
#define NEW_NODE(arg__, io__) \
  auto *arg__ =               \
      pattern->NewNode(arg__##_repr())->assert_is_op_##io__("lstm", #arg__);

  // Currently, the H0 and C0 are optional
  // TODO(Superjomn) upgrade the fuse framework to support optional.
  // NEW_NODE(H0, input);
  // NEW_NODE(C0, input);
  NEW_NODE(Weight, input);
  NEW_NODE(Bias, input);

  NEW_NODE(Hidden, output);
  NEW_NODE(Cell, output);
  NEW_NODE(BatchGate, output);
  NEW_NODE(BatchCellPreAct, output);
#undef NEW_NODE

  lstm_op->LinksFrom({x, Weight, Bias});
  lstm_op->LinksTo({Hidden, Cell, BatchGate, BatchCellPreAct});
  return Hidden;
}

PDNode *patterns::GRU::operator()(PDNode *x) {
  x->assert_is_op_input("gru", "Input");
  auto *gru_op = pattern->NewNode(gru_repr())->assert_is_op("gru");
#define NEW_NODE(arg__, io__) \
  auto *arg__ =               \
      pattern->NewNode(arg__##_repr())->assert_is_op_##io__("gru", #arg__);

  NEW_NODE(Weight, input);
  // TODO(Superjomn): upgrade the fuse framework to support optional.
  // H0 and bias are optional
  NEW_NODE(Bias, input);  // also optional
  // NEW_NODE(H0, input);

  NEW_NODE(Hidden, output);
  // below are intermediate
  NEW_NODE(BatchGate, output);
  NEW_NODE(BatchResetHiddenPrev, output);
  NEW_NODE(BatchHidden, output);
#undef NEW_NODE

  BatchGate->AsIntermediate();
  BatchResetHiddenPrev->AsIntermediate();
  BatchHidden->AsIntermediate();

  gru_op->LinksFrom({x, Weight, Bias});
  gru_op->LinksTo({Hidden, BatchGate, BatchResetHiddenPrev, BatchHidden});
  return Hidden;
}

PDNode *patterns::ActElewiseAdd::operator()(
    paddle::framework::ir::PDNode *in_var,
    std::unordered_set<std::string> act_types) {
  in_var->assert_is_ops_input(act_types, "X");

  auto *act = pattern->NewNode(act_repr())->assert_is_ops(act_types);
  auto *act_out_var = pattern->NewNode(act_out_repr())
                          ->assert_is_not_ctrl_var()
                          ->assert_is_ops_output(act_types);
  act_out_var->AsIntermediate()->assert_is_op_input("elementwise_add");

  auto *ele_x_var = pattern->NewNode(ele_x_repr())
                        ->assert_is_not_ctrl_var()
                        ->assert_is_op_input("elementwise_add")
                        ->AsInput();
  auto *elementwise_add =
      pattern->NewNode(ele_add_repr())->assert_is_op("elementwise_add");

  auto *elewise_add_out = pattern->NewNode(elewise_add_out_repr())
                              ->AsOutput()
                              ->assert_is_op_output("elementwise_add", "Out");

  act->LinksFrom({in_var}).LinksTo({act_out_var});
  elementwise_add->LinksFrom({act_out_var, ele_x_var})
      .LinksTo({elewise_add_out});

  return elewise_add_out;
}

PDNode *patterns::ElewiseAddAct::operator()(
    paddle::framework::ir::PDNode *ele_x_var,
    std::unordered_set<std::string> act_types) {
  auto *ele_y_var = pattern->NewNode(ele_y_repr())
                        ->assert_is_op_input("elementwise_add", "Y");

  auto *ele_add =
      pattern->NewNode(ele_add_repr())->assert_is_op("elementwise_add");

  auto *ele_out_var = pattern->NewNode(elewise_add_out_repr())
                          ->assert_is_op_output("elementwise_add", "Out");

  ele_out_var->AsIntermediate()->assert_is_ops_input(act_types);

  auto *act = pattern->NewNode(act_repr())->assert_is_ops(act_types);

  auto *act_out_var =
      pattern->NewNode(act_out_repr())->assert_is_ops_output(act_types, "Out");

  ele_add->LinksFrom({ele_x_var, ele_y_var}).LinksTo({ele_out_var});
  act->LinksFrom({ele_out_var}).LinksTo({act_out_var});

  return act_out_var;
}

PDNode *patterns::ElewiseAddActInplaceGrad::operator()(
    paddle::framework::ir::PDNode *d_act_out_var,
    std::unordered_set<std::string> act_types) {
  // act_grad: in["Out", "Out@GRAD"], out["X@GRAD"]
  // ele_add_grad: in["Y", "Out@GRAD"], out["X@GRAD", "Y@GRAD"]
  auto *act_grad = pattern->NewNode(act_grad_repr())->assert_is_ops(act_types);

  auto *act_out_var =
      pattern->NewNode(act_out_repr())->assert_is_ops_input(act_types, "Out");

  auto *d_intermediate_var =
      pattern->NewNode(d_itermediate_out_repr())
          ->assert_is_ops_output(act_types, GradVarName("X"));

  act_grad->LinksFrom({d_act_out_var, act_out_var})
      .LinksTo({d_intermediate_var});

  auto *ele_y_var = pattern->NewNode(ele_y_repr())
                        ->assert_is_not_ctrl_var()
                        ->assert_is_op_input("elementwise_add_grad", "Y");

  auto *ele_add_grad = pattern->NewNode(ele_add_grad_repr())
                           ->assert_is_op("elementwise_add_grad");

  auto *d_ele_x_var =
      pattern->NewNode(d_ele_x_repr())
          ->assert_is_not_ctrl_var()
          ->assert_is_op_output("elementwise_add_grad", GradVarName("X"));

  auto *d_ele_y_var =
      pattern->NewNode(d_ele_y_repr())
          ->assert_is_not_ctrl_var()
          ->assert_is_op_output("elementwise_add_grad", GradVarName("Y"));

  ele_add_grad->LinksFrom({d_intermediate_var, ele_y_var})
      .LinksTo({d_ele_x_var, d_ele_y_var});

  return ele_add_grad;
}

PDNode *patterns::ConvBias::operator()(
    paddle::framework::ir::PDNode *conv_input) {
  // Create Operators
  conv_input->assert_is_op_input("conv2d", "Input");
  auto *conv_op = pattern->NewNode(conv_repr())->assert_is_op("conv2d");
  auto *eltwise_op =
      pattern->NewNode(eltwise_repr())->assert_is_op("elementwise_add");
  // Create variables
  // Filter
  auto *conv_weight_var = pattern->NewNode(conv_weight_repr())
                              ->AsInput()
                              ->assert_is_persistable_var()
                              ->assert_is_op_input("conv2d", "Filter");
  // intermediate variable, will be removed in the IR after fuse.
  auto *conv_out_var = pattern->NewNode(conv_out_repr())
                           ->AsIntermediate()
                           ->assert_is_only_output_of_op("conv2d")
                           ->assert_is_op_input("elementwise_add");
  // Bias stored in elementwise_add
  auto *eltwise_bias_var = pattern->NewNode(eltwise_bias_repr())
                               ->AsInput()
                               ->assert_is_persistable_var()
                               ->assert_is_op_input("elementwise_add", "Y");
  // output
  auto *eltwise_out_var = pattern->NewNode(eltwise_out_repr())
                              ->AsOutput()
                              ->assert_is_op_output("elementwise_add");
  conv_op->LinksFrom({conv_input, conv_weight_var}).LinksTo({conv_out_var});
  eltwise_op->LinksFrom({conv_out_var, eltwise_bias_var})
      .LinksTo({eltwise_out_var});
  return eltwise_out_var;
}

PDNode *patterns::Conv::operator()() {
  auto conv_op = pattern->NewNode(conv_op_repr())->assert_is_op("conv2d");

  auto input_var = pattern->NewNode(conv_input_repr())
                       ->AsInput()
                       ->assert_is_op_input("conv2d", "Input");

  auto filter_var = pattern->NewNode(conv_filter_repr())
                        ->AsInput()
                        ->assert_is_op_input("conv2d", "Filter");

  auto output_var = pattern->NewNode(conv_output_repr())
                        ->AsOutput()
                        ->assert_is_op_output("conv2d", "Output");

  conv_op->LinksFrom({input_var, filter_var});
  conv_op->LinksTo({output_var});

  return output_var;
}

PDNode *patterns::ElementwiseAdd::operator()(PDNode *x_var) {
  auto elementwise_add_op = pattern->NewNode(elementwise_add_op_repr())
                                ->assert_is_op("elementwise_add");

  x_var->assert_is_op_input("elementwise_add", "X");

  auto y_var = pattern->NewNode(elementwise_add_x_repr())
                   ->AsInput()
                   ->assert_is_op_input("elementwise_add", "Y");

  auto out_var = pattern->NewNode(elementwise_add_out_repr())
                     ->AsOutput()
                     ->assert_is_op_output("elementwise_add", "Out");

  elementwise_add_op->LinksFrom({x_var, y_var});
  elementwise_add_op->LinksTo({out_var});

  return out_var;
}
}  // namespace ir
}  // namespace framework
}  // namespace paddle