/* Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>
#include "paddle/fluid/framework/ir/fusion_group/operation.h"
#include "paddle/fluid/framework/ir/node.h"
#include "paddle/fluid/framework/ir/subgraph_detector.h"

namespace paddle {
namespace framework {
namespace ir {
namespace fusion_group {

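// A SubGraph wraps a set of graph nodes that are candidates to be fused
// into a single function. Besides the op nodes themselves, the set also
// holds their input/output var nodes. type_ identifies the kind of fusion
// group, func_name_ names the generated function, and
// save_intermediate_out_ controls whether intermediate output vars are
// reported as outputs of the subgraph.
//
// A minimal usage sketch (here `op_nodes`, the detected op node set, and
// the literal arguments are hypothetical):
//
//   SubGraph subgraph(0, "fused_kernel_0", true, op_nodes);
//   if (subgraph.IsValid(2)) {
//     std::vector<Node*> inputs = subgraph.GetInputVarNodes();
//     std::vector<Node*> outputs = subgraph.GetOutputVarNodes();
//   }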
class SubGraph {
 public:
  SubGraph() = default;
  explicit SubGraph(int type) : type_(type) {}
  SubGraph(int type, std::string func_name, bool save_intermediate_out,
           const std::unordered_set<Node*>& nodes_set)
      : type_(type),
        func_name_(func_name),
        save_intermediate_out_(save_intermediate_out) {
    for (auto* n : nodes_set) {
      nodes_set_.insert(n);
      if (n && n->IsOp() && n->Op()) {
        // If the node is an op node, then add its input/output var nodes
        // into the subgraph.
        for (auto* in : n->inputs) {
          nodes_set_.insert(in);
        }
        for (auto* out : n->outputs) {
          nodes_set_.insert(out);
        }
      }
    }
    ExtractDataType();
  }

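  // A subgraph is valid only if it contains at least min_subgraph_size
  // operations and ExtractDataType() succeeds, i.e., all of its var nodes
  // hold LoDTensors of the same supported data type.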
  bool IsValid(int min_subgraph_size) {
    int num_operations = GetNumOperations();
    if (num_operations < min_subgraph_size) {
      VLOG(2) << "There are only " << num_operations
              << " operations in the subgraph. Expected at least "
              << min_subgraph_size;
      return false;
    }

    return ExtractDataType();
  }

  int GetType() const { return type_; }
  std::string GetDataType() const { return data_type_; }

  void SetFuncName(std::string func_name) { func_name_ = func_name; }
  std::string GetFuncName() const { return func_name_; }

  const std::unordered_set<Node*>& Nodes() const { return nodes_set_; }
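
  // Returns the nodes in topological order; the sort is performed lazily
  // on the first call and cached in sorted_nodes_.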
  const std::vector<Node*>& SortedNodes() {
    if (!is_sorted_) {
      TopologicalSort();
    }
    return sorted_nodes_;
  }

  size_t GetNumNodes() { return nodes_set_.size(); }

  bool Has(Node* n) { return nodes_set_.find(n) != nodes_set_.end(); }

  int GetNumOperations() {
    int num_operations = 0;
    for (auto* n : nodes_set_) {
      if (n && n->IsOp() && n->Op()) {
        num_operations++;
      }
    }
    return num_operations;
  }

  std::vector<Node*> GetInputVarNodes() {
    // The order of input nodes should be consistent everywhere.
    std::vector<Node*> input_vars;
    for (auto* n : SortedNodes()) {
      if (n && n->IsVar() && n->Var()) {
        bool is_found = true;
        // A var node without any producer is also considered an input var
        // of the subgraph.
        if (n->inputs.size() == 0U) {
          is_found = false;
        }
        // Normally a var node has only one input op node.
        for (auto* in : n->inputs) {
          if (!Has(in)) {
            is_found = false;
          }
        }
        if (!is_found) {
          input_vars.push_back(n);
        }
      }
    }
    return input_vars;
  }

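  // Returns the output var nodes of the subgraph. When
  // save_intermediate_out_ is true, every var produced by an op in the
  // subgraph is returned; otherwise only vars that are consumed by ops
  // outside the subgraph (or have no consumers at all) are returned.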
  std::vector<Node*> GetOutputVarNodes() {
    // The order of output nodes should be consistent everywhere.
    std::vector<Node*> output_vars_all;
    for (auto* n : SortedNodes()) {
      if (n && n->IsVar() && n->Var()) {
        // If the var node is the output of some op node in the subgraph, it
        // is considered an output var node of the subgraph.
        bool is_found = false;
        for (auto* in : n->inputs) {
          if (Has(in)) {
            is_found = true;
          }
        }
        if (is_found) {
          output_vars_all.push_back(n);
        }
      }
    }

    if (save_intermediate_out_) {
      return output_vars_all;
    }

    std::vector<Node*> output_vars_outside;
    for (auto* n : output_vars_all) {
      // If one of the var node's outputs is the input of some op outside
      // the subgraph, it is considered an output var node of the subgraph.
      bool is_found = true;
      if (n->outputs.size() == 0U) {
        is_found = false;
      }
      for (auto* out : n->outputs) {
        if (!Has(out)) {
          is_found = false;
        }
      }
      if (!is_found) {
        output_vars_outside.push_back(n);
      }
    }
    return output_vars_outside;
  }

 private:
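  // Checks that every var node in the subgraph holds a LoDTensor and that
  // all var nodes share the same data type, and records the corresponding
  // C type name ("float", "double" or "float16") in data_type_.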
  bool ExtractDataType() {
    bool is_first = true;
    proto::VarType::Type data_type = proto::VarType::FP32;
    for (auto* n : nodes_set_) {
      if (n && n->IsVar() && n->Var()) {
        if (n->Var()->GetType() != proto::VarType::LOD_TENSOR) {
          // All var nodes in a subgraph should hold LoDTensors.
          return false;
        }
        if (is_first) {
          data_type = n->Var()->GetDataType();
          is_first = false;
        } else if (n->Var()->GetDataType() != data_type) {
          // The data types of all var nodes in a subgraph must be the same.
          return false;
        }
      }
    }
    if (data_type == proto::VarType::FP32) {
      data_type_ = "float";
    } else if (data_type == proto::VarType::FP64) {
      data_type_ = "double";
    } else if (data_type == proto::VarType::FP16) {
      data_type_ = "float16";
    } else {
      VLOG(2) << "Only support fp32, fp64 and fp16 in fusion_group.";
      return false;
    }
    return true;
  }

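  // Sorts nodes_set_ topologically into sorted_nodes_. The inputs/outputs
  // of each var node are temporarily pruned to edges inside the subgraph,
  // so that NodesTSIterator starts from the subgraph's own input nodes;
  // the original edges are restored afterwards.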
  void TopologicalSort() {
    if (!is_sorted_) {
      std::unordered_map<Node*, std::vector<Node*>> inputs_map;
      std::unordered_map<Node*, std::vector<Node*>> outputs_map;
      for (auto* n : nodes_set_) {
        inputs_map[n] = n->inputs;
        outputs_map[n] = n->outputs;
      }

      for (auto* n : nodes_set_) {
        if (n && n->IsVar() && n->Var()) {
          // Keep only the inputs that are inside the subgraph, so that the
          // subgraph's input var nodes become start points of the sort.
          std::vector<Node*> inputs;
          for (auto* in : n->inputs) {
            if (Has(in)) {
              inputs.push_back(in);
            }
          }
          // Keep only the outputs that are inside the subgraph.
          std::vector<Node*> outputs;
          for (auto* out : n->outputs) {
            if (Has(out)) {
              outputs.push_back(out);
            }
          }
          n->inputs = inputs;
          n->outputs = outputs;
        }
      }
      // Collect the start points of the subgraph.
      std::vector<Node*> start_points;
      for (auto* n : nodes_set_) {
        if (n->inputs.empty()) {
          start_points.push_back(n);
        }
      }
      // Sort the subgraph.
      for (auto& n : iterator_range<NodesTSIterator>(
               NodesTSIterator(start_points), NodesTSIterator())) {
        sorted_nodes_.push_back(&n);
      }
      // Restore the inputs and outputs saved above.
      for (auto* n : nodes_set_) {
        n->inputs = inputs_map[n];
        n->outputs = outputs_map[n];
      }
    }
    is_sorted_ = true;
  }

 private:
  int type_{-1};
  std::string data_type_;
  std::string func_name_;
  bool save_intermediate_out_{true};

  std::unordered_set<Node*> nodes_set_;
  bool is_sorted_{false};
  std::vector<Node*> sorted_nodes_;
};

}  // namespace fusion_group
}  // namespace ir
}  // namespace framework
}  // namespace paddle