/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <algorithm>
#include <climits>
#include <functional>
#include <memory>
#include <string>
#include <typeindex>
#include <typeinfo>
#include <vector>

#include "paddle/fluid/framework/op_desc.h"
#include "paddle/fluid/framework/var_desc.h"
#include "paddle/fluid/platform/macros.h"
#include "paddle/utils/any.h"
namespace paddle {
namespace framework {
class OpDesc;
class VarDesc;
}  // namespace framework
}  // namespace paddle

X
start  
Xin Pan 已提交
34
namespace paddle {
X
Xin Pan 已提交
35
namespace framework {
X
tmp  
Xin Pan 已提交
36
namespace ir {
X
Xin Pan 已提交
37

X
Xin Pan 已提交
38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55
// Node should only created by Graph::CreateXXXNode().
// 1. Every Node should be part of a graph. No dangling Node exists.
// 2. Node only contains members necessary for building graph structure.
//    It doesn't contain other unrelated members, such as device, etc.
//
// Sometimes, for specific usages, Node needs to have additional members,
// such as device_placement, version in order to be executed. It is suggested
// to use composition pattern.
//
// class RunnableOp {
//    RunnableOp(ir::Node* n) : n_(n) { n_.WrappedBy(this); }
//
//    int any_thing_;
// }
//
// RunnableOp is owned by the ir::Node that composes it. In other words.
// ir::Node will be responsible for deleting RunnableOp, say, when ir::Node
// is deleted from the graph.
X
Xin Pan 已提交
56 57
class Node {
 public:
X
Xin Pan 已提交
58 59
  virtual ~Node() {
    if (!wrapper_.empty()) {
60
      VLOG(10) << "ir::Node deleting a wrapper node " << Name();
X
Xin Pan 已提交
61 62 63
      wrapper_deleter_();
    }
  }
X
clean1  
Xin Pan 已提交
64

X
polish  
Xin Pan 已提交
65
  enum class Type { kOperation, kVariable };
66
  enum class Dep { kSame = 0, kBefore = 1, kAfter = 2, kNoDep = 3 };
67 68 69 70
// msvc not support constexpr correctly.
// static constexpr member implies inline since CXX17 and may cause multiple
// definition.
#if !defined(_WIN32) && (__cplusplus < 201703L)
P
peizhilin 已提交
71
  static constexpr char kControlDepVarName[] = "__control_var";
D
dzhwinter 已提交
72
#else
P
peizhilin 已提交
73
  static const char kControlDepVarName[];
D
dzhwinter 已提交
74
#endif
X
better  
Xin Pan 已提交
75

X
Xin Pan 已提交
76 77
  Type NodeType() const { return type_; }

X
Xin Pan 已提交
78
  std::string Name() const { return name_; }
X
tmp  
Xin Pan 已提交
79

Y
Yan Chunwei 已提交
80
  VarDesc* Var() const {
81 82
    PADDLE_ENFORCE_EQ(IsVar(),
                      true,
83
                      platform::errors::InvalidArgument(
84 85
                          "Node(%s) must be kVariable type, but not %d.",
                          name_,
86
                          static_cast<int>(type_)));
87
    return var_desc_.get();
X
Xin Pan 已提交
88
  }
X
Xin Pan 已提交
89

90
  OpDesc* Op() const {
91 92
    PADDLE_ENFORCE_EQ(IsOp(),
                      true,
93 94
                      platform::errors::InvalidArgument(
                          "Node(%s) must be kOperation type, but not %d.",
95 96
                          name_,
                          static_cast<int>(type_)));
97
    return op_desc_.get();
X
tmp  
Xin Pan 已提交
98
  }
99

X
Xin Pan 已提交
100
  // Set the `wrapper` that wraps the Node. `wrapper` is owned by Node.
X
clean1  
Xin Pan 已提交
101 102 103 104 105 106 107
  template <typename T>
  void WrappedBy(T* wrapper) {
    if (!wrapper_.empty()) {
      wrapper_deleter_();
    }
    wrapper_ = wrapper;
    wrapper_deleter_ = [wrapper]() { delete wrapper; };
X
Xin Pan 已提交
108
    wrapper_type_ = std::type_index(typeid(T));
X
clean1  
Xin Pan 已提交
109 110
  }

X
Xin Pan 已提交
111
  // Return a reference to the `wrapper`.
X
clean1  
Xin Pan 已提交
112 113
  template <typename T>
  T& Wrapper() {
114
    try {
115 116
      return *paddle::any_cast<T*>(wrapper_);
    } catch (paddle::bad_any_cast&) {
117 118
      PADDLE_THROW(platform::errors::InvalidArgument(
          "Invalid wrapper type error, expected %s, actual %s.",
119 120
          typeid(T).name(),
          wrapper_type_.name()));
121
    }
X
clean1  
Xin Pan 已提交
122 123
  }

X
Xin Pan 已提交
124
  // Test if the Node is wrapped by type T.
X
Xin Pan 已提交
125
  template <typename T>
126
  bool IsWrappedBy() const {
X
Xin Pan 已提交
127 128 129
    return std::type_index(typeid(T)) == wrapper_type_;
  }

X
Xin Pan 已提交
130
  // Please don't use this API!
131 132
  int id() const { return id_; }

133 134 135
  // Only use this for auto parallel.
  // A node does not have original desc if the return is zero.
  uint64_t OriginalDescId() const { return original_desc_id_; }
136
  int GraphId() const { return graph_id_; }
137

138 139
  bool IsOp() const { return type_ == Type::kOperation; }
  bool IsVar() const { return type_ == Type::kVariable; }
C
chengduo 已提交
140 141 142 143
  bool IsCtrlVar() const {
    return type_ == Type::kVariable &&
           Name().find(ir::Node::kControlDepVarName) != std::string::npos;
  }
144

145
  void RenameVar(const std::string& new_name) {
146
    PADDLE_ENFORCE_EQ(
147 148
        type_ == Type::kVariable && var_desc_,
        true,
149
        platform::errors::InvalidArgument("Node must be type of variable."));
150 151 152 153
    name_ = new_name;
    var_desc_->SetName(new_name);
  }

154 155 156 157
  int DescOrder() const { return desc_order_; }

  int GetVarNodeBlockId() const {
    PADDLE_ENFORCE_EQ(
158 159
        type_ == Type::kVariable && var_desc_,
        true,
160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179
        platform::errors::InvalidArgument("Node must be type of variable."));
    return block_id_;
  }

  const std::string ToString() const {
    if (IsOp()) {
      std::string op_str(Name());

      const auto& op = Op();
      if (op == nullptr) {
        // Node is an Op but hasn't OpDesc (often create by CreateEmptyNode),
        // like ScaleLossGradOp, it's type is OpHandle, which created by Pass
        // and then inserted into graph.
        // For OpHandle, we have to use Node's input and output for sorting.
        std::vector<Node*> sorted_inputs(inputs);
        std::vector<Node*> sorted_outputs(outputs);

        auto comparator = [](Node* a, Node* b) {
          return a->Name() > b->Name();
        };
180 181 182 183
        std::stable_sort(
            sorted_inputs.begin(), sorted_inputs.end(), comparator);
        std::stable_sort(
            sorted_outputs.begin(), sorted_outputs.end(), comparator);
184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237

        std::string out_str = "{";
        std::string pre_str = "";
        for (const auto& output : sorted_outputs) {
          out_str.append(pre_str + output->Name());
          pre_str = ", ";
        }
        out_str.append("} = ");

        std::string in_str = "(";
        pre_str = "";
        for (const auto& input : sorted_inputs) {
          in_str.append(pre_str + input->Name());
          pre_str = ", ";
        }
        in_str.append(")");
        op_str = out_str + op_str + in_str;
      } else {
        // A normal Op, has OpDesc, create from ProgramDesc
        std::string out_str = "{";
        std::string outer_pre_str = "";
        for (const auto& output : op->OutputNames()) {
          out_str.append(outer_pre_str + output + "=[");
          std::string inner_pre_str = "";
          for (const auto& arg : op->Output(output)) {
            out_str.append(inner_pre_str + arg);
            inner_pre_str = " ,";
          }
          outer_pre_str = ", ";
          out_str.append("]");
        }
        out_str.append("} = ");

        std::string in_str = "(";
        outer_pre_str = "";
        for (const auto& input : op->InputNames()) {
          in_str.append(outer_pre_str + input + "=[");
          std::string inner_pre_str = "";
          for (const auto& arg : op->Input(input)) {
            in_str.append(inner_pre_str + arg);
            inner_pre_str = " ,";
          }
          outer_pre_str = " ,";
          in_str.append("]");
        }
        in_str.append(")");
        op_str = out_str + op_str + in_str;
      }

      return op_str;
    }
    return Name();
  }

238 239
  std::vector<Node*> inputs;
  std::vector<Node*> outputs;
X
Xin Pan 已提交
240

241 242 243 244 245 246
  // Because NO_DESC_ORDER is a constexpr number,
  // no one can change it, meanwhile, we need
  // check whether the DescOrder invalid sometime,
  // so expose it is a good idea
  static constexpr int NO_DESC_ORDER = INT_MAX;

X
Xin Pan 已提交
247
 protected:
248
  std::string name_;
249 250
  std::unique_ptr<VarDesc> var_desc_;
  std::unique_ptr<OpDesc> op_desc_;
X
Xin Pan 已提交
251
  Type type_;
252
  int id_;
X
Xin Pan 已提交
253

254 255 256
  int desc_order_;
  int block_id_{-1};

257 258 259
  // Store the original id of var desc or op desc.
  // Only use this for auto parallel.
  uint64_t original_desc_id_{0};
260
  int graph_id_{-1};
261

X
tmp  
Xin Pan 已提交
262
 private:
263 264
  // ID can only set by a Graph.
  void SetId(int id) { id_ = id; }
265
  void SetGraphId(int graph_id) { graph_id_ = graph_id; }
266

267 268 269 270
  // desc_order can only set by a Graph when constructing a Graph from a
  // BlockDesc.
  void SetDescOrder(int desc_order) { desc_order_ = desc_order; }

271
  friend class Graph;
X
Xin Pan 已提交
272 273
  friend std::unique_ptr<Node> CreateNodeForTest(const std::string& name,
                                                 Node::Type type);
D
dzhwinter 已提交
274 275
  friend std::unique_ptr<Node> CreateNodeForTest(VarDesc* var_desc);
  friend std::unique_ptr<Node> CreateNodeForTest(OpDesc* op_desc);
X
Xin Pan 已提交
276

277 278 279 280 281 282 283
  explicit Node(const std::string& name, Type type, int block_id = 0)
      : name_(name),
        var_desc_(nullptr),
        op_desc_(nullptr),
        type_(type),
        desc_order_(NO_DESC_ORDER),
        block_id_(block_id) {}
X
Xin Pan 已提交
284

285
  explicit Node(VarDesc* var_desc, int block_id)
X
Xin Pan 已提交
286 287 288
      : name_(var_desc->Name()),
        var_desc_(new VarDesc(*var_desc)),
        op_desc_(nullptr),
289 290
        type_(Type::kVariable),
        desc_order_(NO_DESC_ORDER),
291 292
        block_id_(block_id),
        original_desc_id_(var_desc->OriginalId()) {}
X
Xin Pan 已提交
293 294 295 296 297

  explicit Node(OpDesc* op_desc)
      : name_(op_desc->Type()),
        var_desc_(nullptr),
        op_desc_(new OpDesc(*op_desc, op_desc->Block())),
298
        type_(Type::kOperation),
299 300
        desc_order_(NO_DESC_ORDER),
        original_desc_id_(op_desc->OriginalId()) {}
X
Xin Pan 已提交
301 302 303

  Node() = delete;

304
  paddle::any wrapper_;
X
clean1  
Xin Pan 已提交
305
  std::function<void(void)> wrapper_deleter_;
X
Xin Pan 已提交
306
  std::type_index wrapper_type_ = std::type_index(typeid(void));
X
clean1  
Xin Pan 已提交
307

X
Xin Pan 已提交
308 309 310
  DISABLE_COPY_AND_ASSIGN(Node);
};

X
Xin Pan 已提交
311 312
std::unique_ptr<Node> CreateNodeForTest(const std::string& name,
                                        Node::Type type);
D
dzhwinter 已提交
313
std::unique_ptr<Node> CreateNodeForTest(VarDesc* var_desc);
X
Xin Pan 已提交
314

D
dzhwinter 已提交
315
std::unique_ptr<Node> CreateNodeForTest(OpDesc* op_desc);
X
tmp  
Xin Pan 已提交
316
}  // namespace ir
X
Xin Pan 已提交
317
}  // namespace framework
X
start  
Xin Pan 已提交
318
}  // namespace paddle