/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <deque>
#include <memory>
#include <set>
#include <string>
#include <unordered_map>
#include <vector>

#include "paddle/fluid/framework/op_desc.h"
#include "paddle/fluid/framework/proto_desc.h"
#include "paddle/fluid/framework/var_desc.h"
#include "paddle/fluid/platform/macros.h"

namespace paddle {
namespace framework {

Y
Yu Yang 已提交
32
class ProgramDesc;
F
fengjiayi 已提交
33 34 35 36 37

// Each Protobuf Message, we provide a XXXBind class. In that class, we optimize
// read/write speed. Only when we want the protobuf message, the local changes
// will be synchronized (by `Sync` method).

Y
Yu Yang 已提交
38
class BlockDesc {
F
fengjiayi 已提交
39
 public:
Y
Yu Yang 已提交
40
  BlockDesc(ProgramDesc *prog, proto::BlockDesc *desc);
F
fengjiayi 已提交
41

Y
Yu Yang 已提交
42
  BlockDesc(const BlockDesc &other, proto::BlockDesc *desc, ProgramDesc *prog);
Y
Yu Yang 已提交
43

Y
Yu Yang 已提交
44
  ~BlockDesc() {
45 46 47 48
    this->ClearPBVars();
    this->ClearPBOps();
  }

F
fengjiayi 已提交
49 50 51 52
  int32_t ID() const { return desc_->idx(); }

  int32_t Parent() const { return desc_->parent_idx(); }

Y
Yu Yang 已提交
53 54
  int32_t ForwardBlockID() const { return desc_->forward_block_idx(); }

Y
Yu Yang 已提交
55
  VarDesc *Var(const std::string &name_bytes);
F
fengjiayi 已提交
56

Y
Yu Yang 已提交
57
  VarDesc *FindVar(const std::string &name_bytes) const;
F
fengjiayi 已提交
58

Q
qiaolongfei 已提交
59 60
  bool HasVar(const std::string &var_name) const;

T
wip  
typhoonzero 已提交
61
  VarDesc *RenameVar(const std::string &old_name, const std::string &new_name);
T
typhoonzero 已提交
62

Y
Yu Yang 已提交
63
  VarDesc *FindVarRecursive(const std::string &name_bytes) const;
64

Y
Yang Yu 已提交
65
  VarDesc &FindRecursiveOrCreateVar(const std::string &name_bytes);
Y
Yang Yang(Tony) 已提交
66

67 68
  bool HasVarRecursive(const std::string &var_name) const;

Y
Yu Yang 已提交
69 70 71 72 73 74 75 76
  std::set<std::string> LocalVarNames() const {
    std::set<std::string> var_names;
    for (auto &var : vars_) {
      var_names.insert(var.first);
    }
    return var_names;
  }

Y
Yu Yang 已提交
77
  std::vector<VarDesc *> AllVars() const;
F
fengjiayi 已提交
78

Y
Yu Yang 已提交
79
  BlockDesc *ParentBlock() const;
F
fengjiayi 已提交
80

Y
Yu Yang 已提交
81 82 83 84
  BlockDesc *ForwardBlock() const;

  void SetForwardBlockID(int32_t forward_block_id);

Y
Yu Yang 已提交
85
  OpDesc *AppendOp();
F
fengjiayi 已提交
86

Y
Yu Yang 已提交
87
  void AppendAllocatedOp(std::unique_ptr<OpDesc> &&op_desc);
88

Y
Yu Yang 已提交
89
  OpDesc *PrependOp();
F
fengjiayi 已提交
90

Y
Yao Cheng 已提交
91
  void PrependAllocatedOp(std::unique_ptr<OpDesc> &&op_desc);
Y
Yao Cheng 已提交
92

93 94
  OpDesc *InsertOp(size_t index);

L
Luo Tao 已提交
95 96
  /*
   * Remove Op and its input/output variables.
97
   * Note that for either input or output variable, if it is also an input or
L
Luo Tao 已提交
98 99
   * output variable of other ops, we should remain it.
   */
T
typhoonzero 已提交
100 101
  void RemoveOp(size_t s, size_t e);

L
Luo Tao 已提交
102 103
  void RemoveVar(const std::string &name) { vars_.erase(name); }

Y
Yu Yang 已提交
104
  std::vector<OpDesc *> AllOps() const;
F
fengjiayi 已提交
105

106 107
  size_t OpSize() const { return ops_.size(); }

Y
Yu Yang 已提交
108
  OpDesc *Op(int idx) { return ops_.at(idx).get(); }
109

110
  void Flush();
F
fengjiayi 已提交
111

112
  proto::BlockDesc *Proto();
F
fengjiayi 已提交
113

Y
Yu Yang 已提交
114
  ProgramDesc *Program() const { return this->prog_; }
Y
Yu Yang 已提交
115

116 117 118 119
 private:
  void ClearPBOps();
  void ClearPBVars();

120
 private:
Y
Yu Yang 已提交
121
  ProgramDesc *prog_;       // not_own
122
  proto::BlockDesc *desc_;  // not_own
F
fengjiayi 已提交
123 124
  bool need_update_;

Y
Yu Yang 已提交
125 126
  std::deque<std::unique_ptr<OpDesc>> ops_;
  std::unordered_map<std::string, std::unique_ptr<VarDesc>> vars_;
D
format  
dongzhihong 已提交
127

Y
Yu Yang 已提交
128
  DISABLE_COPY_AND_ASSIGN(BlockDesc);
F
fengjiayi 已提交
129
};
F
fengjiayi 已提交
130 131
}  // namespace framework
}  // namespace paddle