block_desc.cc 5.7 KB
Newer Older
F
fengjiayi 已提交
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/framework/block_desc.h"
16
#include "paddle/framework/operator.h"
F
fengjiayi 已提交
17
#include "paddle/framework/program_desc.h"
F
fengjiayi 已提交
18 19 20 21

namespace paddle {
namespace framework {

Y
Yu Yang 已提交
22
// Return the variable named `name`, creating it in this block if absent.
// Only a fresh creation marks the block as needing a proto re-sync.
VarDesc *BlockDesc::Var(const std::string &name) {
  auto entry = vars_.find(name);
  if (entry != vars_.end()) return entry->second.get();
  need_update_ = true;
  VarDesc *created = new VarDesc(name);
  vars_[name].reset(created);
  return created;
}

Y
Yu Yang 已提交
33
// Look up `name` in this block only (no parent traversal).
// Returns nullptr when the variable does not exist here.
VarDesc *BlockDesc::FindVar(const std::string &name) const {
  auto entry = vars_.find(name);
  return entry == vars_.end() ? nullptr : entry->second.get();
}

Y
Yu Yang 已提交
41
bool BlockDesc::HasVar(const std::string &name) const {
Q
qiaolongfei 已提交
42
  return vars_.find(name) != vars_.end();
Q
qiaolongfei 已提交
43 44
}

T
wip  
typhoonzero 已提交
45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65
// Rename the variable `old_name` to `new_name` inside this block, updating
// every op's inputs/outputs to match. Returns the (new) VarDesc, or nullptr
// if `old_name` does not exist in this block.
VarDesc *BlockDesc::RenameVar(const std::string &old_name,
                              const std::string &new_name) {
  if (!this->HasVar(old_name)) {
    return nullptr;
  }
  // Guard against old_name == new_name: the original code reset the map
  // entry and then erased the same key, deleting the VarDesc it was about
  // to return (dangling pointer).
  if (old_name == new_name) {
    return this->Var(old_name);
  }
  need_update_ = true;
  auto *var = this->Var(old_name);
  // Clone through the proto form so the new VarDesc carries over shape,
  // type, lod level, persistable flag, etc., then fix up the name.
  VarDesc *new_var = new VarDesc(*(var->Proto()));
  new_var->SetName(new_name);
  vars_[new_name].reset(new_var);

  // Rename the argument in the inputs and outputs of every op in this
  // block; OpDesc::Rename is a no-op for ops that don't reference it.
  for (const auto &op : ops_) {
    op->Rename(old_name, new_name);
  }
  vars_.erase(old_name);
  return new_var;
}

Y
Yu Yang 已提交
71
// Look up `name` in this block, then recursively in ancestor blocks.
// The empty-var sentinel never resolves to anything.
VarDesc *BlockDesc::FindVarRecursive(const std::string &name) const {
  if (name == kEmptyVarName) return nullptr;

  auto entry = vars_.find(name);
  if (entry != vars_.end()) return entry->second.get();
  // Not found locally: delegate to the parent block, if there is one.
  if (Parent() == kNoneBlockIndex) return nullptr;
  return ParentBlock()->FindVarRecursive(name);
}

Y
Yang Yu 已提交
82
// Find `name_bytes` in this block or any ancestor; if it exists nowhere,
// create it in this block. Always returns a valid reference.
VarDesc &BlockDesc::FindRecursiveOrCreateVar(const std::string &name_bytes) {
  VarDesc *found = FindVarRecursive(name_bytes);
  return found != nullptr ? *found : *Var(name_bytes);
}

Y
Yu Yang 已提交
90
bool BlockDesc::HasVarRecursive(const std::string &name) const {
91 92 93
  return FindVarRecursive(name) != nullptr;
}

Y
Yu Yang 已提交
94 95
std::vector<VarDesc *> BlockDesc::AllVars() const {
  std::vector<VarDesc *> res;
F
fengjiayi 已提交
96 97 98 99 100 101
  for (const auto &p : vars_) {
    res.push_back(p.second.get());
  }
  return res;
}

Y
Yu Yang 已提交
102
// Append a new, empty op to the end of this block and return it.
OpDesc *BlockDesc::AppendOp() {
  need_update_ = true;
  OpDesc *appended = new OpDesc(this);
  ops_.emplace_back(appended);
  return appended;
}

Y
Yu Yang 已提交
108
void BlockDesc::AppendAllocatedOp(std::unique_ptr<OpDesc> &&op_desc) {
109 110 111 112
  need_update_ = true;
  ops_.emplace_back(std::move(op_desc));
}

Y
Yu Yang 已提交
113
// Insert a new, empty op at the front of this block and return it.
OpDesc *BlockDesc::PrependOp() {
  need_update_ = true;
  OpDesc *prepended = new OpDesc(this);
  ops_.emplace_front(prepended);
  return prepended;
}

T
typhoonzero 已提交
119
// Remove the ops in the half-open index range [s, e) from this block.
// Invalid or empty ranges are ignored.
void BlockDesc::RemoveOp(size_t s, size_t e) {
  // Validate with indices, not iterators: the old check
  // (`begin()+s == end() || begin()+e == end()`) wrongly rejected ranges
  // ending exactly at the last op (e == size), and computing begin()+s
  // for an index past the end is undefined behavior before the check runs.
  if (s >= e || e > ops_.size()) {
    return;
  }
  need_update_ = true;
  for (auto it = ops_.begin() + s; it != ops_.begin() + e; ++it) {
    // TODO(typhoonzero): delete vars if no other op uses them.
    for (const auto &n : (*it)->InputArgumentNames()) {
      VLOG(3) << "deleting var " << n;
    }
  }
  ops_.erase(ops_.begin() + s, ops_.begin() + e);
}

Y
Yu Yang 已提交
134 135
std::vector<OpDesc *> BlockDesc::AllOps() const {
  std::vector<OpDesc *> res;
F
fengjiayi 已提交
136 137 138 139 140 141
  for (const auto &op : ops_) {
    res.push_back(op.get());
  }
  return res;
}

Y
Yu Yang 已提交
142
// Synchronize the in-memory ops_/vars_ state into the underlying
// proto::BlockDesc (desc_). Ops are flushed unconditionally; the repeated
// proto fields are rebuilt only when need_update_ is set.
void BlockDesc::Flush() {
  // Ensure each op's own proto is up to date before (re)attaching it below.
  for (auto &op_desc : ops_) {
    op_desc->Flush();
  }

  if (need_update_) {
    auto &op_field = *this->desc_->mutable_ops();
    // Detach the stale pointers first: the repeated field does not own
    // them (they belong to ops_), so ClearPBOps releases rather than
    // deletes. Order matters — clearing after AddAllocated would release
    // the freshly attached protos instead.
    this->ClearPBOps();
    op_field.Reserve(static_cast<int>(ops_.size()));
    for (auto &op_desc : ops_) {
      // AddAllocated attaches the existing proto without copying it.
      op_field.AddAllocated(op_desc->Proto());
    }
    // Same detach-then-reattach dance for the vars field.
    auto &var_field = *this->desc_->mutable_vars();
    this->ClearPBVars();
    var_field.Reserve(static_cast<int>(vars_.size()));
    for (auto &var_desc : vars_) {
      var_field.AddAllocated(var_desc.second->Proto());
    }
    need_update_ = false;
  }
}

Y
Yu Yang 已提交
164
// Return the parent block recorded in the proto, or nullptr when this is a
// root block (parent index is the kNoneBlockIndex sentinel).
BlockDesc *BlockDesc::ParentBlock() const {
  const auto parent_idx = this->desc_->parent_idx();
  if (parent_idx == kNoneBlockIndex) {
    return nullptr;
  }
  return prog_->MutableBlock(static_cast<size_t>(parent_idx));
}

Y
Yu Yang 已提交
171
// Return the underlying proto, flushing pending in-memory changes first so
// the caller always observes the current state.
proto::BlockDesc *BlockDesc::Proto() {
  Flush();
  return desc_;
}
175

Y
Yu Yang 已提交
176
// Build the in-memory view of an existing proto block: wrap every proto
// var and op in its VarDesc/OpDesc counterpart. Nothing is dirty yet.
BlockDesc::BlockDesc(ProgramDesc *prog, proto::BlockDesc *desc)
    : prog_(prog), desc_(desc), need_update_(false) {
  for (const auto &var_pb : desc_->vars()) {
    vars_[var_pb.name()].reset(new VarDesc(var_pb));
  }
  for (const auto &op_pb : desc_->ops()) {
    ops_.emplace_back(new OpDesc(op_pb, prog, this));
  }
}

Y
Yu Yang 已提交
186 187
// Deep-copy constructor: clone `other`'s ops and vars into a new block
// backed by `desc`/`prog`. Marked dirty so the proto is rebuilt on Flush.
BlockDesc::BlockDesc(const BlockDesc &other, proto::BlockDesc *desc,
                     ProgramDesc *prog)
    : prog_(prog), desc_(desc) {
  need_update_ = true;
  // Ops are cloned through their proto form so this block owns fresh copies.
  for (const auto &src_op : other.ops_) {
    ops_.emplace_back(new OpDesc(*src_op->Proto(), prog, this));
  }
  // Vars are cloned via VarDesc's copy constructor.
  for (const auto &kv : other.vars_) {
    vars_[kv.first].reset(new VarDesc(*kv.second));
  }
}
198

Y
Yu Yang 已提交
199
void BlockDesc::ClearPBOps() {
200 201 202 203 204 205 206
  auto ops = this->desc_->mutable_ops();
  while (!ops->empty()) {
    // we do not own the OpDesc, so release the ownership.
    ops->ReleaseLast();
  }
}

Y
Yu Yang 已提交
207
void BlockDesc::ClearPBVars() {
208 209 210 211 212 213 214
  auto vars = this->desc_->mutable_vars();
  while (!vars->empty()) {
    // we do not own the VarDesc, so release the ownership.
    vars->ReleaseLast();
  }
}

F
fengjiayi 已提交
215 216
}  // namespace framework
}  // namespace paddle