/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/program_desc.h"

namespace paddle {
namespace framework {

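// Returns the variable named `name`, creating it in this block first if it
// does not exist yet. Only a fresh creation marks the block as modified.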
VarDesc *BlockDesc::Var(const std::string &name) {
  auto it = vars_.find(name);
  if (it != vars_.end()) {
    return it->second.get();
  }
  need_update_ = true;
  auto *var = new VarDesc(name);
  vars_[name].reset(var);
  return var;
}

VarDesc *BlockDesc::FindVar(const std::string &name) const {
  auto it = vars_.find(name);
  if (it == vars_.end()) {
    return nullptr;
  }
  return it->second.get();
}

bool BlockDesc::HasVar(const std::string &name) const {
  return vars_.find(name) != vars_.end();
}

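// Renames a variable by cloning its proto under `new_name`, retargeting the
// inputs and outputs of every op in this block, and erasing the old entry.
// Returns nullptr if `old_name` is not known to this block.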
VarDesc *BlockDesc::RenameVar(const std::string &old_name,
                              const std::string &new_name) {
  if (!this->HasVar(old_name)) {
    return nullptr;
  }
  need_update_ = true;
  auto *var = this->Var(old_name);
  VarDesc *new_var = new VarDesc(*(var->Proto()));
  new_var->SetName(new_name);
  vars_[new_name].reset(new_var);
  // rename inputs and outputs
  for (const auto &op : ops_) {
    op->Rename(old_name, new_name);
  }
  vars_.erase(old_name);
  return new_var;
}

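// Looks up `name` in this block first and then in the enclosing (parent)
// blocks; returns nullptr if no block in the chain defines it.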
VarDesc *BlockDesc::FindVarRecursive(const std::string &name) const {
  if (name == kEmptyVarName) return nullptr;

  auto it = vars_.find(name);
  if (it == vars_.end()) {
    return Parent() == kNoneBlockIndex ? nullptr
                                       : ParentBlock()->FindVarRecursive(name);
  }
  return it->second.get();
}

VarDesc &BlockDesc::FindRecursiveOrCreateVar(const std::string &name_bytes) {
  VarDesc *res = FindVarRecursive(name_bytes);
  if (res == nullptr) {
    res = Var(name_bytes);
  }
  return *res;
}

bool BlockDesc::HasVarRecursive(const std::string &name) const {
  return FindVarRecursive(name) != nullptr;
}

std::vector<VarDesc *> BlockDesc::AllVars() const {
  std::vector<VarDesc *> res;
  for (const auto &p : vars_) {
    res.push_back(p.second.get());
  }
  return res;
}

OpDesc *BlockDesc::AppendOp() {
  need_update_ = true;
  ops_.emplace_back(new OpDesc(this));
  return ops_.back().get();
}

void BlockDesc::AppendAllocatedOp(std::unique_ptr<OpDesc> &&op_desc) {
  need_update_ = true;
  ops_.emplace_back(std::move(op_desc));
}

OpDesc *BlockDesc::PrependOp() {
  need_update_ = true;
  ops_.emplace_front(new OpDesc(this));
  return ops_.front().get();
}

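// Removes the ops in the half-open index range [s, e) from this block.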
void BlockDesc::RemoveOp(size_t s, size_t e) {
  // Nothing to do for an empty or out-of-range span. The original check
  // rejected e == ops_.size(), which made it impossible to remove a range
  // ending at the last op.
  if (s >= e || e > ops_.size()) {
    return;
  }
  need_update_ = true;
  for (auto it = ops_.begin() + s; it != ops_.begin() + e; it++) {
    auto names = (*it)->InputArgumentNames();
    for (auto n : names) {
      // TODO(typhoonzero): delete vars if no other op uses them.
      VLOG(3) << "deleting var " << n;
    }
  }
  ops_.erase(ops_.begin() + s, ops_.begin() + e);
}

std::vector<OpDesc *> BlockDesc::AllOps() const {
  std::vector<OpDesc *> res;
  for (const auto &op : ops_) {
    res.push_back(op.get());
  }
  return res;
}

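// Synchronizes the in-memory op and var descriptions back into the underlying
// protobuf message. The stale proto entries are released (not deleted) first,
// because they are owned by the OpDesc/VarDesc objects rather than the proto.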
void BlockDesc::Flush() {
  for (auto &op_desc : ops_) {
    op_desc->Flush();
  }

  if (need_update_) {
    auto &op_field = *this->desc_->mutable_ops();
    this->ClearPBOps();
    op_field.Reserve(static_cast<int>(ops_.size()));
    for (auto &op_desc : ops_) {
      op_field.AddAllocated(op_desc->Proto());
    }
    auto &var_field = *this->desc_->mutable_vars();
    this->ClearPBVars();
    var_field.Reserve(static_cast<int>(vars_.size()));
    for (auto &var_desc : vars_) {
      var_field.AddAllocated(var_desc.second->Proto());
    }
    need_update_ = false;
  }
}

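// Returns the enclosing block, or nullptr if this is the outermost block.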
BlockDesc *BlockDesc::ParentBlock() const {
  if (this->desc_->parent_idx() == kNoneBlockIndex) {
    return nullptr;
  }
  return prog_->MutableBlock(static_cast<size_t>(this->desc_->parent_idx()));
}

proto::BlockDesc *BlockDesc::Proto() {
  Flush();
  return desc_;
}

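// Builds the in-memory representation on top of an existing protobuf block.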
BlockDesc::BlockDesc(ProgramDesc *prog, proto::BlockDesc *desc)
    : prog_(prog), desc_(desc), need_update_(false) {
  for (const proto::VarDesc &var_desc : desc_->vars()) {
    vars_[var_desc.name()].reset(new VarDesc(var_desc));
  }
  for (const proto::OpDesc &op_desc : desc_->ops()) {
    ops_.emplace_back(new OpDesc(op_desc, prog, this));
  }
}

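// Deep-copies `other`: every op and variable is cloned, so the new block can
// be mutated independently of the original.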
BlockDesc::BlockDesc(const BlockDesc &other, proto::BlockDesc *desc,
                     ProgramDesc *prog)
    : prog_(prog), desc_(desc) {
  need_update_ = true;
  for (auto &op : other.ops_) {
    ops_.emplace_back(new OpDesc(*op->Proto(), prog, this));
  }
  for (auto &it : other.vars_) {
    auto *var = new VarDesc(*it.second);
    vars_[it.first].reset(var);
  }
}

void BlockDesc::ClearPBOps() {
  auto ops = this->desc_->mutable_ops();
  while (!ops->empty()) {
    // The op protos are owned by their OpDesc objects, not by this proto,
    // so release them instead of deleting.
    ops->ReleaseLast();
  }
}

void BlockDesc::ClearPBVars() {
  auto vars = this->desc_->mutable_vars();
  while (!vars->empty()) {
    // The var protos are owned by their VarDesc objects, not by this proto,
    // so release them instead of deleting.
    vars->ReleaseLast();
  }
}

}  // namespace framework
}  // namespace paddle