/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/program_desc.h"

#include <queue>
#include <unordered_set>
#include <utility>

namespace paddle {
namespace framework {

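// Returns the variable named `name` in this block, creating it if it does
// not exist yet. Creation marks the block as modified so that the next
// Flush() re-serializes it into the protobuf message.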
VarDesc *BlockDesc::Var(const std::string &name) {
  auto it = vars_.find(name);
  if (it != vars_.end()) {
    return it->second.get();
  }
  need_update_ = true;
  auto *var = new VarDesc(name);
  vars_[name].reset(var);
  return var;
}

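// Non-recursive lookup: searches only this block and returns nullptr if the
// variable is not found here.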
VarDesc *BlockDesc::FindVar(const std::string &name) const {
  auto it = vars_.find(name);
  if (it == vars_.end()) {
    return nullptr;
  }
  return it->second.get();
}

bool BlockDesc::HasVar(const std::string &name) const {
  return vars_.find(name) != vars_.end();
}

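// Recursive lookup: breadth-first search over this block, its forward block,
// and its chain of parent blocks. The visited set guards against cycles in
// the block graph.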
VarDesc *BlockDesc::FindVarRecursive(const std::string &name) const {
  if (name == kEmptyVarName) return nullptr;

  std::queue<const BlockDesc *> frontier;
  std::unordered_set<const BlockDesc *> visited;

  frontier.push(this);

  while (!frontier.empty()) {  // BFS
    auto cur = frontier.front();
    frontier.pop();
    if (visited.count(cur) != 0) {
      continue;
    }
    auto var = cur->FindVar(name);
    if (var != nullptr) {
      return var;
    }

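    // Not found here: continue the search in the forward block (if any) and
    // in the enclosing parent block.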
    auto fwd = cur->ForwardBlock();
    auto parent = cur->ParentBlock();

    if (fwd != nullptr) {
      frontier.push(fwd);
    }
    if (parent != nullptr) {
      frontier.push(parent);
    }

    visited.insert(cur);
  }

  return nullptr;
}

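// Like FindVarRecursive(), but creates the variable in this block when no
// enclosing block defines it.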
VarDesc &BlockDesc::FindRecursiveOrCreateVar(const std::string &name_bytes) {
  VarDesc *res = FindVarRecursive(name_bytes);
  if (res == nullptr) {
    res = Var(name_bytes);
  }
  return *res;
}

bool BlockDesc::HasVarRecursive(const std::string &name) const {
  return FindVarRecursive(name) != nullptr;
}

std::vector<VarDesc *> BlockDesc::AllVars() const {
  std::vector<VarDesc *> res;
  for (const auto &p : vars_) {
    res.push_back(p.second.get());
  }
  return res;
}

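// Appends a new, empty op; the caller configures it through the returned
// pointer.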
OpDesc *BlockDesc::AppendOp() {
  need_update_ = true;
  ops_.emplace_back(new OpDesc(this));
  return ops_.back().get();
}

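// Takes ownership of an already-constructed OpDesc and appends it.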
void BlockDesc::AppendAllocatedOp(std::unique_ptr<OpDesc> &&op_desc) {
  need_update_ = true;
  ops_.emplace_back(std::move(op_desc));
}

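// Inserts a new, empty op at the front of the op list.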
OpDesc *BlockDesc::PrependOp() {
  need_update_ = true;
  ops_.emplace_front(new OpDesc(this));
  return ops_.front().get();
}

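// Removes the ops in the half-open index range [s, e).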
void BlockDesc::RemoveOp(size_t s, size_t e) {
  if (s >= ops_.size() || e > ops_.size() || s >= e) {
    return;
  }
  need_update_ = true;
  for (auto it = ops_.begin() + s; it != ops_.begin() + e; it++) {
    auto names = (*it)->InputArgumentNames();
    for (auto n : names) {
      // TODO(typhoonzero): delete vars if no other op use it.
      VLOG(3) << "deleting var " << n;
    }
  }
  ops_.erase(ops_.begin() + s, ops_.begin() + e);
}

std::vector<OpDesc *> BlockDesc::AllOps() const {
  std::vector<OpDesc *> res;
  for (const auto &op : ops_) {
    res.push_back(op.get());
  }
  return res;
}

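// Serializes the in-memory state into the underlying proto::BlockDesc. The
// proto op/var fields are rebuilt from scratch whenever the block has been
// modified since the last flush.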
void BlockDesc::Flush() {
  for (auto &op_desc : ops_) {
    op_desc->Flush();
  }

  if (need_update_) {
    auto &op_field = *this->desc_->mutable_ops();
    this->ClearPBOps();
    op_field.Reserve(static_cast<int>(ops_.size()));
    for (auto &op_desc : ops_) {
      op_field.AddAllocated(op_desc->Proto());
    }
    auto &var_field = *this->desc_->mutable_vars();
    this->ClearPBVars();
    var_field.Reserve(static_cast<int>(vars_.size()));
    for (auto &var_desc : vars_) {
      var_field.AddAllocated(var_desc.second->Proto());
    }
    need_update_ = false;
  }
}

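// Resolves this block's parent through the owning ProgramDesc, using the
// parent index stored in the proto.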
BlockDesc *BlockDesc::ParentBlock() const {
  return prog_->MutableBlock(static_cast<size_t>(desc_->parent_idx()));
}

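// Flushes pending modifications before exposing the raw proto pointer.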
proto::BlockDesc *BlockDesc::Proto() {
  Flush();
  return desc_;
}

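// Wraps an existing proto::BlockDesc: builds VarDesc/OpDesc wrappers around
// the entries already stored in the proto.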
BlockDesc::BlockDesc(ProgramDesc *prog, proto::BlockDesc *desc)
    : prog_(prog), desc_(desc), need_update_(false) {
  for (const proto::VarDesc &var_desc : desc_->vars()) {
    vars_[var_desc.name()].reset(new VarDesc(var_desc));
  }
  for (const proto::OpDesc &op_desc : desc_->ops()) {
    ops_.emplace_back(new OpDesc(op_desc, prog, this));
  }
}

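// Deep copy: every op and var of `other` is duplicated into this block, and
// need_update_ is set so the target proto is filled in on the next Flush().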
BlockDesc::BlockDesc(const BlockDesc &other, proto::BlockDesc *desc,
                     ProgramDesc *prog)
    : prog_(prog), desc_(desc) {
  need_update_ = true;
  for (auto &op : other.ops_) {
    ops_.emplace_back(new OpDesc(*op->Proto(), prog, this));
  }
  for (auto &it : other.vars_) {
    auto *var = new VarDesc(*it.second);
    vars_[it.first].reset(var);
  }
}

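// Detaches all op entries from the proto without destroying them; the proto
// messages are owned by the corresponding OpDesc wrappers.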
void BlockDesc::ClearPBOps() {
  auto ops = this->desc_->mutable_ops();
  while (!ops->empty()) {
    // we do not own the OpDesc, so release the ownership.
    ops->ReleaseLast();
  }
}

void BlockDesc::ClearPBVars() {
  auto vars = this->desc_->mutable_vars();
  while (!vars->empty()) {
    // we do not own the VarDesc, so release the ownership.
    vars->ReleaseLast();
  }
}

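// Records this block's forward-block link. The link may be set only once;
// attempting to reset it is an error.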
void BlockDesc::SetForwardBlockID(int32_t forward_block_id) {
  PADDLE_ENFORCE(!desc_->has_forward_block_idx(),
                 "Forward block ID has been set to %d. Cannot set to %d",
                 desc_->forward_block_idx(), forward_block_id);
  desc_->set_forward_block_idx(forward_block_id);
}

BlockDesc *BlockDesc::ForwardBlock() const {
  return prog_->MutableBlock(static_cast<size_t>(desc_->forward_block_idx()));
}

}  // namespace framework
}  // namespace paddle