/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/framework/block_desc.h"
#include "paddle/framework/operator.h"
#include "paddle/framework/program_desc.h"

namespace paddle {
namespace framework {

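// Returns the variable named `name`, creating it in this block first if it
// does not exist yet.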
VarDesc *BlockDesc::Var(const std::string &name) {
  auto it = vars_.find(name);
  if (it != vars_.end()) {
    return it->second.get();
  }
  need_update_ = true;
  auto *var = new VarDesc(name);
  vars_[name].reset(var);
  return var;
}

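// Looks up `name` in this block only; returns nullptr when it is absent.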
VarDesc *BlockDesc::FindVar(const std::string &name) const {
  auto it = vars_.find(name);
  if (it == vars_.end()) {
    return nullptr;
  }
  return it->second.get();
}

bool BlockDesc::HasVar(const std::string &name) const {
  return vars_.find(name) != vars_.end();
}

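// Renames the variable `old_name` to `new_name` in this block and updates
// every op input and output that refers to it. A no-op when `old_name`
// does not exist in this block.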
void BlockDesc::RenameVar(const std::string &old_name,
                          const std::string &new_name) {
  auto iter = vars_.find(old_name);
  if (iter == vars_.end()) {
    return;
  }
  need_update_ = true;
  // Move ownership out of the map before erasing the old key; resetting
  // vars_[new_name] with the raw pointer and then erasing old_name would
  // destroy the VarDesc and leave the new entry dangling.
  std::unique_ptr<VarDesc> var = std::move(iter->second);
  vars_.erase(iter);
  var->SetName(new_name);
  vars_[new_name] = std::move(var);
  // Rename every input and output argument that refers to the variable.
  for (const auto &op : ops_) {
    for (const auto &in_name : op->InputArgumentNames()) {
      if (in_name == old_name) {
        op->RenameInput(old_name, new_name);
      }
    }
    for (const auto &out_name : op->OutputArgumentNames()) {
      if (out_name == old_name) {
        op->RenameOutput(old_name, new_name);
      }
    }
  }
}

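// Looks up `name` in this block and then in its ancestor blocks; returns
// nullptr when no enclosing block defines it.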
VarDesc *BlockDesc::FindVarRecursive(const std::string &name) const {
  if (name == kEmptyVarName) return nullptr;

  auto it = vars_.find(name);
  if (it == vars_.end()) {
    return Parent() == kNoneBlockIndex ? nullptr
                                       : ParentBlock()->FindVarRecursive(name);
  }
  return it->second.get();
}

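// Like FindVarRecursive(), but creates the variable in this block when the
// lookup fails, so the returned reference is always valid.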
VarDesc &BlockDesc::FindRecursiveOrCreateVar(const std::string &name_bytes) {
  VarDesc *res = FindVarRecursive(name_bytes);
  if (res == nullptr) {
    res = Var(name_bytes);
  }
  return *res;
}

bool BlockDesc::HasVarRecursive(const std::string &name) const {
  return FindVarRecursive(name) != nullptr;
}

std::vector<VarDesc *> BlockDesc::AllVars() const {
  std::vector<VarDesc *> res;
  for (const auto &p : vars_) {
    res.push_back(p.second.get());
  }
  return res;
}

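// Appends a new, empty op to the end of this block and returns it.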
OpDesc *BlockDesc::AppendOp() {
  need_update_ = true;
  ops_.emplace_back(new OpDesc(this));
  return ops_.back().get();
}

void BlockDesc::AppendAllocatedOp(std::unique_ptr<OpDesc> &&op_desc) {
  need_update_ = true;
  ops_.emplace_back(std::move(op_desc));
}

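// Inserts a new, empty op at the front of this block and returns it.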
OpDesc *BlockDesc::PrependOp() {
  need_update_ = true;
  ops_.emplace_front(new OpDesc(this));
  return ops_.front().get();
}

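// Removes the ops in the half-open interval [s, e) from this block. The
// variables those ops reference are left in place for now (see the TODO
// below).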
void BlockDesc::RemoveOp(size_t s, size_t e) {
  if (s >= e || e > ops_.size()) {
    // Nothing to do for an empty or out-of-range interval.
    return;
  }
  need_update_ = true;
  for (auto it = ops_.begin() + s; it != ops_.begin() + e; ++it) {
    for (const auto &n : (*it)->InputArgumentNames()) {
      // TODO(typhoonzero): delete vars if no other op uses them.
      VLOG(3) << "deleting var " << n;
    }
  }
  ops_.erase(ops_.begin() + s, ops_.begin() + e);
}

std::vector<OpDesc *> BlockDesc::AllOps() const {
  std::vector<OpDesc *> res;
  for (const auto &op : ops_) {
    res.push_back(op.get());
  }
  return res;
}

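// Serializes the in-memory state into the wrapped proto::BlockDesc. Each
// op is flushed unconditionally; the proto's repeated op/var fields are
// rebuilt only when need_update_ is set.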
void BlockDesc::Flush() {
  for (auto &op_desc : ops_) {
    op_desc->Flush();
  }

  if (need_update_) {
    auto &op_field = *this->desc_->mutable_ops();
    this->ClearPBOps();
    op_field.Reserve(static_cast<int>(ops_.size()));
    for (auto &op_desc : ops_) {
      op_field.AddAllocated(op_desc->Proto());
    }
    auto &var_field = *this->desc_->mutable_vars();
    this->ClearPBVars();
    var_field.Reserve(static_cast<int>(vars_.size()));
    for (auto &var_desc : vars_) {
      var_field.AddAllocated(var_desc.second->Proto());
    }
    need_update_ = false;
  }
}

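// Returns the parent block, or nullptr when this block has no parent.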
BlockDesc *BlockDesc::ParentBlock() const {
  if (this->desc_->parent_idx() == kNoneBlockIndex) {
    return nullptr;
  }
  return prog_->MutableBlock(static_cast<size_t>(this->desc_->parent_idx()));
}

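// Flushes pending changes and returns the up-to-date underlying proto.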
proto::BlockDesc *BlockDesc::Proto() {
  Flush();
  return desc_;
}

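// Builds the in-memory VarDesc and OpDesc wrappers from an existing proto
// block.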
BlockDesc::BlockDesc(ProgramDesc *prog, proto::BlockDesc *desc)
    : prog_(prog), desc_(desc), need_update_(false) {
  for (const proto::VarDesc &var_desc : desc_->vars()) {
    vars_[var_desc.name()].reset(new VarDesc(var_desc));
  }
  for (const proto::OpDesc &op_desc : desc_->ops()) {
    ops_.emplace_back(new OpDesc(op_desc, prog, this));
  }
}

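// Deep-copy constructor: ops are rebuilt from the source ops' protos and
// vars are copied, so the new block owns fully independent descriptions.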
BlockDesc::BlockDesc(const BlockDesc &other, proto::BlockDesc *desc,
                     ProgramDesc *prog)
    : prog_(prog), desc_(desc) {
  need_update_ = true;
  for (auto &op : other.ops_) {
    ops_.emplace_back(new OpDesc(*op->Proto(), prog, this));
  }
  for (auto &it : other.vars_) {
    auto *var = new VarDesc(*it.second);
    vars_[it.first].reset(var);
  }
}

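// ClearPBOps() and ClearPBVars() detach the op/var protos from desc_
// without destroying them: those messages are owned by ops_ and vars_,
// which handed them to the proto via AddAllocated() in Flush().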
void BlockDesc::ClearPBOps() {
  auto ops = this->desc_->mutable_ops();
  while (!ops->empty()) {
    // we do not own the OpDesc, so release the ownership.
    ops->ReleaseLast();
  }
}

void BlockDesc::ClearPBVars() {
  auto vars = this->desc_->mutable_vars();
  while (!vars->empty()) {
    // we do not own the VarDesc, so release the ownership.
    vars->ReleaseLast();
  }
}

}  // namespace framework
}  // namespace paddle