/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/scope.h"

#include "glog/logging.h"
#include "paddle/fluid/framework/threadpool.h"

DECLARE_bool(benchmark);

PADDLE_DEFINE_EXPORTED_bool(
    eager_delete_scope, true,
    "Delete local scope eagerly. It will reduce GPU memory usage but "
    "slow down the destruction of variables.(around 1% performance harm)");

// In the inference scenario, a scope will not be written by two threads at
// the same time, but it may be read by multiple threads concurrently, and
// the mutex would cause a serious performance issue.
// So the mutex is disabled when `ON_INFER` is defined.
#ifdef PADDLE_ON_INFERENCE
#define SCOPE_KIDS_READER_LOCK
#define SCOPE_KIDS_WRITER_LOCK
#define SCOPE_VARS_READER_LOCK
#define SCOPE_VARS_WRITER_LOCK
#else
#define SCOPE_KIDS_READER_LOCK phi::AutoRDLock auto_lock(&kids_lock_);
#define SCOPE_KIDS_WRITER_LOCK phi::AutoWRLock auto_lock(&kids_lock_);
#define SCOPE_VARS_READER_LOCK phi::AutoRDLock auto_lock(&vars_lock_);
#define SCOPE_VARS_WRITER_LOCK phi::AutoWRLock auto_lock(&vars_lock_);
#endif

namespace paddle {
namespace framework {

Scope::~Scope() { DropKids(); }

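// Creates a child scope owned by this scope. The child is registered in
// kids_ and is released by DropKids() or DeleteScope().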
Scope& Scope::NewScope() const {
  Scope* child = new Scope(this);
  {
    SCOPE_KIDS_WRITER_LOCK
    kids_.push_back(child);
  }
  return *child;
}

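// Creates a temporary child scope that is owned by the caller through the
// returned unique_ptr and is not registered in kids_, so DropKids() does
// not affect it.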
std::unique_ptr<Scope> Scope::NewTmpScope() const {
  return std::unique_ptr<Scope>(new Scope(this));
}

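// Returns the variable named `name`, creating it in this scope if it does
// not already exist locally. Listeners are notified after the lock is
// released (see the NOTE below).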
Variable* Scope::Var(const std::string& name) {
  // NOTE(xiongkun03): the extra braces release the lock early, so the
  // listener callbacks below run after the lock has been released.
  Variable* ret = nullptr;
  {
    SCOPE_VARS_WRITER_LOCK
    ret = VarInternal(name);
  }
  for (auto l : listeners_) {
    l->onCreateVariable(name, ret);
  }
  return ret;
}

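// Creates a variable with a generated, locally unique name of the form
// "<scope address>.<number of local vars>". The generated name is written
// back through `name` when it is not null.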
Variable* Scope::Var(std::string* name) {
  Variable* ret = nullptr;
  std::string new_name;
  {
    SCOPE_VARS_WRITER_LOCK
    new_name = std::to_string(reinterpret_cast<uintptr_t>(this)) + "." +
               std::to_string(vars_.size());
    if (name != nullptr) {
      *name = new_name;
    }
    ret = VarInternal(new_name);
  }
  for (auto l : listeners_) {
    l->onCreateVariable(new_name, ret);
  }
  return ret;
}

Variable* Scope::FindVar(const std::string& name) const {
  SCOPE_VARS_READER_LOCK
  return FindVarInternal(name);
}

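// Like FindVar(), but raises a NotFound error instead of returning nullptr
// when the variable does not exist.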
Variable* Scope::GetVar(const std::string& name) const {
  auto* var = FindVar(name);
  PADDLE_ENFORCE_NOT_NULL(
      var, platform::errors::NotFound("Cannot find %s in scope.", name));
  return var;
}

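// Unlike FindVar(), FindLocalVar() searches only this scope and does not
// fall back to the parent scopes.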
Variable* Scope::FindLocalVar(const std::string& name) const {
  SCOPE_VARS_READER_LOCK
  return FindVarLocally(name);
}

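// Returns the innermost scope (this scope or an ancestor) that directly
// holds `var`, or nullptr if no such scope exists.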
const Scope* Scope::FindScope(const Variable* var) const {
  SCOPE_VARS_READER_LOCK
  return FindScopeInternal(var);
}

const Scope* Scope::FindScope(const std::string& name) const {
  SCOPE_VARS_READER_LOCK
  return FindScopeInternal(name);
}

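// Deletes every child scope created through NewScope() and then notifies
// the listeners via onClear().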
void Scope::DropKids() {
  {
    SCOPE_KIDS_WRITER_LOCK
    for (Scope* s : kids_) delete s;
    kids_.clear();
  }
  for (auto l : listeners_) {
    l->onClear();
  }
}

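// Returns true if `scope` is a direct child of this scope.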
bool Scope::HasKid(const Scope* scope) const {
  SCOPE_KIDS_READER_LOCK
  auto it = std::find(this->kids_.begin(), this->kids_.end(), scope);
  return it != this->kids_.end();
}

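// Returns the names of the variables owned directly by this scope; variables
// of parent scopes are not included.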
std::vector<std::string> Scope::LocalVarNames() const {
  std::vector<std::string> known_vars;
  {
    SCOPE_VARS_READER_LOCK
    known_vars.reserve(this->vars_.size());
    for (auto& p : vars_) {
      known_vars.emplace_back(p.first);
    }
  }
  return known_vars;
}

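// Returns pointers to the variables owned directly by this scope.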
std::vector<Variable*> Scope::LocalVars() {
  std::vector<Variable*> known_vars;
  {
    SCOPE_VARS_READER_LOCK
    known_vars.reserve(this->vars_.size());
    for (auto& p : vars_) {
      known_vars.emplace_back(p.second.get());
    }
  }
  return known_vars;
}

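// Removes `scope` from kids_ and destroys it: synchronously when
// FLAGS_benchmark or FLAGS_eager_delete_scope is set, otherwise
// asynchronously on the framework thread pool.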
void Scope::DeleteScope(Scope* scope) const {
  {
    SCOPE_KIDS_WRITER_LOCK
    auto it = std::find(this->kids_.begin(), this->kids_.end(), scope);
    PADDLE_ENFORCE_NE(it, this->kids_.end(),
                      platform::errors::NotFound(
                          "%p is not found in %p as kid scope", scope, this));
    this->kids_.erase(it);
    // When benchmarking memory usage on Fluid, we have to delete the scope
    // synchronously.
    if (FLAGS_benchmark || FLAGS_eager_delete_scope) {
      delete scope;
    } else {
      Async([scope] { delete scope; });
    }
  }
  for (auto l : listeners_) {
    l->onDeleteScope(scope);
  }
}

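// Erases the named variables from this scope (names that are not present
// are ignored) and notifies the listeners for every requested name.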
void Scope::EraseVars(const std::vector<std::string>& var_names) {
  {
    std::set<std::string> var_set(var_names.begin(), var_names.end());
    SCOPE_VARS_WRITER_LOCK
    for (auto it = vars_.begin(); it != vars_.end();) {
      if (var_set.find(it->first) != var_set.end()) {
        it = vars_.erase(it);
      } else {
        ++it;
      }
    }
  }
  for (auto l : listeners_) {
    for (auto& var_name : var_names) {
      l->onDeleteVariable(var_name);
    }
  }
}

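// Renames an existing local variable; it is an error if `origin_name` does
// not exist or `new_name` is already taken (see RenameInternal).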
void Scope::Rename(const std::string& origin_name,
                   const std::string& new_name) const {
  {
    SCOPE_VARS_WRITER_LOCK
    RenameInternal(origin_name, new_name);
  }
  for (auto l : listeners_) {
    l->onRenameVariable(origin_name, new_name);
  }
}

std::string Scope::Rename(const std::string& origin_name) const {
  auto new_name = string::Sprintf("%p.%d", this, vars_.size());
  {
    SCOPE_VARS_WRITER_LOCK
    RenameInternal(origin_name, new_name);
  }
  for (auto l : listeners_) {
    l->onRenameVariable(origin_name, new_name);
  }
  return new_name;
}

Variable* Scope::VarInternal(const std::string& name) {
  auto* v = FindVarLocally(name);
  if (v != nullptr) return v;
  v = new Variable();
  vars_.emplace(name, std::unique_ptr<Variable>(v));
  VLOG(3) << "Create variable " << name;
  return v;
}

const Scope* Scope::FindScopeInternal(const Variable* var) const {
  for (auto& kv : vars_) {
    if (kv.second.get() == var) {
      return this;
    }
  }
  return (parent_ == nullptr) ? nullptr : parent_->FindScope(var);
}

const Scope* Scope::FindScopeInternal(const std::string& name) const {
  if (vars_.find(name) != vars_.end()) {
    return this;
  }
  return (parent_ == nullptr) ? nullptr : parent_->FindScope(name);
}

void Scope::RenameInternal(const std::string& origin_name,
                           const std::string& new_name) const {
  auto origin_it = vars_.find(origin_name);
  PADDLE_ENFORCE_NE(
      origin_it, vars_.end(),
      platform::errors::NotFound(
          "Original variable with name %s is not found in the scope.",
          origin_name));
  auto new_it = vars_.find(new_name);
  PADDLE_ENFORCE_EQ(
      new_it, vars_.end(),
      platform::errors::AlreadyExists(
          "The variable with name %s already exists in the scope.", new_name));
  vars_[new_name].reset(origin_it->second.release());
  vars_.erase(origin_it);
}

Variable* Scope::FindVarInternal(const std::string& name) const {
  auto var = FindVarLocally(name);
  if (var != nullptr) {
    return var;
  }
  return (parent_ == nullptr) ? nullptr : parent_->FindVar(name);
}

Variable* Scope::FindVarLocally(const std::string& name) const {
  auto it = vars_.find(name);
  if (it != vars_.end()) {
    return it->second.get();
  }
  return nullptr;
}

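// Registers a listener; duplicate registrations of the same listener are
// ignored.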
void Scope::AddListener(const std::shared_ptr<ScopeListener>& listener) {
  auto it = std::find(listeners_.begin(), listeners_.end(), listener);
  if (it == listeners_.end()) {
    listeners_.push_back(listener);
  }
}

void Scope::DelListener(const std::shared_ptr<ScopeListener>& listener) {
  listeners_.remove(listener);
}

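// Drops every local variable whose pointer is not contained in `vars`.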
void Scope::EraseVarsExcept(const std::unordered_set<Variable*>& vars) {
  SCOPE_VARS_WRITER_LOCK
  for (auto iter = vars_.begin(); iter != vars_.end();) {
    if (vars.count(iter->second.get()) != 0) {
      ++iter;
    } else {
      vars_.erase(iter++);
    }
  }
}

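// Dumps the scope tree rooted at `root` for debugging: first a level-order
// listing of scope addresses, then the local variable names of each scope.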
std::string GenScopeTreeDebugInfo(Scope* root) {
  std::stringstream os;

  if (!root) return "";

  // level traversal
  std::queue<Scope*> queue;
  queue.push(root);

  std::vector<Scope*> scopes;

  while (!queue.empty()) {
    auto* end = queue.back();
    Scope* q = nullptr;
    while (q != end) {
      q = queue.front();
      queue.pop();
      os << q << " ";
      scopes.push_back(q);

      for (auto* c : q->kids()) {
        queue.push(c);
      }
    }
    // end of a level
    os << "\n------------------------------------------\n";
  }

  os << "\nDetails:\n\n";

  for (Scope* q : scopes) {
    os << "====\n";
    os << q << ":\n";
    for (auto& var : q->LocalVarNames()) {
      os << "  - " << var << "\n";
    }
  }

  return os.str();
}

}  // namespace framework
}  // namespace paddle