/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/framework/scope.h"

#include "glog/logging.h"
#include "paddle/fluid/framework/threadpool.h"

DECLARE_bool(benchmark);

PADDLE_DEFINE_EXPORTED_bool(
    eager_delete_scope,
    true,
    "Delete local scope eagerly. It will reduce GPU memory usage but "
    "slow down the destruction of variables.(around 1% performance harm)");

// In inference scenarios a scope is never written by two threads at the same
// time, but it may be read by many threads concurrently, and the
// reader/writer mutex then becomes a serious performance bottleneck.
// The lock macros therefore expand to nothing when `PADDLE_ON_INFERENCE`
// is defined.
#ifdef PADDLE_ON_INFERENCE
#define SCOPE_KIDS_READER_LOCK
#define SCOPE_KIDS_WRITER_LOCK
#define SCOPE_VARS_READER_LOCK
#define SCOPE_VARS_WRITER_LOCK
#else
#define SCOPE_KIDS_READER_LOCK phi::AutoRDLock auto_lock(&kids_lock_);
#define SCOPE_KIDS_WRITER_LOCK phi::AutoWRLock auto_lock(&kids_lock_);
#define SCOPE_VARS_READER_LOCK phi::AutoRDLock auto_lock(&vars_lock_);
#define SCOPE_VARS_WRITER_LOCK phi::AutoWRLock auto_lock(&vars_lock_);
#endif

namespace paddle {
namespace framework {
// Child scopes registered via NewScope() are owned by this scope, so they are
// destroyed together with it.
Scope::~Scope() { DropKids(); }
// Creates a child scope owned by this scope and returns a reference to it.
// The child keeps a back-pointer to its parent for variable lookup.
Scope& Scope::NewScope() const {
  auto* new_kid = new Scope(this);
  {
    SCOPE_KIDS_WRITER_LOCK
    kids_.push_back(new_kid);
  }
  return *new_kid;
}

// Creates a scope that chains to `this` for lookups but is NOT registered in
// kids_; the caller owns it outright and DropKids() never touches it.
std::unique_ptr<Scope> Scope::NewTmpScope() const {
  std::unique_ptr<Scope> tmp(new Scope(this));
  return tmp;
}
// Finds-or-creates the variable `name` in this scope, then fires the
// onCreateVariable callbacks.
// NOTE(xiongkun03): the extra braces drop the writer lock before the listener
// callbacks run, so callbacks never execute while the lock is held.
Variable* Scope::Var(const std::string& name) {
  Variable* var = nullptr;
  {
    SCOPE_VARS_WRITER_LOCK
    var = VarInternal(name);
  }
  for (auto& listener : listeners_) {
    listener->onCreateVariable(name, var);
  }
  return var;
}

// Creates a variable with an automatically generated, scope-unique name.
// If `name` is non-null, the generated name is written back through it.
Variable* Scope::Var(std::string* name) {
  Variable* var = nullptr;
  std::string generated;
  {
    SCOPE_VARS_WRITER_LOCK
    // Unique within this scope: "<scope address>.<current var count>".
    generated = std::to_string(reinterpret_cast<uintptr_t>(this)) + "." +
                std::to_string(vars_.size());
    if (name != nullptr) {
      *name = generated;
    }
    var = VarInternal(generated);
  }
  // Callbacks run after the writer lock is released.
  for (auto& listener : listeners_) {
    listener->onCreateVariable(generated, var);
  }
  return var;
}

// Variable lookup that falls back to ancestor scopes on a local miss.
// Returns nullptr when no scope in the chain has the variable.
Variable* Scope::FindVar(const std::string& name) const {
  SCOPE_VARS_READER_LOCK
  return FindVarInternal(name);
}

// Like FindVar(), but a missing variable is a NotFound error rather than a
// nullptr return.
Variable* Scope::GetVar(const std::string& name) const {
  Variable* found = FindVar(name);
  PADDLE_ENFORCE_NOT_NULL(
      found, platform::errors::NotFound("Cannot find %s in scope.", name));
  return found;
}

// Variable lookup restricted to this scope only (no parent fallback).
Variable* Scope::FindLocalVar(const std::string& name) const {
  SCOPE_VARS_READER_LOCK
  return FindVarLocally(name);
}

// Returns the scope in this chain (this or an ancestor) that owns `var`,
// or nullptr when none does.
const Scope* Scope::FindScope(const Variable* var) const {
  SCOPE_VARS_READER_LOCK
  return FindScopeInternal(var);
}
// Returns the scope in this chain (this or an ancestor) that has a variable
// named `name`, or nullptr when none does.
const Scope* Scope::FindScope(const std::string& name) const {
  SCOPE_VARS_READER_LOCK
  return FindScopeInternal(name);
}

void Scope::DropKids() {
122 123 124 125 126 127 128 129
  {
    SCOPE_KIDS_WRITER_LOCK
    for (Scope* s : kids_) delete s;
    kids_.clear();
  }
  for (auto l : listeners_) {
    l->onClear();
  }
Y
Yu Yang 已提交
130
}
bool Scope::HasKid(const Scope* scope) const {
M
minqiyang 已提交
133
  SCOPE_KIDS_READER_LOCK
M
minqiyang 已提交
134 135 136 137
  auto it = std::find(this->kids_.begin(), this->kids_.end(), scope);
  return it != this->kids_.end();
}

std::vector<std::string> Scope::LocalVarNames() const {
  std::vector<std::string> known_vars;
M
minqiyang 已提交
140 141 142 143 144 145
  {
    SCOPE_VARS_READER_LOCK
    known_vars.reserve(this->vars_.size());
    for (auto& p : vars_) {
      known_vars.emplace_back(p.first);
    }
146 147 148 149
  }
  return known_vars;
}

std::vector<Variable*> Scope::LocalVars() {
  std::vector<Variable*> known_vars;
  {
    SCOPE_VARS_READER_LOCK
    known_vars.reserve(this->vars_.size());
    for (auto& p : vars_) {
      known_vars.emplace_back(p.second.get());
    }
  }
  return known_vars;
}

// Detaches `scope` from kids_ and destroys it — synchronously when
// FLAGS_benchmark or FLAGS_eager_delete_scope is set, otherwise on the
// global thread pool via Async().
void Scope::DeleteScope(Scope* scope) const {
  {
    SCOPE_KIDS_WRITER_LOCK
    auto pos = std::find(kids_.begin(), kids_.end(), scope);
    PADDLE_ENFORCE_NE(pos,
                      kids_.end(),
                      platform::errors::NotFound(
                          "%p is not found in %p as kid scope", scope, this));
    kids_.erase(pos);
    // When making memory benchmark on Fluid, we have to delete scope sync.
    if (FLAGS_benchmark || FLAGS_eager_delete_scope) {
      delete scope;
    } else {
      Async([scope] { delete scope; });
    }
  }
  // NOTE(review): `scope` may already have been freed above; listeners must
  // treat the pointer as an identifier only — confirm with listener impls.
  for (auto& listener : listeners_) {
    listener->onDeleteScope(scope);
  }
}

void Scope::EraseVars(const std::vector<std::string>& var_names) {
184 185 186 187 188 189 190 191 192 193 194 195 196 197
  {
    std::set<std::string> var_set(var_names.begin(), var_names.end());
    SCOPE_VARS_WRITER_LOCK
    for (auto it = vars_.begin(); it != vars_.end();) {
      if (var_set.find(it->first) != var_set.end()) {
        it = vars_.erase(it);
      } else {
        ++it;
      }
    }
  }
  for (auto l : listeners_) {
    for (auto& var_name : var_names) {
      l->onDeleteVariable(var_name);
198 199 200 201
    }
  }
}

void Scope::Rename(const std::string& origin_name,
                   const std::string& new_name) const {
204 205 206 207 208 209 210
  {
    SCOPE_VARS_WRITER_LOCK
    RenameInternal(origin_name, new_name);
  }
  for (auto l : listeners_) {
    l->onRenameVariable(origin_name, new_name);
  }
T
tensor-tang 已提交
211 212 213 214
}

std::string Scope::Rename(const std::string& origin_name) const {
  auto new_name = string::Sprintf("%p.%d", this, vars_.size());
215 216 217 218 219 220 221
  {
    SCOPE_VARS_WRITER_LOCK
    RenameInternal(origin_name, new_name);
  }
  for (auto l : listeners_) {
    l->onRenameVariable(origin_name, new_name);
  }
T
tensor-tang 已提交
222 223 224 225 226 227 228
  return new_name;
}

// Returns the existing variable named `name`, or creates a new empty one
// owned by this scope. Callers in this file hold the vars writer lock.
Variable* Scope::VarInternal(const std::string& name) {
  Variable* existing = FindVarLocally(name);
  if (existing != nullptr) {
    return existing;
  }
  std::unique_ptr<Variable> created(new Variable());
  Variable* raw = created.get();
  vars_.emplace(name, std::move(created));
  VLOG(3) << "Create variable " << name;
  return raw;
}

// Returns the nearest scope (this or an ancestor) that owns `var`, or
// nullptr when none does.
const Scope* Scope::FindScopeInternal(const Variable* var) const {
  bool owned_here =
      std::any_of(vars_.begin(), vars_.end(), [var](const auto& entry) {
        return entry.second.get() == var;
      });
  if (owned_here) {
    return this;
  }
  return (parent_ == nullptr) ? nullptr : parent_->FindScope(var);
}

// Returns the nearest scope (this or an ancestor) holding a variable named
// `name`, or nullptr when none does.
const Scope* Scope::FindScopeInternal(const std::string& name) const {
  if (vars_.count(name) > 0) {
    return this;
  }
  return (parent_ == nullptr) ? nullptr : parent_->FindScope(name);
}

void Scope::RenameInternal(const std::string& origin_name,
                           const std::string& new_name) const {
Y
Yu Yang 已提交
252
  auto origin_it = vars_.find(origin_name);
253
  PADDLE_ENFORCE_NE(
254 255
      origin_it,
      vars_.end(),
256 257 258
      platform::errors::NotFound(
          "Original variable with name %s is not found in the scope.",
          origin_name));
Y
Yu Yang 已提交
259
  auto new_it = vars_.find(new_name);
260
  PADDLE_ENFORCE_EQ(
261 262
      new_it,
      vars_.end(),
263 264
      platform::errors::AlreadyExists(
          "The variable with name %s already exists in the scope.", new_name));
265
  vars_[new_name].reset(origin_it->second.release());
Y
Yu Yang 已提交
266 267 268
  vars_.erase(origin_it);
}

// Recursive lookup: this scope first, then the parent chain via FindVar()
// (which re-acquires the parent's reader lock).
Variable* Scope::FindVarInternal(const std::string& name) const {
  Variable* local = FindVarLocally(name);
  if (local != nullptr) {
    return local;
  }
  if (parent_ == nullptr) {
    return nullptr;
  }
  return parent_->FindVar(name);
}
// Searches only this scope's own vars_ (no parent fallback); returns nullptr
// on a miss. Callers in this file take the vars lock before calling.
Variable* Scope::FindVarLocally(const std::string& name) const {
  auto found = vars_.find(name);
  return (found == vars_.end()) ? nullptr : found->second.get();
}
// Registers `listener` for scope events; duplicate registrations are ignored.
void Scope::AddListener(const std::shared_ptr<ScopeListener>& listener) {
  bool already_present =
      std::find(listeners_.begin(), listeners_.end(), listener) !=
      listeners_.end();
  if (!already_present) {
    listeners_.push_back(listener);
  }
}

void Scope::DelListener(const std::shared_ptr<ScopeListener>& listener) {
293 294 295
  listeners_.remove(listener);
}

// Returns true iff `listener` is currently registered on this scope.
bool Scope::HasListener(const std::shared_ptr<ScopeListener>& listener) {
  return std::find(listeners_.begin(), listeners_.end(), listener) !=
         listeners_.end();
}

// Drops every local variable whose pointer is NOT in `vars`. Unlike
// EraseVars(), no listener callbacks are fired.
void Scope::EraseVarsExcept(const std::unordered_set<Variable*>& vars) {
  SCOPE_VARS_WRITER_LOCK
  for (auto it = vars_.begin(); it != vars_.end();) {
    if (vars.count(it->second.get()) == 0) {
      it = vars_.erase(it);
    } else {
      ++it;
    }
  }
}

// Renders the scope tree rooted at `root` as text: first a breadth-first
// listing of scope addresses (one line of dashes after each level), then a
// per-scope dump of local variable names.
std::string GenScopeTreeDebugInfo(Scope* root) {
  if (root == nullptr) {
    return "";
  }

  std::stringstream os;
  std::vector<Scope*> visited;
  std::queue<Scope*> pending;
  pending.push(root);

  while (!pending.empty()) {
    // The current back of the queue marks the last scope of this level;
    // everything pushed during the inner loop belongs to the next level.
    Scope* level_end = pending.back();
    Scope* current = nullptr;
    while (current != level_end) {
      current = pending.front();
      pending.pop();
      os << current << " ";
      visited.push_back(current);

      for (auto* kid : current->kids()) {
        pending.push(kid);
      }
    }
    os << "\n------------------------------------------\n";
  }

  os << "\nDetails:\n\n";

  for (Scope* s : visited) {
    os << "====\n";
    os << s << ":\n";
    for (auto& var_name : s->LocalVarNames()) {
      os << "  - " << var_name << "\n";
    }
  }

  return os.str();
}

}  // namespace framework
}  // namespace paddle