/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/scope.h"

#include <memory>  // for unique_ptr
#include <queue>
#include <set>
#include <unordered_set>
#include "glog/logging.h"
#include "paddle/fluid/framework/threadpool.h"
#include "paddle/fluid/string/printf.h"

DEFINE_bool(benchmark, false,
            "Do memory benchmarking. It will make scope deletion "
            "synchronous, and add some memory usage logs. CUDA devices are "
            "asynchronous by default; setting this to true will force ops "
            "to run in synchronous mode.");

DEFINE_bool(
    eager_delete_scope, true,
    "Delete local scope eagerly. It will reduce GPU memory usage but "
    "slow down the destruction of variables.(around 1% performance harm)");

DEFINE_double(
    eager_delete_tensor_gb, -1.0,
    "Memory size threshold (GB) at which the garbage collector clears "
    "tensors. Disabled when this value is less than 0.");

// In inference scenarios the scopes are never written by two threads at the
// same time, but a scope may be read by multiple threads concurrently, and
// the mutex would cause a serious performance issue.
// So the mutex is disabled when `ON_INFER` is defined.
#ifdef PADDLE_ON_INFERENCE
#define SCOPE_LOCK_GUARD
#else
#define SCOPE_LOCK_GUARD std::lock_guard<std::mutex> lock(mutex_);
#endif
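
// Illustrative expansion (a sketch, not part of the build): call sites write
// SCOPE_LOCK_GUARD without a trailing semicolon because the macro carries
// its own. In a non-inference build,
//
//   Variable* Scope::Var(const std::string& name) {
//     SCOPE_LOCK_GUARD
//     return VarInternal(name);
//   }
//
// compiles as if the guard line read
//
//   std::lock_guard<std::mutex> lock(mutex_);
//
// while under PADDLE_ON_INFERENCE it expands to nothing and no lock is taken.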

namespace paddle {
namespace framework {

int64_t GetEagerDeletionThreshold() {
  return FLAGS_eager_delete_tensor_gb < 0
             ? -1
             : static_cast<int64_t>(FLAGS_eager_delete_tensor_gb *
                                    (static_cast<int64_t>(1) << 30));
}
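
// Worked example (illustrative values): with --eager_delete_tensor_gb=1.5,
// the expression above yields
//   static_cast<int64_t>(1.5 * (1LL << 30)) == 1610612736 bytes;
// any negative flag value (including the -1.0 default) returns -1, which
// disables eager deletion.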

Scope::~Scope() { DropKids(); }

Scope& Scope::NewScope() const {
  SCOPE_LOCK_GUARD
  kids_.push_back(new Scope(this));
  return *kids_.back();
}
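
// Example usage (a sketch; the variable name "w" is hypothetical): a kid
// scope is owned by its parent, and lookup walks from kid to parent but
// never descends into kids:
//
//   paddle::framework::Scope root;
//   paddle::framework::Scope& child = root.NewScope();
//   child.Var("w");                        // created in the child scope
//   CHECK(child.FindVar("w") != nullptr);  // found locally
//   CHECK(root.FindVar("w") == nullptr);   // parents cannot see kids' vars
//   root.DropKids();                       // deletes `child` and its vars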

Variable* Scope::Var(const std::string& name) {
  SCOPE_LOCK_GUARD
  return VarInternal(name);
}

Variable* Scope::Var(std::string* name) {
  SCOPE_LOCK_GUARD
  auto new_name = string::Sprintf("%p.%d", this, vars_.size());
  if (name != nullptr) {
    *name = new_name;
  }
  return VarInternal(new_name);
}
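
// Note on auto-generated names (the exact output is illustrative): the
// string::Sprintf("%p.%d", this, vars_.size()) pattern above concatenates
// the scope's address and its current variable count, e.g.
// "0x55d1c2a08e40.3", so generated names are unique within a scope. The
// Rename(origin_name) overload below reuses the same scheme.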

Variable* Scope::FindVar(const std::string& name) const {
  SCOPE_LOCK_GUARD
  return FindVarInternal(name);
}

Variable* Scope::FindLocalVar(const std::string& name) const {
  SCOPE_LOCK_GUARD
  return FindVarLocally(name);
}

const Scope* Scope::FindScope(const Variable* var) const {
  SCOPE_LOCK_GUARD
  return FindScopeInternal(var);
}

void Scope::DropKids() {
  SCOPE_LOCK_GUARD
  for (Scope* s : kids_) delete s;
  kids_.clear();
}

bool Scope::HasKid(const Scope* scope) const {
  SCOPE_LOCK_GUARD
  auto it = std::find(this->kids_.begin(), this->kids_.end(), scope);
  return it != this->kids_.end();
}

std::vector<std::string> Scope::LocalVarNames() const {
  SCOPE_LOCK_GUARD
  std::vector<std::string> known_vars;
  known_vars.reserve(this->vars_.size());
  for (auto& p : vars_) {
    known_vars.emplace_back(p.first);
  }
  return known_vars;
}

void Scope::DeleteScope(Scope* scope) const {
  SCOPE_LOCK_GUARD
  auto it = std::find(this->kids_.begin(), this->kids_.end(), scope);
  PADDLE_ENFORCE(it != this->kids_.end(), "%p Cannot find %p as kid scope",
                 this, scope);
  this->kids_.erase(it);
  // When benchmarking memory in Fluid, scopes have to be deleted synchronously.
  if (FLAGS_benchmark || FLAGS_eager_delete_scope) {
    delete scope;
  } else {
    Async([scope] { delete scope; });
  }
}
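
// Illustrative flag combinations for the branch above:
//   --eager_delete_scope=true (default)  -> kid is deleted synchronously
//   --benchmark=true                     -> also synchronous, so memory logs
//                                           reflect a fully destroyed scope
//   both flags set to false              -> deletion is offloaded to the
//                                           thread pool via Async(...)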

void Scope::EraseVars(const std::vector<std::string>& var_names) {
  SCOPE_LOCK_GUARD
  std::set<std::string> var_set(var_names.begin(), var_names.end());
  for (auto it = vars_.begin(); it != vars_.end();) {
    if (var_set.find(it->first) != var_set.end()) {
      it = vars_.erase(it);
    } else {
      ++it;
    }
  }
}

void Scope::Rename(const std::string& origin_name,
                   const std::string& new_name) const {
  SCOPE_LOCK_GUARD
  RenameInternal(origin_name, new_name);
}

std::string Scope::Rename(const std::string& origin_name) const {
  SCOPE_LOCK_GUARD
  auto new_name = string::Sprintf("%p.%d", this, vars_.size());
  RenameInternal(origin_name, new_name);
  return new_name;
}

Variable* Scope::VarInternal(const std::string& name) {
  auto* v = FindVarLocally(name);
  if (v != nullptr) return v;

  v = new Variable();
  vars_[name].reset(v);
  VLOG(30) << "Create variable " << name;
  v->name_ = &(vars_.find(name)->first);
  return v;
}

const Scope* Scope::FindScopeInternal(const Variable* var) const {
  for (auto& kv : vars_) {
    if (kv.second.get() == var) {
      return this;
    }
  }
  return (parent_ == nullptr) ? nullptr : parent_->FindScope(var);
}

void Scope::RenameInternal(const std::string& origin_name,
                           const std::string& new_name) const {
  auto origin_it = vars_.find(origin_name);
  PADDLE_ENFORCE(origin_it != vars_.end(),
                 "Cannot find original variable with name %s", origin_name);
  auto new_it = vars_.find(new_name);
  PADDLE_ENFORCE(new_it == vars_.end(),
                 "The variable with name %s is already in the scope", new_name);
  vars_[new_name].reset(origin_it->second.release());
  vars_.erase(origin_it);
}

Variable* Scope::FindVarInternal(const std::string& name) const {
  auto var = FindVarLocally(name);
  if (var != nullptr) {
    return var;
  }
  return (parent_ == nullptr) ? nullptr : parent_->FindVar(name);
}

Variable* Scope::FindVarLocally(const std::string& name) const {
  auto it = vars_.find(name);
  if (it != vars_.end()) return it->second.get();
  return nullptr;
}

std::string GenScopeTreeDebugInfo(Scope* root) {
  std::stringstream os;

  if (!root) return "";

  // level traversal
  std::queue<Scope*> queue;
  queue.push(root);

  std::vector<Scope*> scopes;

  while (!queue.empty()) {
    auto* end = queue.back();
    Scope* q = nullptr;
    while (q != end) {
      q = queue.front();
      queue.pop();
      os << q << " ";
      scopes.push_back(q);

      for (auto* c : q->kids()) {
        queue.push(c);
      }
    }
    // end of a level
    os << "\n------------------------------------------\n";
  }

  os << "\nDetails:\n\n";

  for (Scope* q : scopes) {
    os << "====\n";
    os << q << ":\n";
    for (auto& var : q->LocalVarNames()) {
      os << "  - " << var << "\n";
    }
  }

  return os.str();
}
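
// Sketch of the output (addresses and variable names are made up): one line
// of scope pointers per tree level, a divider after each level, then a
// per-scope listing of local variables:
//
//   0x1000
//   ------------------------------------------
//   0x2000 0x3000
//   ------------------------------------------
//
//   Details:
//
//   ====
//   0x1000:
//     - fc_0.w
//   ====
//   ...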

}  // namespace framework
}  // namespace paddle