/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/scope.h"

#include <algorithm>  // for std::find
#include <memory>     // for unique_ptr
#include <queue>
#include <set>
#include <sstream>    // for std::stringstream
#include <unordered_set>
#include "glog/logging.h"
#include "paddle/fluid/framework/threadpool.h"  // for Async
#include "paddle/fluid/string/printf.h"

DEFINE_bool(benchmark, false,
            "Run memory benchmark. It makes scope deletion synchronous and "
            "adds memory usage logs. CUDA devices are asynchronous by "
            "default; setting this flag to true forces ops to run in "
            "synchronous mode.");

DEFINE_bool(
    eager_delete_scope, true,
    "Delete local scope eagerly. It will reduce GPU memory usage but "
    "slow down the destruction of variables (around 1% performance loss).");

DEFINE_double(
    eager_delete_tensor_gb, -1.0,
    "Memory size threshold (GB) at which the garbage collector clears "
    "tensors. Disabled when this value is less than 0.");

// In inference scenarios, scopes are never written by two threads at the
// same time, but a scope may be read by multiple threads concurrently, and
// the mutex would cause a serious performance issue.
// So the mutex is disabled when `ON_INFER` is defined.
#ifdef PADDLE_ON_INFERENCE
#define SCOPE_KIDS_READER_LOCK
#define SCOPE_KIDS_WRITER_LOCK
#define SCOPE_VARS_READER_LOCK
#define SCOPE_VARS_WRITER_LOCK
#else
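// Note: the RAII lock object must be named. An unnamed temporary such as
// `AutoRDLock(&kids_lock_);` is destroyed at the end of its full expression,
// which would release the lock immediately instead of at the end of the
// enclosing scope.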
#define SCOPE_KIDS_READER_LOCK AutoRDLock auto_lock(&kids_lock_);
#define SCOPE_KIDS_WRITER_LOCK AutoWRLock auto_lock(&kids_lock_);
#define SCOPE_VARS_READER_LOCK AutoRDLock auto_lock(&vars_lock_);
#define SCOPE_VARS_WRITER_LOCK AutoWRLock auto_lock(&vars_lock_);
#endif

namespace paddle {
namespace framework {

int64_t GetEagerDeletionThreshold() {
  return FLAGS_eager_delete_tensor_gb < 0
             ? -1
             : static_cast<int64_t>(FLAGS_eager_delete_tensor_gb *
                                    (static_cast<int64_t>(1) << 30));
}
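// For example, --eager_delete_tensor_gb=1.5 yields a threshold of
// 1.5 * 2^30 = 1610612736 bytes, while any negative flag value yields -1,
// meaning garbage collection is disabled.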

Scope::~Scope() { DropKids(); }

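// The returned child scope is owned by this scope's kids_ list; it is
// destroyed through DropKids() or DeleteScope(), not by the caller.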
Scope& Scope::NewScope() const {
  Scope* child = new Scope(this);
  {
    SCOPE_KIDS_WRITER_LOCK
    kids_.push_back(child);
  }
  return *child;
}

Variable* Scope::Var(const std::string& name) {
  SCOPE_VARS_WRITER_LOCK
  return VarInternal(name);
}

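// Generates a unique variable name of the form "<scope address>.<var count>"
// and reports it back through `name` when `name` is non-null.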
Variable* Scope::Var(std::string* name) {
  auto new_name = string::Sprintf("%p.%d", this, vars_.size());
  if (name != nullptr) {
    *name = new_name;
  }
  SCOPE_VARS_WRITER_LOCK
  return VarInternal(new_name);
}

Variable* Scope::FindVar(const std::string& name) const {
  SCOPE_VARS_READER_LOCK
  return FindVarInternal(name);
}

Variable* Scope::FindLocalVar(const std::string& name) const {
  SCOPE_VARS_READER_LOCK
  return FindVarLocally(name);
}

const Scope* Scope::FindScope(const Variable* var) const {
  SCOPE_VARS_READER_LOCK
  return FindScopeInternal(var);
}

void Scope::DropKids() {
  SCOPE_KIDS_WRITER_LOCK
  for (Scope* s : kids_) delete s;
  kids_.clear();
}

bool Scope::HasKid(const Scope* scope) const {
  SCOPE_KIDS_READER_LOCK
  auto it = std::find(this->kids_.begin(), this->kids_.end(), scope);
  return it != this->kids_.end();
}

std::vector<std::string> Scope::LocalVarNames() const {
  std::vector<std::string> known_vars;
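  // Limit the reader lock to the block below so it is released before return.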
  {
    SCOPE_VARS_READER_LOCK
    known_vars.reserve(this->vars_.size());
    for (auto& p : vars_) {
      known_vars.emplace_back(p.first);
    }
  }
  return known_vars;
}

void Scope::DeleteScope(Scope* scope) const {
  SCOPE_KIDS_WRITER_LOCK
  auto it = std::find(this->kids_.begin(), this->kids_.end(), scope);
  PADDLE_ENFORCE(it != this->kids_.end(), "%p Cannot find %p as kid scope",
                 this, scope);
  this->kids_.erase(it);
  // When benchmarking memory on Fluid, the scope must be deleted synchronously.
  if (FLAGS_benchmark || FLAGS_eager_delete_scope) {
    delete scope;
  } else {
    Async([scope] { delete scope; });
  }
}

void Scope::EraseVars(const std::vector<std::string>& var_names) {
  std::set<std::string> var_set(var_names.begin(), var_names.end());
  SCOPE_VARS_WRITER_LOCK
  for (auto it = vars_.begin(); it != vars_.end();) {
    if (var_set.find(it->first) != var_set.end()) {
      it = vars_.erase(it);
    } else {
      ++it;
    }
  }
}

void Scope::Rename(const std::string& origin_name,
                   const std::string& new_name) const {
  SCOPE_VARS_WRITER_LOCK
  RenameInternal(origin_name, new_name);
}

std::string Scope::Rename(const std::string& origin_name) const {
  SCOPE_VARS_WRITER_LOCK
  auto new_name = string::Sprintf("%p.%d", this, vars_.size());
  RenameInternal(origin_name, new_name);
  return new_name;
}

Variable* Scope::VarInternal(const std::string& name) {
  auto* v = FindVarLocally(name);
  if (v != nullptr) return v;

  v = new Variable();
  vars_[name].reset(v);
  VLOG(3) << "Create variable " << name;
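  // Point the variable at its own key in vars_ so it can report its name
  // without storing a separate copy of the string.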
  v->name_ = &(vars_.find(name)->first);
  return v;
}

const Scope* Scope::FindScopeInternal(const Variable* var) const {
  for (auto& kv : vars_) {
    if (kv.second.get() == var) {
      return this;
    }
  }
  return (parent_ == nullptr) ? nullptr : parent_->FindScope(var);
}

void Scope::RenameInternal(const std::string& origin_name,
                           const std::string& new_name) const {
  auto origin_it = vars_.find(origin_name);
  PADDLE_ENFORCE(origin_it != vars_.end(),
                 "Cannot find original variable with name %s", origin_name);
  auto new_it = vars_.find(new_name);
  PADDLE_ENFORCE(new_it == vars_.end(),
                 "The variable with name %s is already in the scope", new_name);
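  // Transfer ownership of the Variable to the new key and drop the old
  // entry; the Variable object itself is neither copied nor destroyed.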
  vars_[new_name].reset(origin_it.value().release());
  vars_.erase(origin_it);
}

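// Searches this scope first, then walks up the parent chain. The recursive
// step goes through FindVar(), which re-acquires the parent's reader lock.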
Variable* Scope::FindVarInternal(const std::string& name) const {
  auto var = FindVarLocally(name);
  if (var != nullptr) {
    return var;
  }
  return (parent_ == nullptr) ? nullptr : parent_->FindVar(name);
}

Variable* Scope::FindVarLocally(const std::string& name) const {
  auto it = vars_.find(name);
  if (it != vars_.end()) return it->second.get();
  return nullptr;
}

std::string GenScopeTreeDebugInfo(Scope* root) {
  std::stringstream os;

  if (!root) return "";

  // level traversal
  std::queue<Scope*> queue;
  queue.push(root);

  std::vector<Scope*> scopes;

  while (!queue.empty()) {
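    // `end` is the last scope already enqueued for the current level, so
    // popping until it is reached emits one full level before the separator.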
    auto* end = queue.back();
    Scope* q = nullptr;
    while (q != end) {
      q = queue.front();
      queue.pop();
      os << q << " ";
      scopes.push_back(q);

      for (auto* c : q->kids()) {
        queue.push(c);
      }
    }
    // end of a level
    os << "\n------------------------------------------\n";
  }

  os << "\nDetails:\n\n";

  for (Scope* q : scopes) {
    os << "====\n";
    os << q << ":\n";
    for (auto& var : q->LocalVarNames()) {
      os << "  - " << var << "\n";
    }
  }

  return os.str();
}

}  // namespace framework
}  // namespace paddle