/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/scope.h"

#include <memory>  // for unique_ptr
#include <set>
#include "glog/logging.h"
#include "paddle/fluid/framework/threadpool.h"
#include "paddle/fluid/string/printf.h"

DEFINE_bool(benchmark, false,
            "Doing memory benchmark. It will make deleting scope synchronized, "
            "and add some memory usage logs."
            "Default cuda is asynchronous device, set to True will"
            "force op run in synchronous mode.");

DEFINE_bool(
    eager_delete_scope, true,
    "Delete local scope eagerly. It will reduce GPU memory usage but "
    "slow down the destruction of variables.(around 1% performance harm)");

DEFINE_double(
    eager_delete_tensor_gb, -1.0,
    "Memory size threshold (GB) when the garbage collector clear tensors."
    "Disabled when this value is less than 0");

namespace paddle {
namespace framework {

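// Converts FLAGS_eager_delete_tensor_gb from gigabytes to a byte count
// (1 GB = 2^30 bytes); e.g. a flag value of 1.5 yields 1610612736 bytes.
// Returns -1 when eager deletion is disabled (flag value < 0).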
int64_t GetEagerDeletionThreshold() {
  return FLAGS_eager_delete_tensor_gb < 0
             ? -1
             : static_cast<int64_t>(FLAGS_eager_delete_tensor_gb *
                                    (static_cast<int64_t>(1) << 30));
}

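// Destroying a scope recursively destroys all of its child scopes; the
// variables owned by this scope are released by the vars_ map itself.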
Scope::~Scope() { DropKids(); }

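// Creates a child scope whose lifetime is owned by this scope. A minimal
// usage sketch (assuming Scope is default-constructible, as in scope.h):
//   Scope global;
//   Scope& local = global.NewScope();
//   local.Var("x");               // visible in local, not in global
//   global.DeleteScope(&local);   // or let ~Scope / DropKids reclaim it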
Scope& Scope::NewScope() const {
  std::lock_guard<std::mutex> lock(mutex_);
  kids_.push_back(new Scope(this));
  return *kids_.back();
}

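// Returns the variable named `name`, creating it in this scope if it does
// not already exist here.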
Variable* Scope::Var(const std::string& name) {
  std::lock_guard<std::mutex> lock(mutex_);
  return VarInternal(name);
}

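// Creates a variable with a unique auto-generated name of the form
// "<scope address>.<current var count>"; the chosen name is reported back
// through *name when a non-null pointer is supplied.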
Variable* Scope::Var(std::string* name) {
  std::lock_guard<std::mutex> lock(mutex_);
  auto new_name = string::Sprintf("%p.%d", this, vars_.size());
  if (name != nullptr) {
    *name = new_name;
  }
  return VarInternal(new_name);
}

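// Looks the variable up in this scope first, then walks up through the
// ancestor scopes; returns nullptr if it is not found anywhere.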
Variable* Scope::FindVar(const std::string& name) const {
  std::lock_guard<std::mutex> lock(mutex_);
  return FindVarInternal(name);
}

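// Unlike FindVar, this never consults ancestor scopes.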
Variable* Scope::FindLocalVar(const std::string& name) const {
  std::lock_guard<std::mutex> lock(mutex_);
  return FindVarLocally(name);
}

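// Returns the scope (this one or an ancestor) that actually owns `var`,
// or nullptr if no scope on the path to the root owns it.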
const Scope* Scope::FindScope(const Variable* var) const {
  std::lock_guard<std::mutex> lock(mutex_);
  return FindScopeInternal(var);
}

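// Deletes all child scopes (each of which drops its own kids in turn).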
void Scope::DropKids() {
  std::lock_guard<std::mutex> lock(mutex_);
  for (Scope* s : kids_) delete s;
  kids_.clear();
}

bool Scope::HasKid(const Scope* scope) const {
  std::lock_guard<std::mutex> lock(mutex_);
  auto it = std::find(this->kids_.begin(), this->kids_.end(), scope);
  return it != this->kids_.end();
}

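// Returns a snapshot of the names of the variables owned directly by this
// scope; ancestor scopes are not included.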
std::vector<std::string> Scope::LocalVarNames() const {
  std::lock_guard<std::mutex> lock(mutex_);
  std::vector<std::string> known_vars;
  known_vars.reserve(this->vars_.size());
  for (auto& p : vars_) {
    known_vars.emplace_back(p.first);
  }
  return known_vars;
}

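// Unlinks `scope` from kids_ under the lock, then frees it either
// synchronously (benchmark or eager mode) or asynchronously on the
// framework thread pool via Async().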
void Scope::DeleteScope(Scope* scope) const {
  std::lock_guard<std::mutex> lock(mutex_);
  auto it = std::find(this->kids_.begin(), this->kids_.end(), scope);
  PADDLE_ENFORCE(it != this->kids_.end(), "Cannot find %p as kid scope", scope);
  this->kids_.erase(it);
  // When running a memory benchmark on Fluid, we have to delete the scope
  // synchronously.
  if (FLAGS_benchmark || FLAGS_eager_delete_scope) {
    delete scope;
  } else {
    Async([scope] { delete scope; });
  }
}

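// Erases every local variable whose name appears in var_names; names that
// are not present in this scope are silently ignored.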
void Scope::EraseVars(const std::vector<std::string>& var_names) {
  std::lock_guard<std::mutex> lock(mutex_);
  std::set<std::string> var_set(var_names.begin(), var_names.end());
  for (auto it = vars_.begin(); it != vars_.end();) {
    if (var_set.find(it->first) != var_set.end()) {
      it = vars_.erase(it);
    } else {
      ++it;
    }
  }
}

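// Renames origin_name to new_name; enforces that origin_name exists and
// that new_name is not already taken (see RenameInternal).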
void Scope::Rename(const std::string& origin_name,
                   const std::string& new_name) const {
  std::lock_guard<std::mutex> lock(mutex_);
  RenameInternal(origin_name, new_name);
}

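// Renames origin_name to a fresh auto-generated name and returns that name.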
std::string Scope::Rename(const std::string& origin_name) const {
  std::lock_guard<std::mutex> lock(mutex_);
  auto new_name = string::Sprintf("%p.%d", this, vars_.size());
  RenameInternal(origin_name, new_name);
  return new_name;
}

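// Expects mutex_ to be held by the caller. Creates the variable if needed
// and points its name_ field at the key stored in vars_, so the name
// remains valid for as long as the map entry lives.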
Variable* Scope::VarInternal(const std::string& name) {
  auto* v = FindVarLocally(name);
  if (v != nullptr) return v;

  v = new Variable();
  vars_[name].reset(v);
  VLOG(3) << "Create variable " << name;
  v->name_ = &(vars_.find(name)->first);
  return v;
}

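// Linear scan over this scope's variables; recurses into the parent scope
// (re-acquiring its lock via FindScope) when the variable is not owned here.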
const Scope* Scope::FindScopeInternal(const Variable* var) const {
  for (auto& kv : vars_) {
    if (kv.second.get() == var) {
      return this;
    }
  }
  return (parent_ == nullptr) ? nullptr : parent_->FindScope(var);
}

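// Transfers ownership of the Variable pointer from origin_name to new_name
// inside vars_; fails if the source is missing or the destination already
// exists.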
void Scope::RenameInternal(const std::string& origin_name,
                           const std::string& new_name) const {
  auto origin_it = vars_.find(origin_name);
  PADDLE_ENFORCE(origin_it != vars_.end(),
                 "Cannot find original variable with name %s", origin_name);
  auto new_it = vars_.find(new_name);
  PADDLE_ENFORCE(new_it == vars_.end(),
                 "The variable with name %s is already in the scope", new_name);
  vars_[new_name].reset(origin_it->second.release());
  vars_.erase(origin_it);
}

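// Expects mutex_ to be held. Falls back to parent_->FindVar, which locks
// the parent's own mutex_ (each scope has its own lock).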
Variable* Scope::FindVarInternal(const std::string& name) const {
  auto var = FindVarLocally(name);
  if (var != nullptr) {
    return var;
  }
  return (parent_ == nullptr) ? nullptr : parent_->FindVar(name);
}

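// Lock-free lookup restricted to this scope; callers are expected to hold
// mutex_ already.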
Variable* Scope::FindVarLocally(const std::string& name) const {
  auto it = vars_.find(name);
  if (it != vars_.end()) return it->second.get();
  return nullptr;
}

}  // namespace framework
}  // namespace paddle