From c8d1a8e90904df337d06fb40722b810d2393a3be Mon Sep 17 00:00:00 2001
From: minqiyang
Date: Wed, 9 Jan 2019 22:54:19 +0800
Subject: [PATCH] Change var_ and grad_ to shared_ptr

---
 paddle/fluid/imperative/layer.cc       |  2 +-
 paddle/fluid/imperative/layer.h        |  7 ++++---
 paddle/fluid/imperative/tracer.h       | 16 ++++++++--------
 paddle/fluid/pybind/pybind.cc          |  7 ++++---
 python/paddle/fluid/framework.py       |  2 +-
 python/paddle/fluid/imperative/base.py |  1 -
 6 files changed, 18 insertions(+), 17 deletions(-)

diff --git a/paddle/fluid/imperative/layer.cc b/paddle/fluid/imperative/layer.cc
index b2a7e5df46..a79f501673 100644
--- a/paddle/fluid/imperative/layer.cc
+++ b/paddle/fluid/imperative/layer.cc
@@ -114,7 +114,7 @@ class Autograd {
   }
 };
 
-framework::LoDTensor& VarBase::Grad() {
+framework::LoDTensor& VarBase::GradValue() {
   VLOG(3) << "get var grad " << var_desc_->Name();
   return *(grads_->var_->GetMutable<framework::LoDTensor>());
 }
diff --git a/paddle/fluid/imperative/layer.h b/paddle/fluid/imperative/layer.h
index 3cafab1620..5050564034 100644
--- a/paddle/fluid/imperative/layer.h
+++ b/paddle/fluid/imperative/layer.h
@@ -109,7 +109,7 @@ class VarBase {
 
   void RunBackward();
 
-  framework::LoDTensor& Grad();
+  framework::LoDTensor& GradValue();
 
   inline std::string GradName() const {
     PADDLE_ENFORCE(
@@ -123,8 +123,9 @@
   int pre_op_out_idx_;
 
   framework::VarDesc* var_desc_;
-  framework::Variable* var_;
-  VarBase* grads_;
+
+  std::shared_ptr<framework::Variable> var_;
+  std::shared_ptr<VarBase> grads_;
 
   bool stop_gradient_;
 };
diff --git a/paddle/fluid/imperative/tracer.h b/paddle/fluid/imperative/tracer.h
index 0add560342..eebdfed22d 100644
--- a/paddle/fluid/imperative/tracer.h
+++ b/paddle/fluid/imperative/tracer.h
@@ -74,10 +74,10 @@ class Tracer {
     for (auto it : op->input_vars_) {
       auto& invars = invars_map[it.first];
       for (VarBase* inp : it.second) {
-        PADDLE_ENFORCE_NOT_NULL(inp->var_, "op %s input %s nullptr",
+        PADDLE_ENFORCE_NOT_NULL(inp->var_.get(), "op %s input %s nullptr",
                                 op->op_desc_->Type(), inp->var_desc_->Name());
 
-        invars.push_back(inp->var_);
+        invars.push_back(inp->var_.get());
         vars[inp->var_desc_->Name()] = inp;
         if (inp->pre_op_) {
           op->pre_ops_[it.first].push_back(inp->pre_op_);
@@ -96,7 +96,7 @@ class Tracer {
       const std::vector<VarBase*>& outputs = it.second;
       for (size_t i = 0; i < outputs.size(); ++i) {
         VarBase* out = outputs[i];
-        outvars.push_back(out->var_);
+        outvars.push_back(out->var_.get());
         vars[out->var_desc_->Name()] = out;
 
         framework::VarDesc* var_desc = block->FindVar(out->var_desc_->Name());
@@ -143,13 +143,13 @@
         if (var_it == grad_to_var->end()) {
           auto fwd_var_it = vars.find(grad_invar);
           PADDLE_ENFORCE(fwd_var_it != vars.end());
-          grad_in_vars.push_back(fwd_var_it->second->var_);
+          grad_in_vars.push_back(fwd_var_it->second->var_.get());
         } else {
           VarBase* var = vars[var_it->second];
           if (!var->grads_->var_->IsInitialized()) {
-            InitVar(var->var_, var->grads_->var_);
+            InitVar(var->var_.get(), var->grads_->var_.get());
           }
-          grad_in_vars.push_back(var->grads_->var_);
+          grad_in_vars.push_back(var->grads_->var_.get());
         }
       }
     }
@@ -162,9 +162,9 @@
         PADDLE_ENFORCE(var_it != grad_to_var->end());
         VarBase* var = vars[var_it->second];
         if (!var->grads_->var_->IsInitialized()) {
-          InitVar(var->var_, var->grads_->var_);
+          InitVar(var->var_.get(), var->grads_->var_.get());
         }
-        grad_out_vars.push_back(var->grads_->var_);
+        grad_out_vars.push_back(var->grads_->var_.get());
       }
     }
   }
diff --git a/paddle/fluid/pybind/pybind.cc b/paddle/fluid/pybind/pybind.cc
index efaadabd18..7f15abb1bb 100644
--- a/paddle/fluid/pybind/pybind.cc
+++ b/paddle/fluid/pybind/pybind.cc
@@ -132,11 +132,12 @@ PYBIND11_MODULE(core, m) {
       .def("_run_backward",
           [](imperative::VarBase &self) { self.RunBackward(); })
       .def("_grad_name", &imperative::VarBase::GradName)
-      .def("_grad", &imperative::VarBase::Grad)
+      .def("_grad_value", &imperative::VarBase::GradValue)
       .def("_grad_ivar",
-          [](const imperative::VarBase &self) { return self.grads_; },
+          [](const imperative::VarBase &self) { return self.grads_.get(); },
           py::return_value_policy::reference)
-      .def("value", [](const imperative::VarBase &self) { return self.var_; },
+      .def("value",
+          [](const imperative::VarBase &self) { return self.var_.get(); },
           py::return_value_policy::reference)
       .def_property(
           "desc",
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index 371a8c9e13..4de34e7b2b 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -379,7 +379,7 @@ class Variable(object):
         self._ivar._run_backward()
 
     def _gradient(self):
-        return np.array(self._ivar._grad())
+        return np.array(self._ivar._grad_value())
 
     def __str__(self):
         return self.to_string(True)
diff --git a/python/paddle/fluid/imperative/base.py b/python/paddle/fluid/imperative/base.py
index e66ea33851..5d3ebb25a9 100644
--- a/python/paddle/fluid/imperative/base.py
+++ b/python/paddle/fluid/imperative/base.py
@@ -46,7 +46,6 @@ def to_variable(value, block=None):
             shape=value.shape,
             dtype=value.dtype)
         var = py_var._ivar.value()
-        print(type(var))
         tensor = var.get_tensor()
         tensor.set(value, core.CPUPlace())
         return py_var
--
GitLab
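Below is a minimal, self-contained C++ sketch (not part of the patch) of the ownership pattern this change adopts: members that were raw `framework::Variable*` / `VarBase*` pointers become `std::shared_ptr`, while call sites that still expect non-owning raw pointers pass `ptr.get()`. The `Variable`, `VarBase`, and `InitVar` definitions here are simplified stand-ins chosen for illustration, not the actual Paddle types or signatures.

```cpp
#include <iostream>
#include <memory>
#include <vector>

// Simplified stand-in for framework::Variable.
struct Variable {
  bool initialized = false;
};

// Simplified stand-in for imperative::VarBase after the patch:
// ownership of var_ and grads_ is expressed with shared_ptr.
struct VarBase {
  std::shared_ptr<Variable> var_ = std::make_shared<Variable>();
  std::shared_ptr<VarBase> grads_;
};

// Helpers such as InitVar keep taking raw, non-owning pointers,
// so callers hand over ptr.get() instead of the smart pointer.
void InitVar(const Variable* src, Variable* dst) {
  dst->initialized = src->initialized;
}

int main() {
  VarBase inp;
  inp.var_->initialized = true;
  inp.grads_ = std::make_shared<VarBase>();

  // Non-owning views collected for an op, mirroring
  // invars.push_back(inp->var_.get()) in tracer.h.
  std::vector<Variable*> invars;
  invars.push_back(inp.var_.get());

  // Mirrors InitVar(var->var_.get(), var->grads_->var_.get()).
  if (!inp.grads_->var_->initialized) {
    InitVar(inp.var_.get(), inp.grads_->var_.get());
  }

  std::cout << "gradient var initialized: " << inp.grads_->var_->initialized
            << std::endl;
  return 0;
}
```

The `.get()` calls keep the existing raw-pointer interfaces (operator kernels, `InitVar`, pybind return values) unchanged, while the `shared_ptr` owners in `VarBase` control the objects' lifetimes.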