From 84bf4d7b065bf245c606d8744079c856218d00e0 Mon Sep 17 00:00:00 2001
From: minqiyang
Date: Mon, 25 Feb 2019 13:32:35 +0800
Subject: [PATCH] Move ClearBlock into OpBase and VarBase's destructor

test=develop
---
 paddle/fluid/framework/block_desc.cc              | 14 --------------
 paddle/fluid/framework/block_desc.h               |  2 --
 paddle/fluid/imperative/layer.h                   | 16 ++++++++++++++++
 paddle/fluid/pybind/protobuf.cc                   |  2 --
 python/paddle/fluid/framework.py                  | 11 ++---------
 .../tests/unittests/test_imperative_optimizer.py  |  2 --
 .../tests/unittests/test_imperative_resnet.py     |  2 --
 7 files changed, 18 insertions(+), 31 deletions(-)

diff --git a/paddle/fluid/framework/block_desc.cc b/paddle/fluid/framework/block_desc.cc
index f4bb2f3e2f..f537e4b9e5 100644
--- a/paddle/fluid/framework/block_desc.cc
+++ b/paddle/fluid/framework/block_desc.cc
@@ -163,20 +163,6 @@ std::vector<OpDesc *> BlockDesc::AllOps() const {
   return res;
 }
 
-void BlockDesc::Clear() {
-  // clear all ops
-  ops_.clear();
-
-  // clear all vars which are not persistable
-  for (auto it = vars_.begin(); it != vars_.end();) {
-    if (it->second->Persistable()) {
-      ++it;
-    } else {
-      vars_.erase(it++);
-    }
-  }
-}
-
 void BlockDesc::Flush() {
   for (auto &op_desc : ops_) {
     op_desc->Flush();
diff --git a/paddle/fluid/framework/block_desc.h b/paddle/fluid/framework/block_desc.h
index e192624a26..960ca39e1e 100644
--- a/paddle/fluid/framework/block_desc.h
+++ b/paddle/fluid/framework/block_desc.h
@@ -97,8 +97,6 @@ class BlockDesc {
 
   std::vector<OpDesc *> AllOps() const;
 
-  void Clear();
-
   size_t OpSize() const { return ops_.size(); }
 
   OpDesc *Op(int idx) const { return ops_.at(idx).get(); }
diff --git a/paddle/fluid/imperative/layer.h b/paddle/fluid/imperative/layer.h
index b3862f5ed9..30c8022a33 100644
--- a/paddle/fluid/imperative/layer.h
+++ b/paddle/fluid/imperative/layer.h
@@ -117,12 +117,19 @@ class VarBase {
       : var_desc_(nullptr),
         var_(var),
         grads_(grad),
+        block_(nullptr),
         stop_gradient_(stop_gradient),
         pre_op_(nullptr),
         pre_op_out_idx_(-1) {}
 
  public:
   virtual ~VarBase() {
+    LOG(ERROR) << "remove var " << name_;
+
+    if (block_) {
+      block_->RemoveVar(name_);
+    }
+
     if (var_) {
       delete var_;
     }
@@ -180,11 +187,14 @@ class VarBase {
   framework::Variable* var_;
   VarBase* grads_;
 
+  framework::BlockDesc* block_;
+
  private:
   bool stop_gradient_;
   OpBase* pre_op_;
   std::string pre_op_out_name_;
   int pre_op_out_idx_;
+  std::string name_;
 };
 
 /* The wrapper for OpDesc which holds a OpDesc and a OpDesc of its
@@ -203,6 +213,12 @@ class OpBase {
     for (framework::OpDesc* desc : grad_op_descs_) {
       delete desc;
     }
+
+    LOG(ERROR) << "remove op " << op_desc_->Type() << " id " << trace_id_;
+
+    if (block_) {
+      block_->RemoveOp(trace_id_, trace_id_ + 1);
+    }
   }
 
   std::map<std::string, std::vector<VarBase*>> ApplyGrad();
diff --git a/paddle/fluid/pybind/protobuf.cc b/paddle/fluid/pybind/protobuf.cc
index 48fe445b7d..e729be4a95 100644
--- a/paddle/fluid/pybind/protobuf.cc
+++ b/paddle/fluid/pybind/protobuf.cc
@@ -189,8 +189,6 @@ void BindBlockDesc(pybind11::module *m) {
              return self.HasVar(name);
            },
            pybind11::return_value_policy::reference)
-      .def("_clear_block", [](pd::BlockDesc &self) { return self.Clear(); },
-           pybind11::return_value_policy::reference)
       .def("_rename_var",
            [](pd::BlockDesc &self, const pybind11::bytes &byte_name,
               const pybind11::bytes &byte_name_new) {
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index fdb7c0068e..72d63bf079 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -381,6 +381,8 @@ class Variable(object):
         if _in_imperative_mode():
             # record vars in tracer rather than blocks
             self._ivar = kwargs.get("ivar", None)
+            self._ivar.block = block.desc
+            self._ivar.name = name
             if not self._ivar:
                 self._ivar = core.VarBase(stop_gradient)
             self._ivar.desc = self.desc
@@ -1192,15 +1194,6 @@ class Block(object):
         else:
             raise ValueError("Var {0} is not found recursively".format(name))
 
-    def _clear_block(self):
-        assert _in_imperative_mode()
-
-        # TODO(minqiyang): move this to Variable and Operator's __del__
-        self.desc._clear_block()
-
-        assert len(self.vars) == 0
-        assert len(self.ops) == 0
-
     def all_parameters(self):
         return list(self.iter_parameters())
 
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py b/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py
index 0d0a3bbe0b..72356faf92 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py
@@ -142,8 +142,6 @@ class TestImperativeMnist(unittest.TestCase):
                 sgd.minimize(avg_loss)
                 mnist.clear_gradients()
 
-                fluid.default_main_program().global_block()._clear_block()
-
                 dy_param_value = {}
                 for param in mnist.parameters():
                     dy_param_value[param.name] = param._numpy()
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_resnet.py b/python/paddle/fluid/tests/unittests/test_imperative_resnet.py
index 4892495e11..9b5b4c8cef 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_resnet.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_resnet.py
@@ -286,8 +286,6 @@ class TestImperativeResnet(unittest.TestCase):
             optimizer.minimize(avg_loss)
             resnet.clear_gradients()
 
-            fluid.default_main_program().global_block()._clear_block()
-
             dy_param_value = {}
             for param in resnet.parameters():
                 dy_param_value[param.name] = param._numpy()
-- 
GitLab
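
Note (after the signature, so `git am` ignores it): the pattern this patch adopts
is plain RAII-style deregistration. Each traced VarBase/OpBase keeps a back-pointer
to the BlockDesc it was recorded in and unregisters itself in its own destructor,
replacing the bulk `_clear_block()` sweep the tests used to run after every
iteration. A minimal standalone C++ sketch of the idea follows; the `Block` and
`Node` names here are illustrative stand-ins, not Paddle's API.

#include <cstddef>
#include <iostream>
#include <map>
#include <string>
#include <utility>

// Toy stand-in for framework::BlockDesc: owns a name -> entry map and
// exposes the RemoveVar() hook that the patch calls from ~VarBase().
class Block {
 public:
  void AddVar(const std::string& name) { vars_[name] = true; }
  void RemoveVar(const std::string& name) { vars_.erase(name); }
  std::size_t Size() const { return vars_.size(); }

 private:
  std::map<std::string, bool> vars_;
};

// Toy stand-in for imperative::VarBase: remembers the block it was
// recorded in (may be null) and deregisters itself on destruction,
// so no separate whole-block clearing pass is needed.
class Node {
 public:
  Node(std::string name, Block* block)
      : name_(std::move(name)), block_(block) {
    if (block_) block_->AddVar(name_);
  }
  ~Node() {
    if (block_) block_->RemoveVar(name_);  // destructor-driven cleanup
  }

 private:
  std::string name_;
  Block* block_;  // non-owning; the block must outlive the node
};

int main() {
  Block block;
  {
    Node a("a", &block);
    Node b("b", &block);
    std::cout << "live vars: " << block.Size() << "\n";  // prints 2
  }  // a and b go out of scope and remove themselves from the block
  std::cout << "live vars: " << block.Size() << "\n";  // prints 0
}

One caveat the real change shares with this sketch: the nodes hold non-owning raw
pointers, so the block must outlive every VarBase/OpBase that points at it.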