diff --git a/paddle/fluid/framework/block_desc.cc b/paddle/fluid/framework/block_desc.cc
index f4bb2f3e2fc2c8cf0376631d1996b395a8bc581a..f537e4b9e569dd4c513ac0efde7240833bcf04b6 100644
--- a/paddle/fluid/framework/block_desc.cc
+++ b/paddle/fluid/framework/block_desc.cc
@@ -163,20 +163,6 @@ std::vector<OpDesc *> BlockDesc::AllOps() const {
   return res;
 }
 
-void BlockDesc::Clear() {
-  // clear all ops
-  ops_.clear();
-
-  // clear all vars which are not persistable
-  for (auto it = vars_.begin(); it != vars_.end();) {
-    if (it->second->Persistable()) {
-      ++it;
-    } else {
-      vars_.erase(it++);
-    }
-  }
-}
-
 void BlockDesc::Flush() {
   for (auto &op_desc : ops_) {
     op_desc->Flush();
diff --git a/paddle/fluid/framework/block_desc.h b/paddle/fluid/framework/block_desc.h
index e192624a261e1291f1610e8e7e700d99a9d814d2..960ca39e1eadd3c064beb0e2c1342a406c4f0b6a 100644
--- a/paddle/fluid/framework/block_desc.h
+++ b/paddle/fluid/framework/block_desc.h
@@ -97,8 +97,6 @@ class BlockDesc {
 
   std::vector<OpDesc *> AllOps() const;
 
-  void Clear();
-
   size_t OpSize() const { return ops_.size(); }
 
   OpDesc *Op(int idx) const { return ops_.at(idx).get(); }
diff --git a/paddle/fluid/imperative/layer.h b/paddle/fluid/imperative/layer.h
index 8a295341b96dfcfcac7eb16eb3a7448eab6fa8da..db18e4e4303b07d7aeadfc00b771265a9f62462e 100644
--- a/paddle/fluid/imperative/layer.h
+++ b/paddle/fluid/imperative/layer.h
@@ -126,12 +126,19 @@ class VarBase {
       : var_desc_(nullptr),
         var_(var),
         grads_(grad),
+        block_(nullptr),
         stop_gradient_(stop_gradient),
         pre_op_(nullptr),
         pre_op_out_idx_(-1) {}
 
  public:
   virtual ~VarBase() {
+    LOG(ERROR) << "remove var " << name_;
+
+    if (block_) {
+      block_->RemoveVar(name_);
+    }
+
     if (var_) {
       delete var_;
     }
@@ -189,11 +196,14 @@ class VarBase {
   framework::Variable* var_;
   VarBase* grads_;
 
+  framework::BlockDesc* block_;
+
  private:
   bool stop_gradient_;
   OpBase* pre_op_;
   std::string pre_op_out_name_;
   int pre_op_out_idx_;
+  std::string name_;
 };
 
 /* The wrapper for OpDesc which holds a OpDesc and a OpDesc of its
@@ -212,6 +222,12 @@ class OpBase {
     for (framework::OpDesc* desc : grad_op_descs_) {
       delete desc;
     }
+
+    LOG(ERROR) << "remove op " << op_desc_->Type() << " id " << trace_id_;
+
+    if (block_) {
+      block_->RemoveOp(trace_id_, trace_id_ + 1);
+    }
   }
 
   std::map<std::string, std::vector<VarBase*>> ApplyGrad();
diff --git a/paddle/fluid/pybind/protobuf.cc b/paddle/fluid/pybind/protobuf.cc
index 48fe445b7d01287c37bcf7d4811f687785ca78d5..e729be4a95a58510f1e0162af4216feaa400d971 100644
--- a/paddle/fluid/pybind/protobuf.cc
+++ b/paddle/fluid/pybind/protobuf.cc
@@ -189,8 +189,6 @@ void BindBlockDesc(pybind11::module *m) {
             return self.HasVar(name);
           },
           pybind11::return_value_policy::reference)
-      .def("_clear_block", [](pd::BlockDesc &self) { return self.Clear(); },
-           pybind11::return_value_policy::reference)
       .def("_rename_var",
            [](pd::BlockDesc &self, const pybind11::bytes &byte_name,
               const pybind11::bytes &byte_name_new) {
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index 12de275facf3fee08e31de04bedd29be61adeaac..0f938a85c899cbc0dae0767d4f4a9f0eca247f50 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -390,6 +390,8 @@ class Variable(object):
         if _in_imperative_mode():
             # record vars in tracer rather than blocks
             self._ivar = kwargs.get("ivar", None)
+            self._ivar.block = block.desc
+            self._ivar.name = name
             if not self._ivar:
                 self._ivar = core.VarBase(stop_gradient)
             self._ivar.desc = self.desc
@@ -1200,15 +1202,6 @@ class Block(object):
         else:
             raise ValueError("Var {0} is not found recursively".format(name))
 
-    def _clear_block(self):
-        assert _in_imperative_mode()
-
-        # TODO(minqiyang): move this to Variable and Operator's __del__
-        self.desc._clear_block()
-
-        assert len(self.vars) == 0
-        assert len(self.ops) == 0
-
     def all_parameters(self):
         return list(self.iter_parameters())
 
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py b/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py
index 0d0a3bbe0bd47fe0e01761f8b42c92b884a5680a..72356faf923ec2496a50d956ccc82c5828da6576 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_optimizer.py
@@ -142,8 +142,6 @@ class TestImperativeMnist(unittest.TestCase):
                 sgd.minimize(avg_loss)
                 mnist.clear_gradients()
 
-                fluid.default_main_program().global_block()._clear_block()
-
                 dy_param_value = {}
                 for param in mnist.parameters():
                     dy_param_value[param.name] = param._numpy()
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_resnet.py b/python/paddle/fluid/tests/unittests/test_imperative_resnet.py
index 4892495e1108e6d2a7e96cab88dc7668e360d79f..9b5b4c8cef1ca4496e3ac0cd2f52dd2ddc758d1f 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_resnet.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_resnet.py
@@ -286,8 +286,6 @@ class TestImperativeResnet(unittest.TestCase):
                 optimizer.minimize(avg_loss)
                 resnet.clear_gradients()
 
-                fluid.default_main_program().global_block()._clear_block()
-
                 dy_param_value = {}
                 for param in resnet.parameters():
                     dy_param_value[param.name] = param._numpy()
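
Note: the diff moves block cleanup out of the explicit `Block._clear_block()` call and into the `VarBase`/`OpBase` destructors, which now call `BlockDesc::RemoveVar(name_)` and `BlockDesc::RemoveOp(trace_id_, trace_id_ + 1)` on the block recorded at trace time. Below is a minimal sketch of the resulting usage, not taken from this patch, assuming the 2019-era fluid imperative API (`fluid.imperative.guard`, `to_variable`, the `op_size` binding on `BlockDesc`); exactly when an op is pruned depends on when Python releases the last reference to the traced objects.

    import numpy as np
    import paddle.fluid as fluid
    from paddle.fluid.imperative.base import to_variable

    with fluid.imperative.guard():
        block = fluid.default_main_program().global_block()

        x = to_variable(np.ones([2, 2], dtype='float32'))
        y = fluid.layers.relu(x)  # the tracer records a relu op into `block`
        traced_ops = block.desc.op_size()

        # Before this patch, tests reclaimed the block by hand:
        #     fluid.default_main_program().global_block()._clear_block()
        # After it, dropping the Python references suffices: ~VarBase removes
        # the var from the block, ~OpBase removes the traced op.
        del y
        assert block.desc.op_size() <= traced_ops

This is why the two unit tests simply delete their per-iteration `_clear_block()` calls rather than replacing them with anything: the per-object destructors now do the same bookkeeping incrementally.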