From 2b3510bc505fd6b44a6843f23728b0466d5ed01d Mon Sep 17 00:00:00 2001
From: minqiyang
Date: Fri, 22 Feb 2019 18:53:41 +0800
Subject: [PATCH] Add imperative python tracer

---
 paddle/fluid/imperative/layer.h            |  2 ++
 paddle/fluid/pybind/pybind.cc              | 10 ++++++
 python/paddle/fluid/framework.py           | 37 ++++++++-------------
 python/paddle/fluid/imperative/__init__.py |  4 +++
 python/paddle/fluid/imperative/base.py     |  3 +-
 5 files changed, 31 insertions(+), 25 deletions(-)

diff --git a/paddle/fluid/imperative/layer.h b/paddle/fluid/imperative/layer.h
index 9adc81f04dd..b3862f5ed9d 100644
--- a/paddle/fluid/imperative/layer.h
+++ b/paddle/fluid/imperative/layer.h
@@ -196,6 +196,7 @@ class OpBase {
       : op_desc_(nullptr),
         forward_id_(-1),
         backward_id_(-1),
+        trace_id_(-1),
         place_(platform::CPUPlace()) {}
 
   virtual ~OpBase() {
@@ -216,6 +217,7 @@ class OpBase {
   // Note: each fwd op corresponds to a vector of bwd ops.
   std::vector<framework::OpDesc *> grad_op_descs_;
   int backward_id_;
+  int trace_id_;
 
   platform::Place place_;
 
diff --git a/paddle/fluid/pybind/pybind.cc b/paddle/fluid/pybind/pybind.cc
index d8e57a1ac6c..1c7b13fd8af 100644
--- a/paddle/fluid/pybind/pybind.cc
+++ b/paddle/fluid/pybind/pybind.cc
@@ -193,6 +193,16 @@ PYBIND11_MODULE(core, m) {
             }
           },
           py::return_value_policy::reference)
+      .def_property("_trace_id",
+                    [](const imperative::OpBase &self) {
+                      pybind11::gil_scoped_release release;
+                      return self.trace_id_;
+                    },
+                    [](imperative::OpBase &self, int trace_id) {
+                      pybind11::gil_scoped_release release;
+                      self.trace_id_ = trace_id;
+                    },
+                    py::return_value_policy::reference)
       .def_property(
           "forward_id",
           [](const imperative::OpBase &self) { return self.forward_id_; },
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index ae9bcbbecdf..fdb7c0068e7 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -1193,13 +1193,13 @@ class Block(object):
         raise ValueError("Var {0} is not found recursively".format(name))
 
     def _clear_block(self):
-        # TODO(minqiyang): move this to backward_hooks
-        self.desc._clear_block()
+        assert _in_imperative_mode()
 
-        for name in self.vars.keys():
-            assert self.vars[name].persistable
+        # TODO(minqiyang): move this to Variable and Operator's __del__
+        self.desc._clear_block()
 
-        del self.ops[:]
+        assert len(self.vars) == 0
+        assert len(self.ops) == 0
 
     def all_parameters(self):
         return list(self.iter_parameters())
@@ -1337,26 +1337,13 @@ class Block(object):
             #
             # TODO(minqiyang): add op stop_gradient support in static mode too.
             # currently, we only support stop_gradient in imperative mode.
-            self._trace_op(op, kwargs.get("stop_gradient", False))
-        self.ops.append(op)
+            _imperative_tracer().trace_op(op,
+                                          kwargs.get("stop_gradient", False))
+        else:
+            self.ops.append(op)
 
         return op
 
-    def _trace_op(self, op, stop_gradient=False):
-        backward_refs = _imperative_tracer().trace(
-            op.iop, op.inputs, op.outputs, self.desc,
-            _imperative_current_expected_place_, stop_gradient)
-
-        # TODO(minqiyang): support backward_hooks to eager remove backward_refs
-        op.backward_refs = defaultdict(list)
-        for k, v in six.iteritems(op.inputs):
-            if k in backward_refs:
-                op.backward_refs[k] = op.inputs[k]
-
-        for k, v in six.iteritems(op.outputs):
-            if k in backward_refs:
-                op.backward_refs[k] = op.outputs[k]
-
     def _insert_op(self, index, *args, **kwargs):
         """
         Insert a Operator according to the giving arguments.
@@ -1409,9 +1396,11 @@ class Block(object):
             inputs=kwargs.get("inputs", None),
             outputs=kwargs.get("outputs", None),
             attrs=kwargs.get("attrs", None))
-        self.ops.insert(0, op)
         if _in_imperative_mode():
-            self._trace_op(op, kwargs.get("stop_gradient", False))
+            _imperative_tracer().trace_op(op,
+                                          kwargs.get("stop_gradient", False))
+        else:
+            self.ops.insert(0, op)
         return op
 
     def _sync_with_cpp(self):
diff --git a/python/paddle/fluid/imperative/__init__.py b/python/paddle/fluid/imperative/__init__.py
index 54dc794ea63..034a11e0a60 100644
--- a/python/paddle/fluid/imperative/__init__.py
+++ b/python/paddle/fluid/imperative/__init__.py
@@ -23,7 +23,11 @@ from .layers import *
 from . import nn
 from .nn import *
 
+from . import tracer
+from .tracer import *
+
 __all__ = []
 __all__ += layers.__all__
 __all__ += base.__all__
 __all__ += nn.__all__
+__all__ += tracer.__all__
diff --git a/python/paddle/fluid/imperative/base.py b/python/paddle/fluid/imperative/base.py
index d4525233cc6..174f138bfa2 100644
--- a/python/paddle/fluid/imperative/base.py
+++ b/python/paddle/fluid/imperative/base.py
@@ -16,6 +16,7 @@ import numpy as np
 
 from paddle.fluid import core
 from paddle.fluid import framework
+from .tracer import Tracer
 
 __all__ = ['enabled', 'guard', 'to_variable']
 
@@ -28,7 +29,7 @@ def enabled():
 def guard(place=None):
     train = framework.Program()
     startup = framework.Program()
-    tracer = core.Tracer(train.current_block().desc)
+    tracer = Tracer(train.current_block().desc)
 
     if place is None:
         if core.is_compiled_with_cuda():
-- 
GitLab
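
For context, a minimal usage sketch of the eager path this patch wires up. It assumes the Tracer wrapper lives in python/paddle/fluid/imperative/tracer.py (referenced by the imports above but not included in this diff) and that guard() installs it as the active tracer; the relu call is illustrative, any op routed through Block.append_op would behave the same:

    import numpy as np
    import paddle.fluid as fluid

    # Inside guard(), framework._in_imperative_mode() is true, so
    # Block.append_op hands each op to the Python tracer via trace_op()
    # instead of appending it to Block.ops (the new else branch above).
    with fluid.imperative.guard():
        x = fluid.imperative.to_variable(np.ones([2, 2], dtype='float32'))
        y = fluid.layers.relu(x)  # traced eagerly, not stored on the Block

The new _trace_id property on OpBase (default -1, mirroring forward_id/backward_id) presumably gives the Python-side Tracer a handle for indexing the ops it has recorded, though the tracer module that consumes it is not part of this patch.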