From 2349acea48f76cc527d9a47ff6a0b5da5e77aa2f Mon Sep 17 00:00:00 2001
From: Xin Pan
Date: Tue, 8 Jan 2019 16:41:20 +0800
Subject: [PATCH] checkpoint

test=develop
---
 paddle/fluid/imperative/layer.cc         | 19 +++++++++++++++++
 paddle/fluid/imperative/layer.h          | 27 ++++++++++++------------
 paddle/fluid/imperative/tracer.h         | 15 +++++++++++++
 paddle/fluid/pybind/imperative.cc        |  4 +++-
 paddle/fluid/pybind/pybind.cc            | 24 +++++++++++++++------
 python/paddle/fluid/imperative/layers.py | 13 +++++++++---
 6 files changed, 78 insertions(+), 24 deletions(-)

diff --git a/paddle/fluid/imperative/layer.cc b/paddle/fluid/imperative/layer.cc
index 9813149865..53e949d9f9 100644
--- a/paddle/fluid/imperative/layer.cc
+++ b/paddle/fluid/imperative/layer.cc
@@ -27,6 +27,8 @@
 namespace paddle {
 namespace imperative {
 
+std::map<int, py::object> py_funcs_;
+
 using framework::Variable;
 
 void AddTo(Variable* src, Variable* dst) {
@@ -183,5 +185,22 @@ void VarBase::RunBackward() {
   Autograd().RunBackward(this);
 }
 
+void PyLayer::RegisterFunc(int func_id, const py::object& py_func) {
+  py_funcs_[func_id] = py_func;
+}
+
+std::vector<VarBase*> PyLayer::Apply(int func_id,
+                                     const std::vector<VarBase>& inputs) {
+  std::vector<framework::LoDTensor> tensor_inputs;
+  std::vector<VarBase*> ret;
+
+  for (const VarBase& in : inputs) {
+    tensor_inputs.push_back(in.var_->Get<framework::LoDTensor>());
+  }
+  PADDLE_ENFORCE(py_funcs_.find(func_id) != py_funcs_.end());
+  CallPythonFunc(py_funcs_[func_id], tensor_inputs, &ret);
+  return ret;
+}
+
 }  // namespace imperative
 }  // namespace paddle
diff --git a/paddle/fluid/imperative/layer.h b/paddle/fluid/imperative/layer.h
index a0eee357c3..52cbb2c015 100644
--- a/paddle/fluid/imperative/layer.h
+++ b/paddle/fluid/imperative/layer.h
@@ -82,6 +82,7 @@ class PreparedOp {
   framework::OperatorWithKernel::OpKernelFunc func;
   platform::DeviceContext* dev_ctx;
 };
+
 class OpBase;
 
 class VarBase {
@@ -128,7 +129,11 @@ class VarBase {
 
 class OpBase {
  public:
-  OpBase() : op_desc_(nullptr), grad_op_desc_(nullptr) {}
+  OpBase()
+      : op_desc_(nullptr),
+        grad_op_desc_(nullptr),
+        forward_id_(-1),
+        backward_id_(-1) {}
 
   virtual ~OpBase() {
     if (grad_op_desc_) delete grad_op_desc_;
@@ -139,6 +144,9 @@ class OpBase {
   framework::OpDesc* op_desc_;
   framework::OpDesc* grad_op_desc_;
 
+  int forward_id_;
+  int backward_id_;
+
   std::map<std::string, std::vector<VarBase*>> input_vars_;
   std::map<std::string, std::vector<VarBase*>> output_vars_;
   std::map<std::string, std::vector<OpBase*>> pre_ops_;
@@ -159,7 +167,7 @@ class Layer {
   }
 };
 
-static void CallPythonFunc(py::object* callable,
+static void CallPythonFunc(const py::object& callable,
                            const std::vector<framework::LoDTensor>& ins,
                            std::vector<VarBase*>* outs) {
   py::gil_scoped_acquire guard;
@@ -169,7 +177,7 @@ static void CallPythonFunc(py::object* callable,
   }
 
   // TODO(panyx0718): Who owns the returned LoDTensor.
-  auto ret = (*callable)(in_args);
+  auto ret = callable(in_args);
   auto ret_tuple = py::cast<py::tuple>(ret);
   size_t ret_num = py::len(ret_tuple);
   for (size_t i = 0; i < ret_num; ++i) {
@@ -192,17 +200,10 @@ class PyLayer {
  public:
   virtual ~PyLayer() {}
 
-  static std::vector<VarBase*> Apply(py::object* callable,
-                                     const std::vector<VarBase>& inputs) {
-    std::vector<framework::LoDTensor> tensor_inputs;
-    std::vector<VarBase*> ret;
+  static void RegisterFunc(int func_id, const py::object& py_func);
 
-    for (const VarBase& in : inputs) {
-      tensor_inputs.push_back(in.var_->Get<framework::LoDTensor>());
-    }
-    CallPythonFunc(callable, tensor_inputs, &ret);
-    return ret;
-  }
+  static std::vector<VarBase*> Apply(int func_id,
+                                     const std::vector<VarBase>& inputs);
 };
 
 }  // namespace imperative
diff --git a/paddle/fluid/imperative/tracer.h b/paddle/fluid/imperative/tracer.h
index c6eff86fac..1954c7a68a 100644
--- a/paddle/fluid/imperative/tracer.h
+++ b/paddle/fluid/imperative/tracer.h
@@ -172,6 +172,21 @@ class Tracer {
     op->block_ = block;
   }
 
+  std::vector<VarBase*> PyTrace(OpBase* op,
+                                const std::vector<VarBase>& inputs) {
+    std::vector<VarBase*> outputs = PyLayer::Apply(op->forward_id_, inputs);
+    /*
+    for (const VarBase& inp : inputs) {
+      if (inp.pre_op_) {
+        op->pre_ops_[it.first].push_back(inp->pre_op_);
+        op->pre_ops_out_idx_[it.first].push_back(inp->pre_op_out_idx_);
+      } else {
+        op->pre_ops_[it.first].push_back(nullptr);
+      }
+    }*/
+    return outputs;
+  }
+
  private:
   framework::BlockDesc* root_block_;
 };
diff --git a/paddle/fluid/pybind/imperative.cc b/paddle/fluid/pybind/imperative.cc
index 5c1c7478f4..dbc7843caa 100644
--- a/paddle/fluid/pybind/imperative.cc
+++ b/paddle/fluid/pybind/imperative.cc
@@ -26,7 +26,9 @@ void BindTracer(pybind11::module *m) {
           [](imperative::Tracer &self, framework::BlockDesc *root_block) {
             new (&self) imperative::Tracer(root_block);
           })
-      .def("trace", &imperative::Tracer::Trace);
+      .def("trace", &imperative::Tracer::Trace)
+      .def("py_trace", &imperative::Tracer::PyTrace,
+           pybind11::return_value_policy::take_ownership);
 }
 
 }  // namespace pybind
diff --git a/paddle/fluid/pybind/pybind.cc b/paddle/fluid/pybind/pybind.cc
index 684b931ee8..455bcc6a41 100644
--- a/paddle/fluid/pybind/pybind.cc
+++ b/paddle/fluid/pybind/pybind.cc
@@ -168,6 +168,13 @@ PYBIND11_MODULE(core, m) {
               self.op_desc_ = op_desc;
             }
           },
+          py::return_value_policy::reference)
+      .def_property(
+          "forward_id",
+          [](const imperative::OpBase &self) { return self.forward_id_; },
+          [](imperative::OpBase &self, int forward_id) {
+            self.forward_id_ = forward_id;
+          },
           py::return_value_policy::reference);
 
   py::class_<imperative::Layer, Layer /* <--- trampoline*/> layer(m, "Layer");
@@ -179,13 +186,16 @@ PYBIND11_MODULE(core, m) {
 
   py::class_<imperative::PyLayer>(m, "PyLayer")
       .def(py::init<>())
-      .def_static("apply",
-                  [](py::object *callable,
-                     const std::vector<imperative::VarBase> &inputs)
-                      -> std::vector<imperative::VarBase *> {
-                    return imperative::PyLayer::Apply(callable, inputs);
-                  },
-                  py::return_value_policy::take_ownership);
+      .def_static(
+          "apply",
+          [](int func_id, const std::vector<imperative::VarBase> &inputs)
+              -> std::vector<imperative::VarBase *> {
+            return imperative::PyLayer::Apply(func_id, inputs);
+          },
+          py::return_value_policy::take_ownership)
+      .def_static("register_func", [](int func_id, const py::object &callable) {
+        imperative::PyLayer::RegisterFunc(func_id, callable);
+      });
 
   BindTracer(&m);
 
diff --git a/python/paddle/fluid/imperative/layers.py b/python/paddle/fluid/imperative/layers.py
index 06b6d7ac06..40ec312b69 100644
--- a/python/paddle/fluid/imperative/layers.py
+++ b/python/paddle/fluid/imperative/layers.py
@@ -48,7 +48,6 @@ class Layer(core.Layer):
         raise ValueError("Layer shouldn't implement backward")
 
 
-# TODO(panyx0718): Inherit from C++ base class.
 class PyLayer(core.PyLayer):
     """Layers composed of user-defined python codes."""
 
@@ -65,13 +64,21 @@ class PyLayer(core.PyLayer):
 
     @classmethod
     def __call__(cls, inputs):
+        tracer = framework._imperative_tracer()
+        block = framework.default_main_program().current_block()
         inputs = map(base.to_variable, inputs)
         inputs = [x._ivar for x in inputs]
-        ivars = core.PyLayer.apply(cls.forward, inputs)
+
+        PyLayer.register_func(1, cls.forward)
+
+        iop = core.OpBase()
+        iop.forward_id = 1
+        block.ops.append(iop)
+        ivars = tracer.py_trace(iop, inputs)
+        # ivars = core.PyLayer.apply(cls.forward, inputs)
         ret = []
         for ivar in ivars:
             tensor = ivar.value.get_tensor()
-            block = framework.default_main_program().current_block()
             py_var = framework.Variable(
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
--
GitLab
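
Below is a minimal usage sketch of the id-based PyLayer flow this patch wires up. The Double layer, its tensor-doubling forward, and the LoDTensor construction are illustrative assumptions rather than part of the patch; the call path itself follows the patched layers.py, where __call__ registers cls.forward under a hard-coded func id, appends a core.OpBase carrying that forward_id to the current block, and dispatches through Tracer::PyTrace to PyLayer::Apply.

import numpy as np

import paddle.fluid as fluid
from paddle.fluid import core
from paddle.fluid.imperative.layers import PyLayer


class Double(PyLayer):
    """Hypothetical example layer: doubles each input tensor."""

    @staticmethod
    def forward(inputs):
        # CallPythonFunc hands forward() the inputs as LoDTensors and
        # expects a tuple of LoDTensors back.
        outs = []
        for inp in inputs:
            t = core.LoDTensor()
            t.set(np.array(inp) * 2, core.CPUPlace())
            outs.append(t)
        return tuple(outs)


with fluid.imperative.guard():
    # __call__ registers Double.forward under func id 1, appends an OpBase
    # with forward_id=1 to the current block, and runs tracer.py_trace,
    # which invokes PyLayer::Apply on the C++ side.
    outs = Double()([np.ones([2, 2], dtype=np.float32)])
    # outs is a list of framework.Variable objects wrapping the results.

Note that at this checkpoint the func id is fixed at 1 and the pre-op bookkeeping in PyTrace is still commented out, so gradients do not yet flow through a PyLayer.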