From b629133375728fcb0be71c2d6fcf7c9d83a2b391 Mon Sep 17 00:00:00 2001
From: Xin Pan
Date: Tue, 8 Jan 2019 10:52:03 +0800
Subject: [PATCH] checkpoint runnable PyLayer

test=develop
---
 paddle/fluid/imperative/layer.h               | 49 +++++++++++++++++-
 paddle/fluid/pybind/imperative.h              |  6 ---
 paddle/fluid/pybind/pybind.cc                 | 21 +++++---
 python/paddle/fluid/imperative/layers.py      | 22 ++++++---
 .../fluid/tests/unittests/test_imperative.py  | 26 ++++++++++
 5 files changed, 101 insertions(+), 23 deletions(-)

diff --git a/paddle/fluid/imperative/layer.h b/paddle/fluid/imperative/layer.h
index 377ac3e1c5f..d76512af046 100644
--- a/paddle/fluid/imperative/layer.h
+++ b/paddle/fluid/imperative/layer.h
@@ -17,6 +17,9 @@
 #include <map>
 #include <string>
 #include <vector>
+#include "pybind11/pybind11.h"
+
+#include "Python.h"
 #include "paddle/fluid/framework/op_desc.h"
 #include "paddle/fluid/framework/operator.h"
 #include "paddle/fluid/framework/var_desc.h"
@@ -25,6 +28,8 @@
 namespace paddle {
 namespace imperative {
 
+namespace py = ::pybind11;
+
 class PreparedOp {
  public:
   PreparedOp(const framework::OperatorBase& op,
@@ -152,10 +157,48 @@ class Layer {
     std::vector<VarBase> vars;
     return vars;
   }
+};
 
-  virtual std::vector<VarBase> Backward(const std::vector<VarBase>& inputs) {
-    std::vector<VarBase> vars;
-    return vars;
+static void CallPythonFunc(py::object* callable,
+                           const std::vector<framework::LoDTensor>& ins,
+                           std::vector<framework::LoDTensor*>* outs) {
+  py::gil_scoped_acquire guard;
+  py::tuple in_args(ins.size());
+  for (size_t i = 0; i < ins.size(); ++i) {
+    in_args[i] = ins[i].IsInitialized() ? py::cast(ins[i]) : py::cast(nullptr);
+  }
+
+  auto ret = (*callable)(in_args);
+  auto ret_tuple = py::cast<py::tuple>(ret);
+  size_t ret_num = py::len(ret_tuple);
+  for (size_t i = 0; i < ret_num; ++i) {
+    try {
+      auto* py_out_tensor = py::cast<framework::LoDTensor*>(ret_tuple[i]);
+      PADDLE_ENFORCE_NOT_NULL(py_out_tensor,
+                              "Output tensor %d should not be nullptr", i);
+      outs->push_back(py_out_tensor);
+    } catch (py::cast_error&) {
+      PADDLE_THROW("The %d-th output must be LoDTensor", i);
+    }
+  }
+}
+
+class PyLayer {
+ public:
+  virtual ~PyLayer() {}
+
+  static std::vector<VarBase> Apply(py::object* callable,
+                                    const std::vector<VarBase>& inputs) {
+    std::vector<VarBase> outputs;
+    std::vector<framework::LoDTensor> tensor_inputs;
+    std::vector<framework::LoDTensor*> tensor_outputs;
+
+    for (const VarBase& in : inputs) {
+      tensor_inputs.push_back(in.var_->Get<framework::LoDTensor>());
+    }
+
+    CallPythonFunc(callable, tensor_inputs, &tensor_outputs);
+    return outputs;
   }
 };
 
diff --git a/paddle/fluid/pybind/imperative.h b/paddle/fluid/pybind/imperative.h
index ef0d643954c..f947b743f99 100644
--- a/paddle/fluid/pybind/imperative.h
+++ b/paddle/fluid/pybind/imperative.h
@@ -31,12 +31,6 @@ class Layer : public imperative::Layer {
     PYBIND11_OVERLOAD(std::vector<imperative::VarBase>, Layer, Forward,
                       inputs);  // NOLINT
   }
-
-  std::vector<imperative::VarBase> Backward(
-      const std::vector<imperative::VarBase>& inputs) override {
-    PYBIND11_OVERLOAD(std::vector<imperative::VarBase>, Layer, Backward,
-                      inputs);  // NOLINT
-  }
 };
 
 class PyOpBase : public imperative::OpBase {
diff --git a/paddle/fluid/pybind/pybind.cc b/paddle/fluid/pybind/pybind.cc
index 6e3c52da89f..d065818bc86 100644
--- a/paddle/fluid/pybind/pybind.cc
+++ b/paddle/fluid/pybind/pybind.cc
@@ -172,15 +172,20 @@ PYBIND11_MODULE(core, m) {
 
   py::class_<imperative::Layer, Layer /* <--- trampoline*/> layer(m, "Layer");
   layer.def(py::init<>())
-      .def("forward",
-           [](imperative::Layer &self,
-              const std::vector<imperative::VarBase> &inputs) {
-             return self.Forward(inputs);
-           })
-      .def("backward", [](imperative::Layer &self,
-                          const std::vector<imperative::VarBase> &inputs) {
-        return self.Backward(inputs);
+      .def("forward", [](imperative::Layer &self,
+                         const std::vector<imperative::VarBase> &inputs) {
+        return self.Forward(inputs);
       });
+
+  py::class_<imperative::PyLayer>(m, "PyLayer")
+      .def(py::init<>())
+      .def_static("apply",
+                  [](py::object *callable,
+                     const std::vector<imperative::VarBase> &inputs)
+                      -> std::vector<imperative::VarBase> {
+                    return imperative::PyLayer::Apply(callable, inputs);
+                  });
+
   BindTracer(&m);
 
   py::class_<Tensor>(m, "Tensor", py::buffer_protocol())
diff --git a/python/paddle/fluid/imperative/layers.py b/python/paddle/fluid/imperative/layers.py
index 1ebf79e0529..76964273666 100644
--- a/python/paddle/fluid/imperative/layers.py
+++ b/python/paddle/fluid/imperative/layers.py
@@ -20,7 +20,7 @@ from paddle.fluid import core
 from paddle.fluid import framework
 from paddle.fluid.imperative import base
 
-__all__ = ['Layer']
+__all__ = ['Layer', 'PyLayer']
 
 
 class Layer(core.Layer):
@@ -48,14 +48,24 @@ class Layer(core.Layer):
         raise ValueError("Layer shouldn't implement backward")
 
 
-class PyLayer(core.Layer):
+# TODO(panyx0718): Inherit from C++ base class.
+class PyLayer(core.PyLayer):
     """Layers composed of user-defined python codes."""
 
-    def __call__(self, *inputs):
-        pass
+    def __init__(self):
+        super(PyLayer, self).__init__()
 
-    def forward(self, *inputs):
+    @staticmethod
+    def forward(inputs):
         raise NotImplementedError
 
-    def backward(self, *inputs):
+    @staticmethod
+    def backward(inputs):
         raise NotImplementedError
+
+    @classmethod
+    def __call__(cls, inputs):
+        inputs = map(base.to_variable, inputs)
+        inputs = [x._ivar for x in inputs]
+        sys.stderr.write('%s\n' % inputs)
+        return core.PyLayer.apply(cls.forward, inputs)
diff --git a/python/paddle/fluid/tests/unittests/test_imperative.py b/python/paddle/fluid/tests/unittests/test_imperative.py
index 44005411d1f..ae99fb82e33 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative.py
@@ -15,6 +15,7 @@
 import contextlib
 import unittest
 import numpy as np
+import sys
 
 import paddle.fluid as fluid
 from paddle.fluid import core
@@ -34,6 +35,24 @@ class MyLayer(fluid.imperative.Layer):
         return [x]
 
 
+class MyPyLayer(fluid.imperative.PyLayer):
+    def __init__(self):
+        super(MyPyLayer, self).__init__()
+
+    @staticmethod
+    def forward(inputs):
+        sys.stderr.write('before forward\n')
+        ret = np.tanh(inputs[0])
+        sys.stderr.write('after forward: %s\n' % ret)
+        tensor = core.LoDTensor()
+        tensor.set(ret, core.CPUPlace())
+        return tuple([tensor])
+
+    @staticmethod
+    def backward(douts, outs):
+        return np.array(douts[0]) * (1 - np.square(np.array(outs[0])))
+
+
 class MLP(fluid.imperative.Layer):
     def __init__(self):
         super(MLP, self).__init__()
@@ -59,6 +78,13 @@ class TestImperative(unittest.TestCase):
         l = fluid.imperative.Layer()
         self.assertRaises(NotImplementedError, l.forward, [])
 
+    def test_pylayer(self):
+        with fluid.imperative.guard():
+            my_py_layer = MyPyLayer()
+            out = my_py_layer([np.ones([2, 2], np.float32)])
+            sys.stderr.write('%s\n' % np.array(out))
+            # out.backward()
+
     def test_layer_in_out(self):
         np_inp = np.array([1.0, 2.0, -1.0], dtype=np.float32)
         with fluid.imperative.guard():
-- 
GitLab
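
Note: the sketch below shows how the PyLayer API introduced by this patch is
meant to be used. It is distilled from test_pylayer above; the class name
TanhPyLayer is illustrative, and it assumes a paddle.fluid build that includes
this change. Also note that at this checkpoint imperative::PyLayer::Apply still
returns an empty `outputs` vector, so the Python-side result cannot yet drive
autograd (which is why the test leaves out.backward() commented out).

    import numpy as np
    import paddle.fluid as fluid
    from paddle.fluid import core

    class TanhPyLayer(fluid.imperative.PyLayer):
        """Illustrative PyLayer: forward/backward are @staticmethods that
        consume and produce LoDTensors, as core.PyLayer.apply expects."""

        @staticmethod
        def forward(inputs):
            ret = np.tanh(inputs[0])          # plain numpy computation
            tensor = core.LoDTensor()
            tensor.set(ret, core.CPUPlace())  # wrap the result as a LoDTensor
            return tuple([tensor])

        @staticmethod
        def backward(douts, outs):
            # d/dx tanh(x) = 1 - tanh(x)^2; not yet invoked at this checkpoint
            return np.array(douts[0]) * (1 - np.square(np.array(outs[0])))

    with fluid.imperative.guard():
        layer = TanhPyLayer()
        # __call__ converts the numpy inputs to variables and dispatches to
        # core.PyLayer.apply(cls.forward, ...)
        out = layer([np.ones([2, 2], np.float32)])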