Commit 0d0bc612 authored by Xin Pan

update api

test=develop
Parent 875a07c3
@@ -153,7 +153,10 @@ class Layer {
     return vars;
   }

-  virtual void Backward() { LOG(ERROR) << "To support customize"; }
+  virtual std::vector<VarBase> Backward(const std::vector<VarBase>& inputs) {
+    std::vector<VarBase> vars;
+    return vars;
+  }
 };

 }  // namespace imperative
...
@@ -22,7 +22,7 @@ limitations under the License. */
 namespace paddle {
 namespace pybind {

-class PyLayer : public imperative::Layer {
+class Layer : public imperative::Layer {
  public:
  using imperative::Layer::Layer;  // Inherit constructors
@@ -32,8 +32,10 @@ class PyLayer : public imperative::Layer {
         inputs);  // NOLINT
   }

-  void Backward() override {
-    PYBIND11_OVERLOAD(void, Layer, Backward, );  // NOLINT
+  std::vector<imperative::VarBase> Backward(
+      const std::vector<imperative::VarBase>& inputs) override {
+    PYBIND11_OVERLOAD(std::vector<imperative::VarBase>, Layer, Backward,
+                      inputs);  // NOLINT
   }
 };
...
@@ -170,14 +170,17 @@ PYBIND11_MODULE(core, m) {
       },
       py::return_value_policy::reference);

-  py::class_<imperative::Layer, PyLayer /* <--- trampoline*/> layer(m, "Layer");
+  py::class_<imperative::Layer, Layer /* <--- trampoline*/> layer(m, "Layer");
   layer.def(py::init<>())
       .def("forward",
            [](imperative::Layer &self,
               const std::vector<imperative::VarBase> &inputs) {
             return self.Forward(inputs);
           })
-      .def("backward", &imperative::Layer::Backward);
+      .def("backward", [](imperative::Layer &self,
+                          const std::vector<imperative::VarBase> &inputs) {
+        return self.Backward(inputs);
+      });
   BindTracer(&m);

   py::class_<Tensor>(m, "Tensor", py::buffer_protocol())
...
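Taken together with the layer.h hunk above, this binding means forward and backward on a plain core.Layer both accept a list of VarBases and fall back to the default C++ implementations. Below is a minimal sketch of exercising the new surface from Python; the sketch itself is not part of the patch and assumes a fluid build that includes it.

# Sketch only: calling the default Forward/Backward of core.Layer through
# the new bindings; mirrors the existing test that calls cl.forward([]).
import paddle.fluid as fluid
from paddle.fluid import core

with fluid.imperative.guard():
    cl = core.Layer()
    cl.forward([])   # default C++ Forward: returns an empty list of VarBases
    cl.backward([])  # default C++ Backward now takes inputs and is expected
                     # to return an empty list as well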
@@ -20,10 +20,12 @@ from paddle.fluid import core
 from paddle.fluid import framework
 from paddle.fluid.imperative import base

-__all__ = ['PyLayer']
+__all__ = ['Layer']


-class PyLayer(core.Layer):
+class Layer(core.Layer):
+    """Layers composed of operators."""
+
     def __init__(self, dtype=core.VarDesc.VarType.FP32, name=None):
         self._once_built = False
         self._dtype = dtype
@@ -37,8 +39,23 @@ class PyLayer(core.Layer):
         self._once_built = True

         outputs = self.forward(*inputs)
         return outputs

     def forward(self, *inputs):
         raise NotImplementedError
+
+    def backward(self, *inputs):
+        raise ValueError("Layer shouldn't implement backward")
+
+
+class PyLayer(core.Layer):
+    """Layers composed of user-defined python codes."""
+
+    def __call__(self, *inputs):
+        pass
+
+    def forward(self, *inputs):
+        raise NotImplementedError
+
+    def backward(self, *inputs):
+        raise NotImplementedError
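The user-visible effect of this hunk: what used to be PyLayer is now imperative.Layer, a layer composed of framework operators that only implements forward, while the name PyLayer is reassigned to a placeholder base for layers written as user-defined python forward/backward. A hedged sketch of the resulting contract follows; MyOpLayer and MyPyOp are hypothetical names, not part of the patch.

# Sketch only: illustrates the split between Layer (operator-composed,
# forward only) and the new PyLayer (python-defined forward/backward).
import paddle.fluid as fluid
from paddle.fluid.imperative.layers import PyLayer


class MyOpLayer(fluid.imperative.Layer):
    def forward(self, inputs):
        # compose framework operators here, e.g. fluid.layers.relu(...)
        return inputs


class MyPyOp(PyLayer):
    def forward(self, inputs):
        # user-defined python computation
        return inputs

    def backward(self, douts):
        # user-defined python gradient computation
        return douts


with fluid.imperative.guard():
    l = fluid.imperative.Layer()
    # The base Layer leaves forward abstract and now rejects backward.
    try:
        l.forward([])
    except NotImplementedError:
        pass
    try:
        l.backward()
    except ValueError:
        pass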
@@ -30,7 +30,7 @@ __all__ = [
 ]


-class Conv2D(layers.PyLayer):
+class Conv2D(layers.Layer):
     def __init__(self,
                  num_channels,
                  num_filters,
@@ -143,7 +143,7 @@ class Conv2D(layers.PyLayer):
         return self._helper.append_activation(pre_act)


-class Pool2D(layers.PyLayer):
+class Pool2D(layers.Layer):
     def __init__(self,
                  pool_size=-1,
                  pool_type="max",
@@ -205,7 +205,7 @@ class Pool2D(layers.PyLayer):
         return pool_out


-class FC(layers.PyLayer):
+class FC(layers.Layer):
     def __init__(self,
                  size,
                  param_attr=None,
...
@@ -22,7 +22,7 @@ from paddle.fluid.imperative.nn import FC
 from test_imperative_base import new_program_scope


-class MyLayer(fluid.imperative.PyLayer):
+class MyLayer(fluid.imperative.Layer):
     def __init__(self):
         super(MyLayer, self).__init__()
@@ -34,7 +34,7 @@ class MyLayer(fluid.imperative.PyLayer):
         return [x]


-class MLP(fluid.imperative.PyLayer):
+class MLP(fluid.imperative.Layer):
     def __init__(self):
         super(MLP, self).__init__()
         self._fc1 = FC(3,
@@ -56,7 +56,7 @@ class TestImperative(unittest.TestCase):
         with fluid.imperative.guard():
             cl = core.Layer()
             cl.forward([])
-            l = fluid.imperative.PyLayer()
+            l = fluid.imperative.Layer()
             self.assertRaises(NotImplementedError, l.forward, [])

     def test_layer_in_out(self):
...
@@ -26,7 +26,7 @@ from paddle.fluid.imperative.base import to_variable
 from test_imperative_base import new_program_scope


-class SimpleImgConvPool(fluid.imperative.PyLayer):
+class SimpleImgConvPool(fluid.imperative.Layer):
     def __init__(self,
                  num_channels,
                  num_filters,
@@ -72,7 +72,7 @@ class SimpleImgConvPool(fluid.imperative.PyLayer):
         return x


-class MNIST(fluid.imperative.PyLayer):
+class MNIST(fluid.imperative.Layer):
     def __init__(self, param_attr=None, bias_attr=None):
         super(MNIST, self).__init__()
...