Unverified commit b6ca6a55, authored by: W WeiXin, committed by: GitHub

Allow `PyLayer.forward` to return values of any type. (#32661)

Parent commit: b7ddd7d7
......@@ -115,12 +115,12 @@ py::object PyLayerApply(const platform::Place& place, const py::object& cls,
tuple_result[i].cast<std::shared_ptr<imperative::VarBase>>();
output_vars.push_back(temp_out);
} catch (py::cast_error&) {
PADDLE_THROW(platform::errors::Unimplemented(
"The output of `PyLayer.forward` should be `Tensor`."));
// Only collect Tensor type in 'kwargs' and pass them to backward.
// Ignore other types of input temporarily.
}
} else {
PADDLE_THROW(platform::errors::Unimplemented(
"The output of `PyLayer.forward` can not be `None`."));
// Only collect Tensor type in 'kwargs' and pass them to backward.
// Ignore other types of input temporarily.
}
}
} else {
......@@ -130,14 +130,18 @@ py::object PyLayerApply(const platform::Place& place, const py::object& cls,
result_forward.cast<std::shared_ptr<imperative::VarBase>>();
output_vars.push_back(temp_out);
} catch (py::cast_error&) {
PADDLE_THROW(platform::errors::Unimplemented(
"The output of `PyLayer.forward` should be `Tensor`."));
// Only collect Tensor type in 'kwargs' and pass them to backward.
// Ignore other types of input temporarily.
}
} else {
PADDLE_THROW(platform::errors::Unimplemented(
"The output of `PyLayer.forward` can not be `None`."));
// Only collect Tensor type in 'kwargs' and pass them to backward.
// Ignore other types of input temporarily.
}
}
if (output_vars.size() == 0) {
PADDLE_THROW(platform::errors::InvalidArgument(
"At least one output of `PyLayer.forward` is a `Tensor`."));
}
NameVarBaseMap outs = {{"Out", output_vars}};
......
......@@ -86,6 +86,12 @@ void RunPyObject(py::object *py_object,
}
}
} else {
if (1 != outs->size()) {
PADDLE_THROW(platform::errors::InvalidArgument(
"The number of outputs of `PyLayer.backward` should be %d, but "
"received 1.",
outs->size()));
}
if ((*outs)[0] != nullptr) {
if (Py_None != py_result.ptr()) {
try {
......
......@@ -30,7 +30,7 @@ class TestPyLayer(unittest.TestCase):
y1 = func1(x1)
y2 = func1(x2)
ctx.save_for_backward(y1, y2)
return y1, y2
return y1, 1, y2, None
@staticmethod
def backward(ctx, dy1, dy2):
......@@ -44,7 +44,7 @@ class TestPyLayer(unittest.TestCase):
input1.stop_gradient = False
input2.stop_gradient = False
z = tanh.apply(input1, input1, paddle.tanh, paddle.square)
z = z[0] + z[1]
z = z[0] + z[2]
z.mean().backward()
z2 = paddle.tanh(input2) + paddle.tanh(input2)
......@@ -61,7 +61,7 @@ class TestPyLayer(unittest.TestCase):
y1 = func1(x1)
y2 = func1(x2)
ctx.save_for_backward(y1, y2)
return y1, y2
return 1, None, y1, y2, ''
@staticmethod
def backward(ctx, dy1, dy2):
......@@ -79,7 +79,7 @@ class TestPyLayer(unittest.TestCase):
input3.stop_gradient = True
input4.stop_gradient = True
z = tanh.apply(input1, input3, paddle.tanh, paddle.square)
z = z[0] + z[1]
z = z[2] + z[3]
z.mean().backward()
z2 = paddle.tanh(input2) + paddle.tanh(input4)
......@@ -115,6 +115,27 @@ class TestPyLayer(unittest.TestCase):
self.assertTrue(
np.max(np.abs((input1.grad.numpy() - input2.grad.numpy()))) < 1e-10)
def test_pylayer_num_output_match(self):
class tanh(PyLayer):
@staticmethod
def forward(
ctx,
x1,
x2, ):
return x1 + x2
@staticmethod
def backward(ctx, dy1):
return dy1 + 1
input1 = paddle.randn([2, 3]).astype("float64")
input2 = input1.detach().clone()
input1.stop_gradient = False
input2.stop_gradient = False
z = tanh.apply(input1, input2)
with self.assertRaises(ValueError):
z.mean().backward()
def test_pylayer_dtype(self):
class tanh(PyLayer):
@staticmethod
......@@ -150,21 +171,21 @@ class TestPyLayer(unittest.TestCase):
return args
input1 = paddle.randn([2, 3]).astype("float64")
with self.assertRaises(NotImplementedError):
with self.assertRaises(ValueError):
z = Layer_None1.apply(input1)
class Layer_None2(PyLayer):
@staticmethod
def forward(ctx, *args):
return [None, None]
return [None, args[0]]
@staticmethod
def backward(ctx, *args):
return args
input1 = paddle.randn([2, 3]).astype("float64")
with self.assertRaises(NotImplementedError):
z = Layer_None2.apply(input1)
# return None
z = Layer_None2.apply(input1)
class Layer_one1(PyLayer):
@staticmethod
......@@ -176,21 +197,22 @@ class TestPyLayer(unittest.TestCase):
return args
input1 = paddle.randn([2, 3]).astype("float64")
with self.assertRaises(NotImplementedError):
# At least one output of `PyLayer.backward` is a `Tensor`
with self.assertRaises(ValueError):
z = Layer_one1.apply(input1)
class Layer_one2(PyLayer):
@staticmethod
def forward(ctx, *args):
return [1, 2]
return [1, 2, args[0]]
@staticmethod
def backward(ctx, *args):
return args
input1 = paddle.randn([2, 3]).astype("float64")
with self.assertRaises(NotImplementedError):
z = Layer_one2.apply(input1)
# return int
z = Layer_one2.apply(input1)
class Layer_no_fw(PyLayer):
@staticmethod
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Please finish editing this message first!
To comment, please register.