Unverified commit a86e8c0e authored by Zhen Wang, committed by GitHub

Add more error info for ops without double grad ops. (#25987)

Parent 75a13114
@@ -887,7 +887,10 @@ void PartialGradTask::RunEachOp(OpBase *op) {
                              op->Attrs(), op->place());
   PADDLE_ENFORCE_NOT_NULL(
       double_grad_node,
-      platform::errors::NotFound("The Op %s doesn't have any grad op.",
+      platform::errors::NotFound("The Op %s doesn't have any grad op. If you "
+                                 "don't intend to calculate higher order "
+                                 "derivatives, please set `create_graph` to "
+                                 "False.",
                                  op->Type()));
   VLOG(10) << "Create " << double_grad_node->size()
            << " double grad op(s) for " << op->Type()
......
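For context, this check fires from the user side as in the minimal sketch below. This is an illustration, not part of the commit; it assumes the `paddle.fluid` dygraph API that the updated tests below also use, with `batch_norm` as an example of an op without a double grad op:

import paddle.fluid as fluid

with fluid.dygraph.guard():
    x = fluid.layers.ones(shape=[2, 3, 2, 2], dtype='float32')
    x.stop_gradient = False
    y = fluid.layers.batch_norm(x)
    # create_graph=True asks the engine to build double grad nodes for
    # every op on the backward path; batch_norm has none, so the
    # PADDLE_ENFORCE_NOT_NULL above raises NotFound with the new message.
    dx = fluid.dygraph.grad(
        outputs=[y], inputs=[x], create_graph=True, retain_graph=True)[0]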
@@ -298,15 +298,16 @@ class TestDygraphDoubleGradSortGradient(TestDygraphDoubleGrad):
 class TestDygraphDoubleGradVisitedUniq(TestCase):
     def test_compare(self):
-        value = np.random.uniform(-1, 1, [10, 3, 32, 32]).astype('float32')
+        value = np.random.uniform(-0.5, 0.5, 100).reshape(10, 2,
+                                                          5).astype("float32")

         def model_f(input):
-            conv2d = fluid.dygraph.Conv2D(3, 2, 3)
+            linear = fluid.dygraph.Linear(5, 3, bias_attr=False)
             for i in range(10):
                 if i == 0:
-                    out = conv2d(input)
+                    out = linear(input)
                 else:
-                    out = out + conv2d(input)
+                    out = out + linear(input)
             return out

         backward_strategy = fluid.dygraph.BackwardStrategy()
@@ -321,8 +322,7 @@ class TestDygraphDoubleGradVisitedUniq(TestCase):
             dx = fluid.dygraph.grad(
                 outputs=[out],
                 inputs=[a],
-                create_graph=True,
-                retain_graph=True,
+                create_graph=False,
                 only_inputs=True,
                 allow_unused=False,
                 backward_strategy=backward_strategy)
@@ -339,9 +339,25 @@ class TestDygraphDoubleGradVisitedUniq(TestCase):
                 grad_2 = a.gradient()

-        self.assertTrue(
-            np.allclose(
-                grad_1, grad_2, rtol=1.e-5, atol=1.e-8, equal_nan=True))
+        self.assertTrue(np.array_equal(grad_1, grad_2))
+
+
+class TestRaiseNoDoubleGradOp(TestCase):
+    def raise_no_grad_op(self):
+        with fluid.dygraph.guard():
+            x = fluid.layers.ones(shape=[2, 3, 2, 2], dtype='float32')
+            x.stop_gradient = False
+            y = paddle.fluid.layers.batch_norm(x)
+            dx = fluid.dygraph.grad(
+                outputs=[y], inputs=[x], create_graph=True,
+                retain_graph=True)[0]
+
+            loss = fluid.layers.reduce_mean(dx)
+            loss.backward()
+
+    def test_raise(self):
+        self.assertRaises(fluid.core.EnforceNotMet, self.raise_no_grad_op)

 if __name__ == '__main__':
......
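As the extended message suggests, first-order gradients of such ops still work when no differentiable graph is requested. A sketch of that workaround, under the same API assumptions as the test above:

import paddle.fluid as fluid

with fluid.dygraph.guard():
    x = fluid.layers.ones(shape=[2, 3, 2, 2], dtype='float32')
    x.stop_gradient = False
    y = fluid.layers.batch_norm(x)
    # With create_graph=False no double grad ops are constructed, so the
    # NotFound check is never reached; dx holds the first-order gradient
    # but cannot itself be differentiated further.
    dx = fluid.dygraph.grad(outputs=[y], inputs=[x], create_graph=False)[0]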