diff --git a/paddle/fluid/eager/auto_code_generator/generator/codegen_utils.py b/paddle/fluid/eager/auto_code_generator/generator/codegen_utils.py
index f51d270cc8a0c90213b58f30e79efb58f0f2a0ba..0ec006555e47f33e8f74624ace796dde354801f3 100644
--- a/paddle/fluid/eager/auto_code_generator/generator/codegen_utils.py
+++ b/paddle/fluid/eager/auto_code_generator/generator/codegen_utils.py
@@ -67,6 +67,7 @@ ops_to_fill_zero_for_empty_grads = {
     "multiply_grad",
     "divide_grad",
     "matmul_grad",
+    "unbind_grad",
 }
 
 # For API dispatch used at python-level
diff --git a/python/paddle/fluid/tests/unittests/test_unbind_op.py b/python/paddle/fluid/tests/unittests/test_unbind_op.py
index 989eb43b0504d0e0ee58531dbd20816a404883f3..763aa2c3f247d7806fbc4a67883138f5f40c1f1d 100644
--- a/python/paddle/fluid/tests/unittests/test_unbind_op.py
+++ b/python/paddle/fluid/tests/unittests/test_unbind_op.py
@@ -280,5 +280,18 @@ class TestUnbindBool(unittest.TestCase):
         np.testing.assert_array_equal(xs[0].numpy(), [True, True])
 
 
+class TestUnbindGradOptionalInput(unittest.TestCase):
+    def test_grad(self):
+        a = paddle.zeros([3, 2, 3])
+        a.stop_gradient = False
+        x, y = a.unbind(-2)
+        x.sum().backward()  # y_grad is empty
+
+        a_grad = a.detach()
+        a_grad[:, 0, :] = 1
+
+        np.testing.assert_array_equal(a.grad.numpy(), a_grad.numpy())
+
+
 if __name__ == '__main__':
     unittest.main()