diff --git a/paddle/fluid/pybind/eager_method.cc b/paddle/fluid/pybind/eager_method.cc
index ae00953f2cf4118f7e55486341b228656978ba7e..f41e84537a96c7fc7ecfa01b319974b5b8960198 100644
--- a/paddle/fluid/pybind/eager_method.cc
+++ b/paddle/fluid/pybind/eager_method.cc
@@ -1635,6 +1635,26 @@ static PyObject* tensor__grad_value(TensorObject* self, PyObject* args,
   EAGER_CATCH_AND_THROW_RETURN_NULL
 }
 
+static PyObject* tensor__unset_fake_empty(TensorObject* self, PyObject* args,
+                                          PyObject* kwargs) {
+  EAGER_TRY
+  paddle::experimental::Tensor* grad =
+      egr::EagerUtils::mutable_grad(self->tensor);
+  PADDLE_ENFORCE_EQ(grad != nullptr, true,
+                    platform::errors::InvalidArgument(
+                        "Detected NULL grad. Please check if you have manually "
+                        "cleared the grad inside autograd_meta"));
+
+  bool is_leaf = egr::egr_utils_api::IsLeafTensor(self->tensor);
+  if (is_leaf) {
+    std::static_pointer_cast<egr::GradNodeAccumulation>(
+        egr::EagerUtils::grad_node(self->tensor))
+        ->SetFakeEmpty(false);
+  }
+  RETURN_PY_NONE
+  EAGER_CATCH_AND_THROW_RETURN_NULL
+}
+
 #if defined(PADDLE_WITH_CUDA)
 static PyObject* tensor_method__uva(TensorObject* self, PyObject* args,
                                     PyObject* kwargs) {
@@ -1791,6 +1811,8 @@ PyMethodDef variable_methods[] = {
      METH_VARARGS | METH_KEYWORDS, NULL},
     {"_grad_value", (PyCFunction)(void (*)(void))tensor__grad_value,
      METH_VARARGS | METH_KEYWORDS, NULL},
+    {"_unset_fake_empty", (PyCFunction)(void (*)(void))tensor__unset_fake_empty,
+     METH_VARARGS | METH_KEYWORDS, NULL},
 #if defined(PADDLE_WITH_CUDA)
     {"_tensor_uva", (PyCFunction)(void (*)(void))tensor_method__uva,
      METH_VARARGS | METH_KEYWORDS, NULL},
diff --git a/python/paddle/fluid/dygraph/varbase_patch_methods.py b/python/paddle/fluid/dygraph/varbase_patch_methods.py
index 2422c68622a009a67a65135a874fe0fd1fb91ec9..9eb044188f0d150bc4768245816bc30a620f4223 100644
--- a/python/paddle/fluid/dygraph/varbase_patch_methods.py
+++ b/python/paddle/fluid/dygraph/varbase_patch_methods.py
@@ -804,6 +804,7 @@ def monkey_patch_varbase():
     def _set_grad_ivar(self, value):
         if isinstance(self, EagerParamBase):
             self.grad = value
+            self._unset_fake_empty()
         else:
             raise TypeError(
                 "_set_grad_ivar is only supported for Parameter Tensor")
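
Context for the change, with a minimal usage sketch. The gradient-scattering scenario and the example tensors below are assumptions for illustration; only _set_grad_ivar and _unset_fake_empty come from the diff. _set_grad_ivar lets framework code assign a gradient to an EagerParamBase directly, e.g. when a distributed strategy writes reduced gradients back onto parameters. A leaf tensor's GradNodeAccumulation tracks whether its grad buffer is "fake empty"; if that flag stays set after a manual assignment, a later backward() can replace the assigned value instead of accumulating into it. The new _unset_fake_empty binding clears the flag, and the patched _set_grad_ivar calls it right after the assignment:

    import paddle

    linear = paddle.nn.Linear(4, 4)
    param = linear.weight  # EagerParamBase, a leaf tensor

    # Manually seed the gradient through the patched method. With this
    # change, _set_grad_ivar also calls _unset_fake_empty(), so the leaf's
    # GradNodeAccumulation treats the buffer as real data from here on.
    param._set_grad_ivar(paddle.ones_like(param))

    # A subsequent backward() should now accumulate into param.grad rather
    # than overwrite the value assigned above.
    loss = linear(paddle.randn([2, 4])).sum()
    loss.backward()
    print(param.grad)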