diff --git a/paddle/fluid/eager/saved_tensors_hooks.cc b/paddle/fluid/eager/saved_tensors_hooks.cc
index 6bd62c21611c0c4fcea842b4fc8315b0a281511f..1060e5d463dd7502c1df9e67161ad536b13707be 100644
--- a/paddle/fluid/eager/saved_tensors_hooks.cc
+++ b/paddle/fluid/eager/saved_tensors_hooks.cc
@@ -36,6 +36,9 @@ void* PackHook::operator()(const paddle::experimental::Tensor& tensor) {
   auto args = PyTuple_New(1);
   PyTuple_SET_ITEM(args, 0, paddle::pybind::ToPyObject(tensor));
   PyObject* ret = PyObject_Call(hook_, args, nullptr);
+  PADDLE_ENFORCE_NOT_NULL(ret,
+                          paddle::platform::errors::External(
+                              pybind11::detail::error_string().c_str()));
   Py_XDECREF(args);
   egr::Controller::Instance().SetHasGrad(grad_tmp);
   return reinterpret_cast<void*>(ret);
@@ -49,6 +52,9 @@ void* PackHook::operator()(void* py_tensor) {
   Py_INCREF(reinterpret_cast<PyObject*>(py_tensor));
   PyTuple_SET_ITEM(args, 0, reinterpret_cast<PyObject*>(py_tensor));
   PyObject* ret = PyObject_Call(hook_, args, nullptr);
+  PADDLE_ENFORCE_NOT_NULL(ret,
+                          paddle::platform::errors::External(
+                              pybind11::detail::error_string().c_str()));
   Py_XDECREF(args);
   egr::Controller::Instance().SetHasGrad(grad_tmp);
   return reinterpret_cast<void*>(ret);
@@ -69,6 +75,9 @@ paddle::experimental::Tensor UnPackHook::operator()(void* packed_value) {
   Py_INCREF(reinterpret_cast<PyObject*>(packed_value));
   PyTuple_SET_ITEM(args, 0, reinterpret_cast<PyObject*>(packed_value));
   PyObject* ret = PyObject_Call(hook_, args, nullptr);
+  PADDLE_ENFORCE_NOT_NULL(ret,
+                          paddle::platform::errors::External(
+                              pybind11::detail::error_string().c_str()));
   Py_XDECREF(args);
   egr::Controller::Instance().SetHasGrad(grad_tmp);
 
@@ -91,6 +100,9 @@ void* UnPackHook::operator()(void* packed_value, void* other) {
   Py_INCREF(reinterpret_cast<PyObject*>(packed_value));
   PyTuple_SET_ITEM(args, 0, reinterpret_cast<PyObject*>(packed_value));
   PyObject* ret = PyObject_Call(hook_, args, nullptr);
+  PADDLE_ENFORCE_NOT_NULL(ret,
+                          paddle::platform::errors::External(
+                              pybind11::detail::error_string().c_str()));
   Py_XDECREF(args);
   egr::Controller::Instance().SetHasGrad(grad_tmp);
 
diff --git a/paddle/fluid/pybind/eager_py_layer.cc b/paddle/fluid/pybind/eager_py_layer.cc
index f39dc6d74f4ebe254378c6cc14c545ffc2d4f85b..294da6956abc57c6871d50b5757d3b0fef5fb017 100644
--- a/paddle/fluid/pybind/eager_py_layer.cc
+++ b/paddle/fluid/pybind/eager_py_layer.cc
@@ -139,6 +139,8 @@ PyObject* pylayer_method_apply(PyObject* cls,
   PyLayerObject* ctx = reinterpret_cast<PyLayerObject*>(
       PyObject_CallFunctionObjArgs(backward_function, nullptr));
   if (!ctx) {
+    PADDLE_THROW(paddle::platform::errors::External(
+        pybind11::detail::error_string().c_str()));
     return nullptr;
   }
   VLOG(6) << "PyLayer construct PyLayerContext finish...";
diff --git a/paddle/fluid/pybind/eager_utils.cc b/paddle/fluid/pybind/eager_utils.cc
index 1237e4092f02f5c1a326f614e38833643d8a63a5..b4f65faf64a5e6987d87c032657a319864922265 100644
--- a/paddle/fluid/pybind/eager_utils.cc
+++ b/paddle/fluid/pybind/eager_utils.cc
@@ -1525,8 +1525,8 @@ paddle::experimental::Tensor PyTensorHook::operator()(
   }
 
   PADDLE_ENFORCE_NOT_NULL(res,
-                          platform::errors::Unavailable(
-                              "Hook function of Tensor return a nullptr."));
+                          paddle::platform::errors::External(
+                              pybind11::detail::error_string().c_str()));
   if (res == Py_None) {
     return var;
   }
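
For reference, below is a minimal, self-contained sketch of the error-propagation pattern these hunks apply. It is not Paddle code: CallHookOrThrow, the embedded "hook", and the use of std::runtime_error in place of PADDLE_ENFORCE_NOT_NULL / paddle::platform::errors::External are assumptions for illustration only. The underlying idea is the same: a nullptr result from PyObject_Call means a Python exception is pending, and pybind11::detail::error_string() (which this diff relies on, and which is an internal pybind11 helper) renders that pending error as text so it can be surfaced through the C++ error instead of being silently dropped.

// Minimal sketch (not Paddle code): surface a pending Python error from a
// failed PyObject_Call as a C++ exception, mirroring the pattern in the diff.
// Build against pybind11 with embedding support (pybind11::embed).
#include <pybind11/embed.h>

#include <iostream>
#include <stdexcept>

namespace py = pybind11;

// Hypothetical helper standing in for
// PADDLE_ENFORCE_NOT_NULL(ret, paddle::platform::errors::External(...)).
PyObject* CallHookOrThrow(PyObject* hook, PyObject* args) {
  PyObject* ret = PyObject_Call(hook, args, nullptr);
  if (ret == nullptr) {
    // nullptr means a Python exception is pending; error_string() formats
    // its type, message, and traceback into a std::string.
    throw std::runtime_error(pybind11::detail::error_string());
  }
  return ret;  // On success the caller owns the new reference.
}

int main() {
  py::scoped_interpreter guard;
  // A hook that always raises, to exercise the error path.
  py::exec("def hook(x):\n    raise ValueError('hook failed')");
  py::object hook = py::globals()["hook"];
  py::tuple args = py::make_tuple(1);
  try {
    CallHookOrThrow(hook.ptr(), args.ptr());
  } catch (const std::runtime_error& e) {
    // Prints the Python ValueError instead of a generic "returned nullptr".
    std::cerr << e.what() << std::endl;
    PyErr_Clear();  // error_string() may leave the Python error set; tidy up.
  }
  return 0;
}

The diff applies this check at each PyObject_Call site in PackHook / UnPackHook, throws on a failed PyLayerContext construction in pylayer_method_apply, and replaces the generic "Hook function of Tensor return a nullptr." message in PyTensorHook with the actual Python error text.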