Unverified · Commit 9db219d1 authored by wanghuancoder, committed by GitHub

Eager tensor doc2 (#55886)

* add docstrings for three eager methods

* test=docs_preview

* update element size bind

* update docs of numpy, clone, clear_gradient, element_size; test=docs_preview

* refine clear_gradient docs; test=docs_preview

* refine element_size docs; test=docs_preview

* add detach doc; test=docs_preview

* empty commit; test=docs_preview

* update signature; test=docs_preview

* refactor; test=docs_preview

* empty commit; test=docs_preview

* add docstring of Tensor

* empty commit; test=docs_preview

* refine TensorDoc; test=docs_preview

* refine TensorDoc; test=docs_preview

* remove extra indent in TensorDoc; test=docs_preview

* remove a space; test=docs_preview

* move docs ahead of implementation; test=docs_preview

* add doc

* refine

* refine

* refine

---------
Co-authored-by: wj-Mcat <1435130236@qq.com>
Co-authored-by: SigureMo <sigure.qaq@gmail.com>
Parent 842f56e4
@@ -540,23 +540,6 @@ static PyObject* tensor_method__copy_to(TensorObject* self,
  EAGER_CATCH_AND_THROW_RETURN_NULL
}
static PyObject* tensor_method_cpu(TensorObject* self,
                                   PyObject* args,
                                   PyObject* kwargs) {
  EAGER_TRY
  paddle::Tensor cp_tensor;
  {
    eager_gil_scoped_release guard;
    cp_tensor = self->tensor.copy_to(phi::CPUPlace(), true);
    egr::EagerUtils::autograd_meta(&cp_tensor)->SetStopGradient(true);
    egr::EagerUtils::autograd_meta(&cp_tensor)
        ->SetPersistable(
            egr::EagerUtils::autograd_meta(&(self->tensor))->Persistable());
  }
  return ToPyObject(cp_tensor);
  EAGER_CATCH_AND_THROW_RETURN_NULL
}
static PyObject* tensor_method_reconstruct_from_(TensorObject* self,
                                                 PyObject* args,
                                                 PyObject* kwargs) {
@@ -1636,6 +1619,30 @@ static PyObject* tensor_remove_grad_hook(TensorObject* self,
  EAGER_CATCH_AND_THROW_RETURN_NULL
}
PyDoc_STRVAR(tensor_method__register_reduce_hook__doc__,
             R"DOC(_register_backward_hook($self, hook, /)
--

Registers a backward hook for the current Tensor.

This hook will be called every time the gradient of the current Tensor has been fully calculated.

There are two differences from `_register_grad_hook`:

1. This backward hook is executed after gradient accumulation has completed across batches,
   while the hook registered by `_register_grad_hook` is executed once gradient accumulation
   has completed in the current batch.
2. This backward hook function should have the following signature:

       hook() -> None

   It takes no input and returns no value.

Args:
    hook(function): A backward hook to be registered for Tensor.gradient.

Returns:
    None
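
Examples:
    A minimal usage sketch (not part of this patch; it assumes dygraph mode
    and the cross-batch accumulation behavior described above):

    .. code-block:: python

        import paddle

        x = paddle.to_tensor([1., 2., 3.], stop_gradient=False)

        def hook():
            # Called once the gradient of x has been fully accumulated.
            print("gradient is ready:", x.grad)

        x._register_backward_hook(hook)

        y = (x * 2.).sum()
        y.backward()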
)DOC");
static PyObject* tensor_register_reduce_hook(TensorObject* self,
                                             PyObject* args,
                                             PyObject* kwargs) {
@@ -2040,6 +2047,15 @@ static PyObject* tensor_method_element_size(TensorObject* self,
  EAGER_CATCH_AND_THROW_RETURN_NULL
}
PyDoc_STRVAR(tensor_method__bump_inplace_version__doc__,
             R"DOC(_bump_inplace_version($self, /)
--

**Notes**:
    **This API is ONLY available in Dygraph mode.**
    **This is a very low-level API. Users should not use it directly.**

Bump the version whenever the Tensor is modified through an inplace operation.
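
Examples:
    A minimal sketch of a direct call (hypothetical; in practice inplace
    operators such as ``add_`` bump the version internally):

    .. code-block:: python

        import paddle

        x = paddle.to_tensor([1., 2., 3.])
        # Record an inplace modification of x by hand. Users should
        # normally let inplace ops (e.g. x.add_(x)) do this instead.
        x._bump_inplace_version()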
)DOC");
static PyObject* tensor__bump_inplace_version(TensorObject* self,
                                              PyObject* args,
                                              PyObject* kwargs) {
@@ -2439,7 +2455,7 @@ PyMethodDef variable_methods[] = {
    {"_register_backward_hook",
     (PyCFunction)(void (*)())tensor_register_reduce_hook,
     METH_VARARGS | METH_KEYWORDS,
     NULL},
     tensor_method__register_reduce_hook__doc__},
    {"_set_grad_type",
     (PyCFunction)(void (*)())tensor__set_grad_type,
     METH_VARARGS | METH_KEYWORDS,
@@ -2526,7 +2542,7 @@ PyMethodDef variable_methods[] = {
    {"_bump_inplace_version",
     (PyCFunction)(void (*)())tensor__bump_inplace_version,
     METH_VARARGS | METH_KEYWORDS,
     NULL},
     tensor_method__bump_inplace_version__doc__},
    {"is_selected_rows",
     (PyCFunction)(void (*)())tensor_method_is_selected_rows,
     METH_VARARGS | METH_KEYWORDS,
@@ -74,6 +74,37 @@ PyObject* tensor_properties_get_type(TensorObject* self, void* closure) {
  EAGER_CATCH_AND_THROW_RETURN_NULL
}
PyDoc_STRVAR(tensor_is_leaf__doc__,
             R"DOC(is_leaf

Whether a Tensor is a leaf Tensor.

A Tensor whose stop_gradient is ``True`` is a leaf Tensor.

A Tensor whose stop_gradient is ``False`` is also a leaf Tensor if it is created by the user.

Returns:
    bool: Whether a Tensor is a leaf Tensor.

Examples:
    .. code-block:: python

        import paddle

        x = paddle.to_tensor(1.)
        print(x.is_leaf)  # True

        x = paddle.to_tensor(1., stop_gradient=True)
        y = x + 1
        print(x.is_leaf)  # True
        print(y.is_leaf)  # True

        x = paddle.to_tensor(1., stop_gradient=False)
        y = x + 1
        print(x.is_leaf)  # True
        print(y.is_leaf)  # False
)DOC");
PyObject* tensor_properties_is_leaf(TensorObject* self, void* closure) {
  EAGER_TRY
  return ToPyObject(egr::EagerUtils::IsLeafTensor(self->tensor));
@@ -466,7 +497,11 @@ struct PyGetSetDef variable_properties[] = {
     nullptr},
    {"dtype", (getter)tensor_properties_get_dtype, nullptr, nullptr, nullptr},
    {"type", (getter)tensor_properties_get_type, nullptr, nullptr, nullptr},
    {"is_leaf", (getter)tensor_properties_is_leaf, nullptr, nullptr, nullptr},
    {"is_leaf",
     (getter)tensor_properties_is_leaf,
     nullptr,
     tensor_is_leaf__doc__,
     nullptr},
    {"grad_fn",
     (getter)tensor_properties_get_grad_fn,
     nullptr,