diff --git a/paddle/fluid/pybind/eager_method.cc b/paddle/fluid/pybind/eager_method.cc
index 5233bbc832935fd5887896d89073e105a42507dd..04820bdb30e7d994a9d99be0f59af50848349125 100644
--- a/paddle/fluid/pybind/eager_method.cc
+++ b/paddle/fluid/pybind/eager_method.cc
@@ -445,6 +445,24 @@ static PyObject* tensor_method_copy_(TensorObject* self,
   EAGER_CATCH_AND_THROW_RETURN_NULL
 }
 
+static PyObject* tensor_method_clone(TensorObject* self,
+                                     PyObject* args,
+                                     PyObject* kwargs) {
+  EAGER_TRY
+
+  PADDLE_ENFORCE_EQ(
+      self->tensor.initialized(),
+      true,
+      paddle::platform::errors::InvalidArgument(
+          "We can only support initialized tensor in clone, however we got "
+          "uninitialized tensor %s, please check your code.",
+          self->tensor.name()));
+
+  auto out = assign_ad_func(self->tensor);
+  return ToPyObject(out);
+  EAGER_CATCH_AND_THROW_RETURN_NULL
+}
+
 static PyObject* tensor_retain_grads(TensorObject* self,
                                      PyObject* args,
                                      PyObject* kwargs) {
@@ -1854,6 +1872,10 @@ PyMethodDef variable_methods[] = {
      (PyCFunction)(void (*)(void))tensor_method_copy_,
      METH_VARARGS | METH_KEYWORDS,
      NULL},
+    {"clone",
+     (PyCFunction)(void (*)(void))tensor_method_clone,
+     METH_VARARGS | METH_KEYWORDS,
+     NULL},
     {"reconstruct_from_",
      (PyCFunction)(void (*)(void))tensor_method_reconstruct_from_,
      METH_VARARGS | METH_KEYWORDS,
diff --git a/python/paddle/fluid/dygraph/varbase_patch_methods.py b/python/paddle/fluid/dygraph/varbase_patch_methods.py
index cb6907d842ca6a9b990064fb8db943fedcda9193..61c856c071207287fafbb0e681dba3b0424b8acb 100644
--- a/python/paddle/fluid/dygraph/varbase_patch_methods.py
+++ b/python/paddle/fluid/dygraph/varbase_patch_methods.py
@@ -815,17 +815,6 @@ def monkey_patch_varbase():
             raise TypeError(
                 "_set_grad_ivar is only supported for Parameter Tensor")
 
-    @framework.dygraph_only
-    def clone(self):
-        if in_dygraph_mode():
-            return _C_ops.assign(self)
-
-        if _in_legacy_dygraph():
-            output = core.VarBase()
-        else:
-            output = core.eager.Tensor()
-        return _legacy_C_ops.assign(self, output)
-
     @framework.dygraph_only
     def value(self):
         return self
@@ -1009,7 +998,6 @@ def monkey_patch_varbase():
     if framework._in_eager_mode_:
         setattr(core.eager.Tensor, "_grad_ivar", _grad_ivar)
         setattr(core.eager.Tensor, "_set_grad_ivar", _set_grad_ivar)
-        setattr(core.eager.Tensor, "clone", clone)
         setattr(core.eager.Tensor, "value", value)
         setattr(core.eager.Tensor, "cpu", cpu)
         setattr(core.eager.Tensor, "cuda", cuda)