diff --git a/paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py b/paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py
index 88688672b18b58ce1ecd9301a3dc0d5571275f9c..3a7e5fbcc0f865950be62b95d76e840e5488551c 100644
--- a/paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py
+++ b/paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py
@@ -359,6 +359,12 @@ CREATE_PLAIN_OPTIONAL_TENSOR_TEMPLATE = \
   if({}.initialized()) {}_optional = paddle::make_optional<const paddle::experimental::Tensor&>({});
 """
 
+CREATE_RECOVER_OPTIONAL_TENSOR_TEMPLATE = \
+"""
+  paddle::optional<const paddle::experimental::Tensor&> {}_optional = paddle::none;
+  if( {}.impl() ) {}_optional = paddle::make_optional<const paddle::experimental::Tensor&>({});
+"""
+
 
 #######################
 ## Generator Helpers ##
@@ -1248,11 +1254,18 @@ class DygraphNodeGenerator(DygraphFunctionGeneratorBase):
                 name)
             is_optional = (name in self.optional_inputs)
 
+            tensor_wrapper_recover_str = f"{indent}auto {transformed_tensor_name} = egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, this->shared_from_this());"
             if is_optional:
-                tensor_wrapper_recover_str = f"{indent}auto {transformed_tensor_name} = egr::EagerUtils::RecoverOptionalTensorWrapper(&this->{tensor_wrapper_name}, this->shared_from_this());"
+                tensor_wrapper_recover_str += "\n" + CREATE_RECOVER_OPTIONAL_TENSOR_TEMPLATE.format(
+                    transformed_tensor_name, transformed_tensor_name,
+                    transformed_tensor_name, transformed_tensor_name)
+
+                grad_api_args[
+                    grad_api_position] = transformed_tensor_name + "_optional"
+
             else:
-                tensor_wrapper_recover_str = f"{indent}auto {transformed_tensor_name} = egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, this->shared_from_this());"
-            grad_api_args[grad_api_position] = transformed_tensor_name
+                grad_api_args[grad_api_position] = transformed_tensor_name
+
             get_grad_in_args_list.append(tensor_wrapper_recover_str)
 
         # Grad Ins from grads
diff --git a/paddle/fluid/eager/utils.cc b/paddle/fluid/eager/utils.cc
index dfbc96a9db836a8dfcea4fd4646d09c0cb0e2649..bcf4a4627bb769a4786adf6e9eb24b0be6655cd3 100644
--- a/paddle/fluid/eager/utils.cc
+++ b/paddle/fluid/eager/utils.cc
@@ -364,22 +364,6 @@ paddle::experimental::Tensor EagerUtils::RecoverTensorWrapper(
   return tw->recover(grad_node);
 }
 
-paddle::optional<const paddle::experimental::Tensor&>
-EagerUtils::RecoverOptionalTensorWrapper(
-    TensorWrapper* tw, const std::shared_ptr<GradNodeBase>& grad_node) {
-  PADDLE_ENFORCE_NOT_NULL(
-      tw, phi::errors::InvalidArgument("TensorWrapper in "
-                                       "RecoverOptionalTensorWrapper function "
-                                       "should not be null"));
-  auto tmp = tw->recover(grad_node);
-
-  paddle::optional<const paddle::experimental::Tensor&> res{paddle::none};
-  if (tmp.initialized()) {
-    res = tmp;
-  }
-  return res;
-}
-
 std::vector<paddle::experimental::Tensor> EagerUtils::RecoverTensorWrapper(
     std::vector<TensorWrapper>* tw,
     const std::shared_ptr<GradNodeBase>& grad_node) {
diff --git a/paddle/fluid/eager/utils.h b/paddle/fluid/eager/utils.h
index beb46d876c4a123f9e3586380e4394a851585b73..be534d4440561ac4dca5c3fbef7ff379ebe44fff 100644
--- a/paddle/fluid/eager/utils.h
+++ b/paddle/fluid/eager/utils.h
@@ -179,9 +179,6 @@ class EagerUtils {
   static std::vector<paddle::experimental::Tensor> RecoverTensorWrapper(
       std::vector<TensorWrapper>* tw,
       const std::shared_ptr<GradNodeBase>& grad_node);
-  static paddle::optional<const paddle::experimental::Tensor&>
-  RecoverOptionalTensorWrapper(TensorWrapper* tw,
-                               const std::shared_ptr<GradNodeBase>& grad_node);
 
   // Intermidate needed remove this once we don't need legacy
   // Inner Method
diff --git a/paddle/fluid/pybind/eager_utils.cc b/paddle/fluid/pybind/eager_utils.cc
index e245362c50be590aa205db1605279261fe78ea2c..bdc96e85e44ae7be5740f08516be682bad5d93b2 100644
--- a/paddle/fluid/pybind/eager_utils.cc
+++ b/paddle/fluid/pybind/eager_utils.cc
@@ -971,7 +971,7 @@ paddle::experimental::IntArray CastPyArg2IntArray(PyObject* obj,
 
     std::vector<int> value = CastPyArg2Ints(obj, op_type, arg_pos);
     return paddle::experimental::IntArray(value);
-  } else if (type_name == "paddle.Tensor") {
+  } else if (type_name == "paddle.Tensor" || type_name == "Tensor") {
     paddle::experimental::Tensor& value = GetTensorFromPyObject(
         op_type, "" /*arg_name*/, obj, arg_pos, false /*dispensable*/);
     return paddle::experimental::IntArray(value);
diff --git a/paddle/phi/api/include/tensor.h b/paddle/phi/api/include/tensor.h
index 0a2e815be8411a8c29486c7814c70264c2f5066c..3c5c1531c4a2dd6d977699bb65a5c9204c4bdf8f 100644
--- a/paddle/phi/api/include/tensor.h
+++ b/paddle/phi/api/include/tensor.h
@@ -567,7 +567,7 @@ class PADDLE_API Tensor final {
    * heterogeneous Tensor implementation, so that the API level can be unified
    * to one `Tensor`.
    */
-  std::shared_ptr<phi::TensorBase> impl_;
+  std::shared_ptr<phi::TensorBase> impl_{nullptr};
 
   /**
    * [ Why need abstract AbstractAutogradMeta here? ]
diff --git a/paddle/phi/api/lib/api_gen_utils.cc b/paddle/phi/api/lib/api_gen_utils.cc
index 7cbb4344e81d7c38f0aeb28cb161f9325648628c..732ecacde94d7d28fd386ef5985eeb1a0a4e30df 100644
--- a/paddle/phi/api/lib/api_gen_utils.cc
+++ b/paddle/phi/api/lib/api_gen_utils.cc
@@ -66,14 +66,6 @@ phi::MetaTensor MakeMetaTensor(const phi::DenseTensor& tensor) {
   return phi::MetaTensor(tensor);
 }
 
-paddle::optional<phi::MetaTensor> MakeMetaTensor(
-    const paddle::optional<const phi::DenseTensor&>& tensor) {
-  if (tensor) {
-    return {phi::MetaTensor(*tensor)};
-  }
-  return {paddle::none};
-}
-
 std::vector<phi::MetaTensor> MakeMetaTensor(
     const std::vector<phi::DenseTensor>& tensors) {
   std::vector<phi::MetaTensor> meta_tensors;
@@ -88,14 +80,6 @@ phi::MetaTensor MakeMetaTensor(const phi::SelectedRows& tensor) {
   return phi::MetaTensor(tensor);
 }
 
-paddle::optional<phi::MetaTensor> MakeMetaTensor(
-    const paddle::optional<const phi::SelectedRows&>& tensor) {
-  if (tensor) {
-    return {phi::MetaTensor(*tensor)};
-  }
-  return {paddle::none};
-}
-
 phi::MetaTensor MakeMetaTensor(const phi::StringTensor& tensor) {
   return phi::MetaTensor(tensor);
 }
diff --git a/paddle/phi/api/lib/api_gen_utils.h b/paddle/phi/api/lib/api_gen_utils.h
index 2a4c8417b5e6ded3b277d1b5751f1a8a4fc5f09a..d7ecef61c5be31f5a30dad9cc1c8e82ba3405af9 100644
--- a/paddle/phi/api/lib/api_gen_utils.h
+++ b/paddle/phi/api/lib/api_gen_utils.h
@@ -50,17 +50,11 @@ std::shared_ptr<phi::StringTensor> TensorToStringTensor(const Tensor& tensor);
 
 phi::MetaTensor MakeMetaTensor(const phi::DenseTensor& tensor);
 
-paddle::optional<phi::MetaTensor> MakeMetaTensor(
-    const paddle::optional<const phi::DenseTensor&>& tensor);
-
 std::vector<phi::MetaTensor> MakeMetaTensor(
     const std::vector<phi::DenseTensor>& tensors);
 
 phi::MetaTensor MakeMetaTensor(const phi::SelectedRows& tensor);
 
-paddle::optional<phi::MetaTensor> MakeMetaTensor(
-    const paddle::optional<const phi::SelectedRows&>& tensor);
-
 phi::MetaTensor MakeMetaTensor(const phi::StringTensor& tensor);
 
 /* ------------------ for output ----------------------- */
diff --git a/python/paddle/utils/code_gen/api_base.py b/python/paddle/utils/code_gen/api_base.py
index 14f22fced9230244088c7c46b65031d95769ba3c..c1a987d06ba397a4b51dc8fac8d27427abea5984 100644
--- a/python/paddle/utils/code_gen/api_base.py
+++ b/python/paddle/utils/code_gen/api_base.py
@@ -480,11 +480,15 @@ PADDLE_API {self.gene_return_type_code()} {self.get_api_func_name() + '_'}({self
                 param_code = param_code + param + "_metas, "
             elif param in self.optional_vars:
                 meta_tensor_code = meta_tensor_code + f"""
-{code_indent}  paddle::optional<phi::MetaTensor> {PREFIX_TENSOR_NAME}meta_ref_{param}(paddle::none);
-{code_indent}  auto {PREFIX_TENSOR_NAME}meta_{param} = MakeMetaTensor({PREFIX_TENSOR_NAME}{param});
-{code_indent}  if ({PREFIX_TENSOR_NAME}meta_{param}) {{
-{code_indent}    {PREFIX_TENSOR_NAME}meta_ref_{param} = paddle::make_optional(*{PREFIX_TENSOR_NAME}meta_{param});
-{code_indent}  }}"""
+{code_indent}  paddle::optional<phi::MetaTensor> {PREFIX_TENSOR_NAME}meta_ref_{param} = paddle::none;
+{code_indent}  phi::DenseTensor dt;
+{code_indent}  phi::MetaTensor {PREFIX_TENSOR_NAME}meta_tmp_{param}(dt);
+{code_indent}  if ({PREFIX_TENSOR_NAME}{param}_ptr) {{
+{code_indent}    {PREFIX_TENSOR_NAME}meta_tmp_{param}.set_dtype( {PREFIX_TENSOR_NAME}{param}_ptr->dtype() );
+{code_indent}    {PREFIX_TENSOR_NAME}meta_tmp_{param}.set_dims( {PREFIX_TENSOR_NAME}{param}_ptr->dims() );
+{code_indent}    {PREFIX_TENSOR_NAME}meta_tmp_{param}.set_layout( {PREFIX_TENSOR_NAME}{param}_ptr->layout() );
+{code_indent}    {PREFIX_TENSOR_NAME}meta_ref_{param} = {PREFIX_TENSOR_NAME}meta_tmp_{param};
+{code_indent}  }}\n"""
 
                 param_code = param_code + f"{PREFIX_TENSOR_NAME}meta_ref_{param}, "
             else:
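
Note: for reference, a minimal sketch of what the grad-node code generated by the
templates above now expands to, for a hypothetical optional input `bias` (the
names `bias` and `bias_` are illustrative, not taken from the diff):

    // Recover the plain tensor from its wrapper via the remaining helper...
    auto bias = egr::EagerUtils::RecoverTensorWrapper(&this->bias_,
                                                      this->shared_from_this());
    // ...then wrap it into an optional at the call site, as emitted by
    // CREATE_RECOVER_OPTIONAL_TENSOR_TEMPLATE. Checking impl() instead of
    // initialized() is what makes the impl_{nullptr} default in tensor.h
    // load-bearing: an empty recovered tensor now reliably reports a null impl.
    paddle::optional<const paddle::experimental::Tensor&> bias_optional =
        paddle::none;
    if (bias.impl())
      bias_optional =
          paddle::make_optional<const paddle::experimental::Tensor&>(bias);

This moves all optional handling into the generated code itself, which is why the
RecoverOptionalTensorWrapper helper and its initialized()-based check can be
deleted from EagerUtils.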
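
Similarly, a sketch of the C++ the api_base.py template above now generates for an
optional infermeta argument, assuming PREFIX_TENSOR_NAME expands to "input_" and an
optional parameter named `scale` (both names are illustrative assumptions):

    // A MetaTensor backed by a throwaway DenseTensor stands in for the input...
    paddle::optional<phi::MetaTensor> input_meta_ref_scale = paddle::none;
    phi::DenseTensor dt;
    phi::MetaTensor input_meta_tmp_scale(dt);
    // ...and is populated only when the optional input is present, replacing the
    // removed MakeMetaTensor(paddle::optional<...>) overloads.
    if (input_scale_ptr) {
      input_meta_tmp_scale.set_dtype(input_scale_ptr->dtype());
      input_meta_tmp_scale.set_dims(input_scale_ptr->dims());
      input_meta_tmp_scale.set_layout(input_scale_ptr->layout());
      input_meta_ref_scale = input_meta_tmp_scale;
    }
    // input_meta_ref_scale is then forwarded to the InferMeta call.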