From 0bcfc4747410a52e138e63cd5b1edb4062f3fa4b Mon Sep 17 00:00:00 2001
From: hong <43953930+phlrain@users.noreply.github.com>
Date: Mon, 4 Apr 2022 10:49:20 +0800
Subject: [PATCH] fix eager gen opti bug (#41302)

* fix eager gen opti bug

* polish code

* fix some bug

* fix some bugs;

---
 .../final_state_generator/eager_gen.py   | 19 ++++++++++++++++---
 paddle/fluid/eager/utils.cc              | 16 ----------------
 paddle/fluid/eager/utils.h               |  3 ---
 paddle/fluid/pybind/eager_utils.cc       |  2 +-
 paddle/phi/api/include/tensor.h          |  2 +-
 paddle/phi/api/lib/api_gen_utils.cc      | 16 ----------------
 paddle/phi/api/lib/api_gen_utils.h       |  6 ------
 python/paddle/utils/code_gen/api_base.py | 14 +++++++++-----
 8 files changed, 27 insertions(+), 51 deletions(-)

diff --git a/paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py b/paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py
index 88688672b18..3a7e5fbcc0f 100644
--- a/paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py
+++ b/paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py
@@ -359,6 +359,12 @@ CREATE_PLAIN_OPTIONAL_TENSOR_TEMPLATE = \
   if({}.initialized()) {}_optional = paddle::make_optional<const paddle::experimental::Tensor&>({});
 """
 
+CREATE_RECOVER_OPTIONAL_TENSOR_TEMPLATE = \
+"""
+  paddle::optional<const paddle::experimental::Tensor&> {}_optional = paddle::none;
+  if( {}.impl() ) {}_optional = paddle::make_optional<const paddle::experimental::Tensor&>({});
+"""
+
 
 #######################
 ## Generator Helpers ##
@@ -1248,11 +1254,18 @@ class DygraphNodeGenerator(DygraphFunctionGeneratorBase):
                 name)
 
             is_optional = (name in self.optional_inputs)
+            tensor_wrapper_recover_str = f"{indent}auto {transformed_tensor_name} = egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, this->shared_from_this());"
             if is_optional:
-                tensor_wrapper_recover_str = f"{indent}auto {transformed_tensor_name} = egr::EagerUtils::RecoverOptionalTensorWrapper(&this->{tensor_wrapper_name}, this->shared_from_this());"
+                tensor_wrapper_recover_str += "\n" + CREATE_RECOVER_OPTIONAL_TENSOR_TEMPLATE.format(
+                    transformed_tensor_name, transformed_tensor_name,
+                    transformed_tensor_name, transformed_tensor_name)
+
+                grad_api_args[
+                    grad_api_position] = transformed_tensor_name + "_optional"
+
             else:
-                tensor_wrapper_recover_str = f"{indent}auto {transformed_tensor_name} = egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, this->shared_from_this());"
-            grad_api_args[grad_api_position] = transformed_tensor_name
+                grad_api_args[grad_api_position] = transformed_tensor_name
+
             get_grad_in_args_list.append(tensor_wrapper_recover_str)
 
         # Grad Ins from grads
diff --git a/paddle/fluid/eager/utils.cc b/paddle/fluid/eager/utils.cc
index dfbc96a9db8..bcf4a4627bb 100644
--- a/paddle/fluid/eager/utils.cc
+++ b/paddle/fluid/eager/utils.cc
@@ -364,22 +364,6 @@ paddle::experimental::Tensor EagerUtils::RecoverTensorWrapper(
   return tw->recover(grad_node);
 }
 
-paddle::optional<const paddle::experimental::Tensor&>
-EagerUtils::RecoverOptionalTensorWrapper(
-    TensorWrapper* tw, const std::shared_ptr<GradNodeBase>& grad_node) {
-  PADDLE_ENFORCE_NOT_NULL(
-      tw, phi::errors::InvalidArgument("TensorWrapper in "
-                                       "RecoverOptionalTensorWrapper function "
-                                       "should not be null"));
-  auto tmp = tw->recover(grad_node);
-
-  paddle::optional<const paddle::experimental::Tensor&> res{paddle::none};
-  if (tmp.initialized()) {
-    res = tmp;
-  }
-  return res;
-}
-
 std::vector<paddle::experimental::Tensor> EagerUtils::RecoverTensorWrapper(
     std::vector<TensorWrapper>* tw,
     const std::shared_ptr<GradNodeBase>& grad_node) {
diff --git a/paddle/fluid/eager/utils.h b/paddle/fluid/eager/utils.h
index beb46d876c4..be534d44405 100644
--- a/paddle/fluid/eager/utils.h
+++ b/paddle/fluid/eager/utils.h
@@ -179,9 +179,6 @@ class EagerUtils {
   static std::vector<paddle::experimental::Tensor> RecoverTensorWrapper(
       std::vector<TensorWrapper>* tw,
       const std::shared_ptr<GradNodeBase>& grad_node);
-  static paddle::optional<const paddle::experimental::Tensor&>
-  RecoverOptionalTensorWrapper(TensorWrapper* tw,
-                               const std::shared_ptr<GradNodeBase>& grad_node);
 
   // Intermidate needed remove this once we don't need legacy
   // Inner Method
diff --git a/paddle/fluid/pybind/eager_utils.cc b/paddle/fluid/pybind/eager_utils.cc
index e245362c50b..bdc96e85e44 100644
--- a/paddle/fluid/pybind/eager_utils.cc
+++ b/paddle/fluid/pybind/eager_utils.cc
@@ -971,7 +971,7 @@ paddle::experimental::IntArray CastPyArg2IntArray(PyObject* obj,
     std::vector<int> value = CastPyArg2Ints(obj, op_type, arg_pos);
     return paddle::experimental::IntArray(value);
 
-  } else if (type_name == "paddle.Tensor") {
+  } else if (type_name == "paddle.Tensor" || type_name == "Tensor") {
     paddle::experimental::Tensor& value = GetTensorFromPyObject(
         op_type, "" /*arg_name*/, obj, arg_pos, false /*dispensable*/);
     return paddle::experimental::IntArray(value);
diff --git a/paddle/phi/api/include/tensor.h b/paddle/phi/api/include/tensor.h
index 0a2e815be84..3c5c1531c4a 100644
--- a/paddle/phi/api/include/tensor.h
+++ b/paddle/phi/api/include/tensor.h
@@ -567,7 +567,7 @@ class PADDLE_API Tensor final {
    * heterogeneous Tensor implementation, so that the API level can be unified
    * to one `Tensor`.
    */
-  std::shared_ptr<phi::TensorBase> impl_;
+  std::shared_ptr<phi::TensorBase> impl_{nullptr};
 
   /**
    * [ Why need abstract AbstractAutogradMeta here? ]
diff --git a/paddle/phi/api/lib/api_gen_utils.cc b/paddle/phi/api/lib/api_gen_utils.cc
index 7cbb4344e81..732ecacde94 100644
--- a/paddle/phi/api/lib/api_gen_utils.cc
+++ b/paddle/phi/api/lib/api_gen_utils.cc
@@ -66,14 +66,6 @@ phi::MetaTensor MakeMetaTensor(const phi::DenseTensor& tensor) {
   return phi::MetaTensor(tensor);
 }
 
-paddle::optional<phi::MetaTensor> MakeMetaTensor(
-    const paddle::optional<const phi::DenseTensor&>& tensor) {
-  if (tensor) {
-    return {phi::MetaTensor(*tensor)};
-  }
-  return {paddle::none};
-}
-
 std::vector<phi::MetaTensor> MakeMetaTensor(
     const std::vector<phi::DenseTensor>& tensors) {
   std::vector<phi::MetaTensor> meta_tensors;
@@ -88,14 +80,6 @@ phi::MetaTensor MakeMetaTensor(const phi::SelectedRows& tensor) {
   return phi::MetaTensor(tensor);
 }
 
-paddle::optional<phi::MetaTensor> MakeMetaTensor(
-    const paddle::optional<const phi::SelectedRows&>& tensor) {
-  if (tensor) {
-    return {phi::MetaTensor(*tensor)};
-  }
-  return {paddle::none};
-}
-
 phi::MetaTensor MakeMetaTensor(const phi::StringTensor& tensor) {
   return phi::MetaTensor(tensor);
 }
diff --git a/paddle/phi/api/lib/api_gen_utils.h b/paddle/phi/api/lib/api_gen_utils.h
index 2a4c8417b5e..d7ecef61c5b 100644
--- a/paddle/phi/api/lib/api_gen_utils.h
+++ b/paddle/phi/api/lib/api_gen_utils.h
@@ -50,17 +50,11 @@ std::shared_ptr<phi::StringTensor> TensorToStringTensor(const Tensor& tensor);
 
 phi::MetaTensor MakeMetaTensor(const phi::DenseTensor& tensor);
 
-paddle::optional<phi::MetaTensor> MakeMetaTensor(
-    const paddle::optional<const phi::DenseTensor&>& tensor);
-
 std::vector<phi::MetaTensor> MakeMetaTensor(
     const std::vector<phi::DenseTensor>& tensors);
 
 phi::MetaTensor MakeMetaTensor(const phi::SelectedRows& tensor);
 
-paddle::optional<phi::MetaTensor> MakeMetaTensor(
-    const paddle::optional<const phi::SelectedRows&>& tensor);
-
 phi::MetaTensor MakeMetaTensor(const phi::StringTensor& tensor);
 
 /* ------------------ for output ----------------------- */
diff --git a/python/paddle/utils/code_gen/api_base.py b/python/paddle/utils/code_gen/api_base.py
index 14f22fced92..c1a987d06ba 100644
--- a/python/paddle/utils/code_gen/api_base.py
+++ b/python/paddle/utils/code_gen/api_base.py
@@ -480,11 +480,15 @@ PADDLE_API {self.gene_return_type_code()} {self.get_api_func_name() + '_'}({self
                 param_code = param_code + param + "_metas, "
             elif param in self.optional_vars:
                 meta_tensor_code = meta_tensor_code + f"""
-{code_indent}  paddle::optional<phi::MetaTensor> {PREFIX_TENSOR_NAME}meta_ref_{param}(paddle::none);
-{code_indent}  auto {PREFIX_TENSOR_NAME}meta_{param} = MakeMetaTensor({PREFIX_TENSOR_NAME}{param});
-{code_indent}  if ({PREFIX_TENSOR_NAME}meta_{param}) {{
-{code_indent}    {PREFIX_TENSOR_NAME}meta_ref_{param} = paddle::make_optional(*{PREFIX_TENSOR_NAME}meta_{param});
-{code_indent}  }}"""
+{code_indent}  paddle::optional<phi::MetaTensor> {PREFIX_TENSOR_NAME}meta_ref_{param} = paddle::none;
+{code_indent}  phi::DenseTensor dt;
+{code_indent}  phi::MetaTensor {PREFIX_TENSOR_NAME}meta_tmp_{param}(dt);
+{code_indent}  if ({PREFIX_TENSOR_NAME}{param}_ptr) {{
+{code_indent}    {PREFIX_TENSOR_NAME}meta_tmp_{param}.set_dtype( {PREFIX_TENSOR_NAME}{param}_ptr->dtype() );
+{code_indent}    {PREFIX_TENSOR_NAME}meta_tmp_{param}.set_dims( {PREFIX_TENSOR_NAME}{param}_ptr->dims() );
+{code_indent}    {PREFIX_TENSOR_NAME}meta_tmp_{param}.set_layout( {PREFIX_TENSOR_NAME}{param}_ptr->layout() );
+{code_indent}    {PREFIX_TENSOR_NAME}meta_ref_{param} = {PREFIX_TENSOR_NAME}meta_tmp_{param};
+{code_indent}  }}\n"""
                 param_code = param_code + f"{PREFIX_TENSOR_NAME}meta_ref_{param}, "
             else:
--
GitLab
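
Editor's note: the eager_gen.py hunks above stop calling the removed RecoverOptionalTensorWrapper and instead emit code that recovers the wrapper unconditionally, then wraps it in an optional only when the tensor holds an implementation (which works because tensor.h now value-initializes impl_ to nullptr). Below is a minimal standalone sketch of that pattern; Tensor, TensorImpl, and std::optional here are illustrative stand-ins for paddle::experimental::Tensor and paddle::optional, not the real Paddle types.

    #include <iostream>
    #include <memory>
    #include <optional>

    // Stand-in for paddle::experimental::Tensor: after this patch, impl_ is
    // explicitly value-initialized to nullptr, so a default-constructed
    // tensor is observably empty via impl().
    struct TensorImpl {};
    struct Tensor {
      std::shared_ptr<TensorImpl> impl_{nullptr};
      const std::shared_ptr<TensorImpl>& impl() const { return impl_; }
    };

    int main() {
      Tensor recovered;  // e.g. the result of RecoverTensorWrapper(...)

      // Pattern emitted by CREATE_RECOVER_OPTIONAL_TENSOR_TEMPLATE: recover
      // the wrapper unconditionally, then materialize the optional grad-API
      // argument only when the recovered tensor holds an implementation.
      std::optional<Tensor> recovered_optional = std::nullopt;
      if (recovered.impl()) recovered_optional = recovered;

      std::cout << (recovered_optional ? "tensor set" : "none") << "\n";
      return 0;
    }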
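The api_base.py hunk follows the same direction: rather than calling the removed MakeMetaTensor(optional) overloads, the generated code builds a temporary MetaTensor and copies dtype/dims/layout from the optional input when it is present. A rough sketch of the shape of that generated code, with Meta as a hypothetical stand-in for phi::MetaTensor (not the Paddle API):

    #include <iostream>
    #include <optional>

    // Hypothetical stand-in for the metadata phi::MetaTensor carries.
    struct Meta {
      int dtype = 0;
      int dims = 0;
      int layout = 0;
    };

    int main() {
      Meta input{2, 16, 1};
      const Meta* input_ptr = &input;  // null when the optional input is absent

      // Shape of the code now emitted for an optional parameter: build a
      // temporary meta object, copy the descriptors field by field when the
      // input is present, and pass an optional wrapper to the infer-meta call.
      std::optional<Meta> meta_ref = std::nullopt;
      Meta meta_tmp;
      if (input_ptr) {
        meta_tmp.dtype = input_ptr->dtype;
        meta_tmp.dims = input_ptr->dims;
        meta_tmp.layout = input_ptr->layout;
        meta_ref = meta_tmp;
      }

      std::cout << (meta_ref ? "meta forwarded" : "none") << "\n";
      return 0;
    }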