From daa6cb921c24c6dddb86293b0925612c636e96f6 Mon Sep 17 00:00:00 2001
From: ronnywang
Date: Wed, 13 Jul 2022 16:41:07 +0800
Subject: [PATCH] [CustomKernel] phi capi add inference support (#44268)

---
 paddle/fluid/inference/CMakeLists.txt                     | 4 ++++
 paddle/fluid/inference/api/CMakeLists.txt                 | 3 +++
 paddle/fluid/inference/api/api.cc                         | 4 ++++
 paddle/fluid/inference/paddle_inference_custom_device.map | 1 +
 paddle/phi/capi/include/c_tensor.h                        | 2 +-
 paddle/phi/capi/include/wrapper_base.h                    | 2 +-
 paddle/phi/capi/lib/c_tensor.cc                           | 2 +-
 7 files changed, 15 insertions(+), 3 deletions(-)

diff --git a/paddle/fluid/inference/CMakeLists.txt b/paddle/fluid/inference/CMakeLists.txt
index 6ff4655429..7f2daa942b 100644
--- a/paddle/fluid/inference/CMakeLists.txt
+++ b/paddle/fluid/inference/CMakeLists.txt
@@ -40,6 +40,10 @@ get_property(phi_modules GLOBAL PROPERTY PHI_MODULES)
 get_property(phi_kernels GLOBAL PROPERTY PHI_KERNELS)
 set(utils_modules stringpiece pretty_log string_helper benchmark)
 
+if(WITH_CUSTOM_DEVICE)
+  set(fluid_modules ${fluid_modules} phi_capi)
+endif()
+
 add_subdirectory(api)
 
 # Create static inference library if needed
diff --git a/paddle/fluid/inference/api/CMakeLists.txt b/paddle/fluid/inference/api/CMakeLists.txt
index 9e601df808..3aff5d5536 100755
--- a/paddle/fluid/inference/api/CMakeLists.txt
+++ b/paddle/fluid/inference/api/CMakeLists.txt
@@ -55,6 +55,9 @@ set(paddle_inference_api_deps
 if(WITH_CRYPTO)
   list(APPEND paddle_inference_api_deps paddle_crypto)
 endif()
+if(WITH_CUSTOM_DEVICE)
+  set(paddle_inference_api_deps ${paddle_inference_api_deps} phi_capi)
+endif()
 
 cc_library(
   paddle_inference_api
diff --git a/paddle/fluid/inference/api/api.cc b/paddle/fluid/inference/api/api.cc
index d5897e3c4f..054b4668c4 100644
--- a/paddle/fluid/inference/api/api.cc
+++ b/paddle/fluid/inference/api/api.cc
@@ -156,3 +156,7 @@ std::shared_ptr<framework::Cipher> MakeCipher(const std::string &config_file) {
 #endif
 
 }  // namespace paddle
+
+#ifdef PADDLE_WITH_CUSTOM_DEVICE
+#include "paddle/phi/capi/capi.h"
+#endif
diff --git a/paddle/fluid/inference/paddle_inference_custom_device.map b/paddle/fluid/inference/paddle_inference_custom_device.map
index 52bc287048..d78860e0a2 100644
--- a/paddle/fluid/inference/paddle_inference_custom_device.map
+++ b/paddle/fluid/inference/paddle_inference_custom_device.map
@@ -5,6 +5,7 @@
     *profile*;
     *phi*;
     *FLAGS_*;
+    PD_*;
   local:
     *;
 };
diff --git a/paddle/phi/capi/include/c_tensor.h b/paddle/phi/capi/include/c_tensor.h
index 35ac7dda39..2bebee9777 100644
--- a/paddle/phi/capi/include/c_tensor.h
+++ b/paddle/phi/capi/include/c_tensor.h
@@ -24,7 +24,7 @@ extern "C" {
 
 typedef struct PD_Tensor PD_Tensor;
 
-PD_DataType PD_TensorGetDataType(const PD_Tensor *tensor, PD_Status *status);
+PD_DataType PD_TensorGetPDDataType(const PD_Tensor *tensor, PD_Status *status);
 
 PD_DataLayout PD_TensorGetDataLayout(const PD_Tensor *tensor,
                                      PD_Status *status);
diff --git a/paddle/phi/capi/include/wrapper_base.h b/paddle/phi/capi/include/wrapper_base.h
index 2b5421bc26..adfb2b5a0e 100644
--- a/paddle/phi/capi/include/wrapper_base.h
+++ b/paddle/phi/capi/include/wrapper_base.h
@@ -128,7 +128,7 @@ class DenseTensor : public WrapperBase<PD_Tensor> {
 
   PD_DataType dtype() const {
     C_Status status;
-    auto data_type = PD_TensorGetDataType(raw_data(), &status);
+    auto data_type = PD_TensorGetPDDataType(raw_data(), &status);
     PD_CHECK_STATUS(status);
     return data_type;
   }
diff --git a/paddle/phi/capi/lib/c_tensor.cc b/paddle/phi/capi/lib/c_tensor.cc
index c81eefe22f..b460d2e368 100644
--- a/paddle/phi/capi/lib/c_tensor.cc
+++ b/paddle/phi/capi/lib/c_tensor.cc
@@ -19,7 +19,7 @@
 #include "paddle/phi/core/dense_tensor.h"
 #include "paddle/phi/core/meta_tensor.h"
 
-PD_DataType PD_TensorGetDataType(const PD_Tensor* tensor, PD_Status* status) {
+PD_DataType PD_TensorGetPDDataType(const PD_Tensor* tensor, PD_Status* status) {
   if (status) {
     if (!tensor) {
       *status = C_FAILED;
--
GitLab
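
Note (not part of the patch): after this change the phi C API symbols (PD_*) are exported from the inference library when WITH_CUSTOM_DEVICE is on, and the dtype query is exposed under the renamed entry point PD_TensorGetPDDataType. Below is a minimal sketch of a caller; the helper name QueryTensorDType is hypothetical, and only PD_TensorGetPDDataType, PD_Tensor, PD_DataType, PD_Status, C_FAILED, and the capi.h header come from the patch itself.

    // Hypothetical helper, not part of the patch: queries a tensor's dtype
    // through the renamed C API and reports failure via the return value.
    #include "paddle/phi/capi/capi.h"

    static bool QueryTensorDType(const PD_Tensor *tensor, PD_DataType *out_dtype) {
      PD_Status status;
      // The callee writes the status out-parameter (see c_tensor.cc in the patch).
      *out_dtype = PD_TensorGetPDDataType(tensor, &status);
      return status != C_FAILED;  // C_FAILED is the error value used in the patch
    }

This mirrors the pattern used by the C++ wrapper in wrapper_base.h (declare a status, call the C entry point, then check the status) without depending on PD_CHECK_STATUS.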