From 71ab0458e1ac9f79a6bb9513d6eeba2c5aaaf3e2 Mon Sep 17 00:00:00 2001 From: liu zhengxi <380185688@qq.com> Date: Wed, 26 Feb 2020 16:37:10 +0800 Subject: [PATCH] Fix pointer and c-api encapsulation (#22663) * refine pointer and c-api prototype, test=develop * fix new c api profile bug, test=develop * add unit tests, test=develop --- paddle/fluid/inference/capi/paddle_c_api.h | 14 ++++++++++++-- paddle/fluid/inference/capi/pd_predictor.cc | 3 ++- paddle/fluid/inference/capi/pd_tensor.cc | 7 +++---- .../tests/api/analyzer_capi_gpu_tester.cc | 2 ++ .../tests/api/analyzer_capi_pd_tensor_tester.cc | 10 ++++++++-- 5 files changed, 27 insertions(+), 9 deletions(-) diff --git a/paddle/fluid/inference/capi/paddle_c_api.h b/paddle/fluid/inference/capi/paddle_c_api.h index e9157c06dec..bfac4baef09 100644 --- a/paddle/fluid/inference/capi/paddle_c_api.h +++ b/paddle/fluid/inference/capi/paddle_c_api.h @@ -117,8 +117,8 @@ PADDLE_CAPI_EXPORT extern PD_DataType PD_GetPaddleTensorDType( PADDLE_CAPI_EXPORT extern PD_PaddleBuf* PD_GetPaddleTensorData( const PD_Tensor* tensor); -PADDLE_CAPI_EXPORT extern int* PD_GetPaddleTensorShape(const PD_Tensor* tensor, - int** size); +PADDLE_CAPI_EXPORT extern const int* PD_GetPaddleTensorShape( + const PD_Tensor* tensor, int* size); // AnalysisPredictor PADDLE_CAPI_EXPORT extern bool PD_PredictorRun(const PD_AnalysisConfig* config, @@ -262,22 +262,32 @@ PADDLE_CAPI_EXPORT extern bool PD_ProfileEnabled( PADDLE_CAPI_EXPORT extern void PD_SetInValid(PD_AnalysisConfig* config); PADDLE_CAPI_EXPORT extern bool PD_IsValid(const PD_AnalysisConfig* config); + PADDLE_CAPI_EXPORT extern void PD_DisableGlogInfo(PD_AnalysisConfig* config); + PADDLE_CAPI_EXPORT extern void PD_DeletePass(PD_AnalysisConfig* config, char* pass_name); PADDLE_CAPI_EXPORT extern PD_Predictor* PD_NewPredictor( const PD_AnalysisConfig* config); + PADDLE_CAPI_EXPORT extern void PD_DeletePredictor(PD_Predictor* predictor); + PADDLE_CAPI_EXPORT extern int PD_GetInputNum(const 
PD_Predictor*); + PADDLE_CAPI_EXPORT extern int PD_GetOutputNum(const PD_Predictor*); + PADDLE_CAPI_EXPORT extern const char* PD_GetInputName(const PD_Predictor*, int); + PADDLE_CAPI_EXPORT extern const char* PD_GetOutputName(const PD_Predictor*, int); + PADDLE_CAPI_EXPORT extern void PD_SetZeroCopyInput( PD_Predictor* predictor, const PD_ZeroCopyTensor* tensor); + PADDLE_CAPI_EXPORT extern void PD_GetZeroCopyOutput(PD_Predictor* predictor, PD_ZeroCopyTensor* tensor); + PADDLE_CAPI_EXPORT extern void PD_ZeroCopyRun(PD_Predictor* predictor); #ifdef __cplusplus diff --git a/paddle/fluid/inference/capi/pd_predictor.cc b/paddle/fluid/inference/capi/pd_predictor.cc index 389f3cf5c08..f24cd94de63 100644 --- a/paddle/fluid/inference/capi/pd_predictor.cc +++ b/paddle/fluid/inference/capi/pd_predictor.cc @@ -180,7 +180,8 @@ PD_Predictor* PD_NewPredictor(const PD_AnalysisConfig* config) { } void PD_DeletePredictor(PD_Predictor* predictor) { - if (predictor == nullptr) { + if (predictor) { + predictor->predictor = nullptr; delete predictor; predictor = nullptr; } diff --git a/paddle/fluid/inference/capi/pd_tensor.cc b/paddle/fluid/inference/capi/pd_tensor.cc index db428785047..b4811f1d6ff 100644 --- a/paddle/fluid/inference/capi/pd_tensor.cc +++ b/paddle/fluid/inference/capi/pd_tensor.cc @@ -73,11 +73,10 @@ PD_PaddleBuf* PD_GetPaddleTensorData(const PD_Tensor* tensor) { return ret; } -int* PD_GetPaddleTensorShape(const PD_Tensor* tensor, int** size) { +const int* PD_GetPaddleTensorShape(const PD_Tensor* tensor, int* size) { PADDLE_ENFORCE_NOT_NULL(tensor); - std::vector<int> shape = tensor->tensor.shape; - int s = shape.size(); - *size = &s; + const std::vector<int>& shape = tensor->tensor.shape; + *size = shape.size(); return shape.data(); } diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_gpu_tester.cc b/paddle/fluid/inference/tests/api/analyzer_capi_gpu_tester.cc index 4fa58df09d5..46f97ed4771 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_gpu_tester.cc +++ 
b/paddle/fluid/inference/tests/api/analyzer_capi_gpu_tester.cc @@ -93,6 +93,8 @@ TEST(PD_AnalysisConfig, trt_fp16) { false); bool trt_enable = PD_TensorrtEngineEnabled(config); CHECK(trt_enable) << "NO"; + PD_Predictor *predictor = PD_NewPredictor(config); + PD_DeletePredictor(predictor); PD_DeleteAnalysisConfig(config); } diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc b/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc index 9edf04c4cfd..0bc67aff7af 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc +++ b/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc @@ -67,8 +67,14 @@ void PD_run() { float* result = static_cast<float*>(PD_PaddleBufData(b)); LOG(INFO) << *result; PD_DeletePaddleTensor(input); - int* size; - PD_GetPaddleTensorShape(out_data, &size); + int size; + const int* out_shape = PD_GetPaddleTensorShape(out_data, &size); + CHECK(size == 2) << "The Output shape's size is NOT match."; + std::vector<int> ref_outshape_size({9, 6}); + for (int i = 0; i < 2; ++i) { + CHECK(out_shape[i] == ref_outshape_size[i]) + << "The Output's shape is NOT match."; + } PD_DeletePaddleBuf(buf); } -- GitLab