From 33a58e58bec90ad14551af8a284dad1f27e4b3bf Mon Sep 17 00:00:00 2001 From: liu zhengxi <380185688@qq.com> Date: Fri, 18 Oct 2019 13:07:32 +0800 Subject: [PATCH] [cherry-pick] c api update in PD_PredictorRun (#20705) * improve the performance of capi in PD_PredictorRun (#20665) * alter the capi of PD_PredictorRun to provide proper function, test=release/1.6 --- paddle/fluid/inference/capi/c_api.h | 4 ++-- paddle/fluid/inference/capi/pd_predictor.cc | 15 +++++++++++---- .../api/analyzer_capi_pd_tensor_tester.cc | 19 +++++++++---------- 3 files changed, 22 insertions(+), 16 deletions(-) diff --git a/paddle/fluid/inference/capi/c_api.h b/paddle/fluid/inference/capi/c_api.h index 13336cbd19c..b5ef410aada 100644 --- a/paddle/fluid/inference/capi/c_api.h +++ b/paddle/fluid/inference/capi/c_api.h @@ -99,8 +99,8 @@ PADDLE_CAPI_EXPORT extern int* PD_GetPaddleTensorShape(const PD_Tensor* tensor, // AnalysisPredictor PADDLE_CAPI_EXPORT extern bool PD_PredictorRun(const PD_AnalysisConfig* config, PD_Tensor* inputs, int in_size, - PD_Tensor* output_data, - int** out_size, int batch_size); + PD_Tensor** output_data, + int* out_size, int batch_size); PADDLE_CAPI_EXPORT extern bool PD_PredictorZeroCopyRun( const PD_AnalysisConfig* config, PD_ZeroCopyData* inputs, int in_size, diff --git a/paddle/fluid/inference/capi/pd_predictor.cc b/paddle/fluid/inference/capi/pd_predictor.cc index 89d4c415373..51f8237c95a 100644 --- a/paddle/fluid/inference/capi/pd_predictor.cc +++ b/paddle/fluid/inference/capi/pd_predictor.cc @@ -26,10 +26,16 @@ using paddle::ConvertToACPrecision; extern "C" { bool PD_PredictorRun(const PD_AnalysisConfig* config, PD_Tensor* inputs, - int in_size, PD_Tensor* output_data, int** out_size, + int in_size, PD_Tensor** output_data, int* out_size, int batch_size) { PADDLE_ENFORCE_NOT_NULL(config); - auto predictor = paddle::CreatePaddlePredictor(config->config); + static std::map<std::string, std::unique_ptr<paddle::PaddlePredictor>> predictors; if (!predictors.count(config->config.model_dir())) { 
predictors[config->config.model_dir()] = paddle::CreatePaddlePredictor(config->config); } auto& predictor = predictors[config->config.model_dir()]; std::vector<paddle::PaddleTensor> in; for (int i = 0; i < in_size; ++i) { in.emplace_back(inputs->tensor); @@ -37,10 +43,11 @@ bool PD_PredictorRun(const PD_AnalysisConfig* config, PD_Tensor* inputs, std::vector<paddle::PaddleTensor> out; if (predictor->Run(in, &out, batch_size)) { int osize = out.size(); + *output_data = new PD_Tensor[osize]; for (int i = 0; i < osize; ++i) { - output_data[i].tensor = out[i]; + output_data[i]->tensor = out[i]; } - *out_size = &osize; + *out_size = osize; return true; } return false; diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc b/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc index a94e0b8ebd4..fcb73c8ca02 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc +++ b/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc @@ -21,6 +21,7 @@ limitations under the License. 
*/ #include <stddef.h> #include <stdint.h> #include "paddle/fluid/inference/capi/c_api.h" +#include "paddle/fluid/inference/capi/c_api_internal.h" #include "paddle/fluid/inference/tests/api/tester_helper.h" namespace paddle { @@ -56,16 +57,15 @@ void PD_run() { PD_SetPaddleTensorData(input, buf); PD_Tensor* out_data = PD_NewPaddleTensor(); - int* out_size; - PD_PredictorRun(config, input, 1, out_data, &out_size, 1); - LOG(INFO) << *out_size; + int out_size; + PD_PredictorRun(config, input, 1, &out_data, &out_size, 1); + LOG(INFO) << out_size; LOG(INFO) << PD_GetPaddleTensorName(out_data); LOG(INFO) << PD_GetPaddleTensorDType(out_data); PD_PaddleBuf* b = PD_GetPaddleTensorData(out_data); - LOG(INFO) << PD_PaddleBufLength(b); + LOG(INFO) << PD_PaddleBufLength(b) / sizeof(float); float* result = static_cast<float*>(PD_PaddleBufData(b)); LOG(INFO) << *result; - PD_PaddleBufResize(b, 500); PD_DeletePaddleTensor(input); int* size; PD_GetPaddleTensorShape(out_data, &size); @@ -132,16 +132,15 @@ void buffer_run() { PD_SetPaddleTensorData(input, buf); PD_Tensor* out_data = PD_NewPaddleTensor(); - int* out_size; - PD_PredictorRun(config, input, 1, out_data, &out_size, 1); - LOG(INFO) << *out_size; + int out_size; + PD_PredictorRun(config, input, 1, &out_data, &out_size, 1); + LOG(INFO) << out_size; LOG(INFO) << PD_GetPaddleTensorName(out_data); LOG(INFO) << PD_GetPaddleTensorDType(out_data); PD_PaddleBuf* b = PD_GetPaddleTensorData(out_data); - LOG(INFO) << PD_PaddleBufLength(b); + LOG(INFO) << PD_PaddleBufLength(b) / sizeof(float); float* result = static_cast<float*>(PD_PaddleBufData(b)); LOG(INFO) << *result; - PD_PaddleBufResize(b, 500); PD_DeletePaddleTensor(input); PD_DeletePaddleBuf(buf); } -- GitLab