From d39777fefa4eafdde3253ed4ab6445e322b1b121 Mon Sep 17 00:00:00 2001
From: liu zhengxi <380185688@qq.com>
Date: Fri, 18 Oct 2019 10:13:07 +0800
Subject: [PATCH] alter the capi of PD_PredictorRun to provide proper function,
 test=develop (#20697)

modify the way to pass parameter out_size in function.
---
 paddle/fluid/inference/capi/c_api.h           |  4 ++--
 paddle/fluid/inference/capi/pd_predictor.cc   |  7 ++++---
 .../api/analyzer_capi_pd_tensor_tester.cc     | 19 +++++++++----------
 3 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/paddle/fluid/inference/capi/c_api.h b/paddle/fluid/inference/capi/c_api.h
index 13336cbd19c..b5ef410aada 100644
--- a/paddle/fluid/inference/capi/c_api.h
+++ b/paddle/fluid/inference/capi/c_api.h
@@ -99,8 +99,8 @@ PADDLE_CAPI_EXPORT extern int* PD_GetPaddleTensorShape(const PD_Tensor* tensor,
 // AnalysisPredictor
 PADDLE_CAPI_EXPORT extern bool PD_PredictorRun(const PD_AnalysisConfig* config,
                                                PD_Tensor* inputs, int in_size,
-                                               PD_Tensor* output_data,
-                                               int** out_size, int batch_size);
+                                               PD_Tensor** output_data,
+                                               int* out_size, int batch_size);
 
 PADDLE_CAPI_EXPORT extern bool PD_PredictorZeroCopyRun(
     const PD_AnalysisConfig* config, PD_ZeroCopyData* inputs, int in_size,
diff --git a/paddle/fluid/inference/capi/pd_predictor.cc b/paddle/fluid/inference/capi/pd_predictor.cc
index bb750524801..51f8237c95a 100644
--- a/paddle/fluid/inference/capi/pd_predictor.cc
+++ b/paddle/fluid/inference/capi/pd_predictor.cc
@@ -26,7 +26,7 @@ using paddle::ConvertToACPrecision;
 extern "C" {
 
 bool PD_PredictorRun(const PD_AnalysisConfig* config, PD_Tensor* inputs,
-                     int in_size, PD_Tensor* output_data, int** out_size,
+                     int in_size, PD_Tensor** output_data, int* out_size,
                      int batch_size) {
   PADDLE_ENFORCE_NOT_NULL(config);
   static std::map<std::string, std::unique_ptr<paddle::PaddlePredictor>>
@@ -43,10 +43,11 @@ bool PD_PredictorRun(const PD_AnalysisConfig* config, PD_Tensor* inputs,
   std::vector<paddle::PaddleTensor> out;
   if (predictor->Run(in, &out, batch_size)) {
     int osize = out.size();
+    *output_data = new PD_Tensor[osize];
     for (int i = 0; i < osize; ++i) {
-      output_data[i].tensor = out[i];
+      output_data[i]->tensor = out[i];
     }
-    *out_size = &osize;
+    *out_size = osize;
     return true;
   }
   return false;
diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc b/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc
index a94e0b8ebd4..fcb73c8ca02 100644
--- a/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc
+++ b/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc
@@ -21,6 +21,7 @@ limitations under the License. */
 #include
 #include
 #include "paddle/fluid/inference/capi/c_api.h"
+#include "paddle/fluid/inference/capi/c_api_internal.h"
 #include "paddle/fluid/inference/tests/api/tester_helper.h"
 
 namespace paddle {
@@ -56,16 +57,15 @@ void PD_run() {
   PD_SetPaddleTensorData(input, buf);
 
   PD_Tensor* out_data = PD_NewPaddleTensor();
-  int* out_size;
-  PD_PredictorRun(config, input, 1, out_data, &out_size, 1);
-  LOG(INFO) << *out_size;
+  int out_size;
+  PD_PredictorRun(config, input, 1, &out_data, &out_size, 1);
+  LOG(INFO) << out_size;
   LOG(INFO) << PD_GetPaddleTensorName(out_data);
   LOG(INFO) << PD_GetPaddleTensorDType(out_data);
   PD_PaddleBuf* b = PD_GetPaddleTensorData(out_data);
-  LOG(INFO) << PD_PaddleBufLength(b);
+  LOG(INFO) << PD_PaddleBufLength(b) / sizeof(float);
   float* result = static_cast<float*>(PD_PaddleBufData(b));
   LOG(INFO) << *result;
-  PD_PaddleBufResize(b, 500);
   PD_DeletePaddleTensor(input);
   int* size;
   PD_GetPaddleTensorShape(out_data, &size);
@@ -132,16 +132,15 @@ void buffer_run() {
   PD_SetPaddleTensorData(input, buf);
 
   PD_Tensor* out_data = PD_NewPaddleTensor();
-  int* out_size;
-  PD_PredictorRun(config, input, 1, out_data, &out_size, 1);
-  LOG(INFO) << *out_size;
+  int out_size;
+  PD_PredictorRun(config, input, 1, &out_data, &out_size, 1);
+  LOG(INFO) << out_size;
   LOG(INFO) << PD_GetPaddleTensorName(out_data);
   LOG(INFO) << PD_GetPaddleTensorDType(out_data);
   PD_PaddleBuf* b = PD_GetPaddleTensorData(out_data);
-  LOG(INFO) << PD_PaddleBufLength(b);
+  LOG(INFO) << PD_PaddleBufLength(b) / sizeof(float);
   float* result = static_cast<float*>(PD_PaddleBufData(b));
   LOG(INFO) << *result;
-  PD_PaddleBufResize(b, 500);
   PD_DeletePaddleTensor(input);
   PD_DeletePaddleBuf(buf);
 }
-- 
GitLab
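For reference, a minimal caller-side sketch of the revised PD_PredictorRun contract (not part of the patch itself), mirroring the updated tester. It assumes a PD_AnalysisConfig* named config and a populated PD_Tensor* named input have already been prepared as in analyzer_capi_pd_tensor_tester.cc; only the output handling changed by this commit is shown.

  PD_Tensor* out_data = PD_NewPaddleTensor();
  int out_size = 0;
  // PD_PredictorRun now allocates the output array itself and hands it back
  // through the PD_Tensor** argument; the number of output tensors comes back
  // through a plain int* instead of int**.
  if (PD_PredictorRun(config, input, 1, &out_data, &out_size, 1)) {
    PD_PaddleBuf* b = PD_GetPaddleTensorData(out_data);
    float* result = static_cast<float*>(PD_PaddleBufData(b));
    LOG(INFO) << out_size << " output tensor(s), first value " << *result;
  }
  PD_DeletePaddleTensor(input);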