diff --git a/paddle/fluid/inference/capi/pd_predictor.cc b/paddle/fluid/inference/capi/pd_predictor.cc index f24cd94de63a85fb10d8b1c2d69b9ec860fae3f0..8aa1e2a7b7f9b99a1636ca2e7396089ab2ae7e15 100644 --- a/paddle/fluid/inference/capi/pd_predictor.cc +++ b/paddle/fluid/inference/capi/pd_predictor.cc @@ -233,7 +233,8 @@ void PD_SetZeroCopyInput(PD_Predictor* predictor, if (tensor->lod.length) { auto* lod_ptr = reinterpret_cast<size_t*>(tensor->lod.data); - std::vector<size_t> lod(lod_ptr, lod_ptr + tensor->lod.length); + std::vector<size_t> lod; + lod.assign(lod_ptr, lod_ptr + tensor->lod.length / sizeof(size_t)); input->SetLoD({std::move(lod)}); } } @@ -266,17 +267,19 @@ void PD_GetZeroCopyOutput(PD_Predictor* predictor, PD_ZeroCopyTensor* tensor) { tensor->data.length = length; auto lod = output->lod(); - tensor->lod.length = lod.front().size() * sizeof(size_t); - if (tensor->lod.capacity < lod.front().size()) { - if (tensor->lod.data) { - std::free(tensor->lod.data); - } + if (!lod.empty()) { + tensor->lod.length = lod.front().size() * sizeof(size_t); + if (tensor->lod.capacity < lod.front().size()) { + if (tensor->lod.data) { + std::free(tensor->lod.data); + } - tensor->lod.data = std::malloc(lod.front().size() * sizeof(size_t)); - tensor->lod.capacity = lod.front().size() * sizeof(size_t); + tensor->lod.data = std::malloc(lod.front().size() * sizeof(size_t)); + tensor->lod.capacity = lod.front().size() * sizeof(size_t); + } + std::copy(lod.front().begin(), lod.front().end(), + reinterpret_cast<size_t*>(tensor->lod.data)); } - std::copy(lod.front().begin(), lod.front().end(), - reinterpret_cast<size_t*>(tensor->lod.data)); switch (tensor->dtype) { case PD_FLOAT32: output->copy_to_cpu(reinterpret_cast<float*>(tensor->data.data)); diff --git a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt index 145cd250a025906ff2052811d6fbf4aeaeef8206..6e2b8a50bf19c6573762610ad870f02e66d07c00 100644 --- a/paddle/fluid/inference/tests/api/CMakeLists.txt +++ 
b/paddle/fluid/inference/tests/api/CMakeLists.txt @@ -399,3 +399,7 @@ if(WITH_MKLDNN) EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} paddle_fluid_c ARGS --infer_model=${INT8_DATA_DIR}/resnet50/model) endif() + +inference_analysis_test(test_analyzer_capi_ner SRCS analyzer_capi_ner_tester.cc + EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} paddle_fluid_c + ARGS --infer_model=${CHINESE_NER_INSTALL_DIR}/model) diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_gpu_tester.cc b/paddle/fluid/inference/tests/api/analyzer_capi_gpu_tester.cc index 46f97ed4771cf7dbdf9d6900db2ec55ab8b0a30b..85bd5bafd99abcde98c583cd3a3c15bf3aefa85e 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_gpu_tester.cc +++ b/paddle/fluid/inference/tests/api/analyzer_capi_gpu_tester.cc @@ -15,8 +15,6 @@ limitations under the License. */ #include #include #include -#include -#include #include #include #include "paddle/fluid/inference/capi/paddle_c_api.h" diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_int_tester.cc b/paddle/fluid/inference/tests/api/analyzer_capi_int_tester.cc index f619b3759dd65c343f5e678b96db8bac0f299d1f..c0c8ff083de57fb26578cfda4533e74ad52dba15 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_int_tester.cc +++ b/paddle/fluid/inference/tests/api/analyzer_capi_int_tester.cc @@ -15,8 +15,6 @@ limitations under the License. */ #include #include #include -#include -#include #include #include #include "paddle/fluid/inference/capi/paddle_c_api.h" diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_ner_tester.cc b/paddle/fluid/inference/tests/api/analyzer_capi_ner_tester.cc new file mode 100644 index 0000000000000000000000000000000000000000..bf0576f9f93b19221b147137b47bd0944ccf4479 --- /dev/null +++ b/paddle/fluid/inference/tests/api/analyzer_capi_ner_tester.cc @@ -0,0 +1,117 @@ +// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include <stddef.h> +#include <stdint.h> +#include <stdio.h> +#include <string.h> +#include <vector> +#include "paddle/fluid/inference/capi/paddle_c_api.h" +#include "paddle/fluid/inference/tests/api/tester_helper.h" + +namespace paddle { +namespace inference { +namespace analysis { + +void SetConfig(PD_AnalysisConfig *config) { + auto model_dir = FLAGS_infer_model; + PD_SetModel(config, (model_dir + "/__model__").c_str(), + (model_dir + "/param").c_str()); + PD_SwitchUseFeedFetchOps(config, false); + PD_SwitchSpecifyInputNames(config, true); + PD_DisableGpu(config); +} + +TEST(PD_ZeroCopyRun, zero_copy_run) { + PD_AnalysisConfig *config = PD_NewAnalysisConfig(); + SetConfig(config); + PD_Predictor *predictor = PD_NewPredictor(config); + + int input_num = PD_GetInputNum(predictor); + printf("Input num: %d\n", input_num); + int output_num = PD_GetOutputNum(predictor); + printf("Output num: %d\n", output_num); + + PD_ZeroCopyTensor inputs[2]; + + // inputs[0]: word + PD_InitZeroCopyTensor(&inputs[0]); + inputs[0].name = new char[5]; + snprintf(inputs[0].name, strlen(PD_GetInputName(predictor, 0)) + 1, "%s", + PD_GetInputName(predictor, 0)); + + inputs[0].data.capacity = sizeof(int64_t) * 11 * 1; + inputs[0].data.length = inputs[0].data.capacity; + inputs[0].data.data = malloc(inputs[0].data.capacity); + std::vector<int64_t> ref_word( + {12673, 9763, 905, 284, 45, 7474, 20, 17, 1, 4, 9}); + inputs[0].data.data = reinterpret_cast<void *>(ref_word.data()); + + int shape0[] = {11, 
1}; + inputs[0].shape.data = reinterpret_cast<void *>(shape0); + inputs[0].shape.capacity = sizeof(shape0); + inputs[0].shape.length = sizeof(shape0); + inputs[0].dtype = PD_INT64; + + size_t lod0[] = {0, 11}; + inputs[0].lod.data = reinterpret_cast<void *>(lod0); + inputs[0].lod.capacity = sizeof(size_t) * 2; + inputs[0].lod.length = sizeof(size_t) * 2; + + PD_SetZeroCopyInput(predictor, &inputs[0]); + + // inputs[1]: mention + PD_InitZeroCopyTensor(&inputs[1]); + inputs[1].name = new char[8]; + snprintf(inputs[1].name, strlen(PD_GetInputName(predictor, 1)) + 1, "%s", + PD_GetInputName(predictor, 1)); + + inputs[1].data.capacity = sizeof(int64_t) * 11 * 1; + inputs[1].data.length = inputs[1].data.capacity; + inputs[1].data.data = malloc(inputs[1].data.capacity); + std::vector<int64_t> ref_mention({27, 0, 0, 33, 34, 33, 0, 0, 0, 1, 2}); + inputs[1].data.data = reinterpret_cast<void *>(ref_mention.data()); + + int shape1[] = {11, 1}; + inputs[1].shape.data = reinterpret_cast<void *>(shape1); + inputs[1].shape.capacity = sizeof(shape1); + inputs[1].shape.length = sizeof(shape1); + inputs[1].dtype = PD_INT64; + + size_t lod1[] = {0, 11}; + inputs[1].lod.data = reinterpret_cast<void *>(lod1); + inputs[1].lod.capacity = sizeof(size_t) * 2; + inputs[1].lod.length = sizeof(size_t) * 2; + + PD_SetZeroCopyInput(predictor, &inputs[1]); + + PD_ZeroCopyRun(predictor); + PD_ZeroCopyTensor output; + PD_InitZeroCopyTensor(&output); + output.name = new char[21]; + snprintf(output.name, strlen(PD_GetOutputName(predictor, 0)) + 1, "%s", + PD_GetOutputName(predictor, 0)); + + // not necessary, just for coverage tests + output.lod.data = std::malloc(sizeof(size_t)); + + PD_GetZeroCopyOutput(predictor, &output); + PD_DestroyZeroCopyTensor(&output); + PD_DeleteAnalysisConfig(config); + PD_DeletePredictor(predictor); +} + +} // namespace analysis +} // namespace inference +} // namespace paddle diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_tester.cc b/paddle/fluid/inference/tests/api/analyzer_capi_tester.cc index 
8ec70ff6ed5d862c2a885e4e650842b9d7f8c6a7..93fcb43447d01dcafa10d8c85234d243d5095d4e 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_tester.cc +++ b/paddle/fluid/inference/tests/api/analyzer_capi_tester.cc @@ -15,8 +15,6 @@ limitations under the License. */ #include #include #include -#include -#include #include #include #include "paddle/fluid/inference/capi/paddle_c_api.h" @@ -71,7 +69,7 @@ void zero_copy_run() { delete[] outputs; } -TEST(PD_ZeroCopyRun, zero_copy_run) { zero_copy_run(); } +TEST(PD_PredictorZeroCopyRun, zero_copy_run) { zero_copy_run(); } #ifdef PADDLE_WITH_MKLDNN TEST(PD_AnalysisConfig, profile_mkldnn) {