Unverified commit 9428c969 authored by tianshuo78520a, committed by GitHub

Revert "make inference_c test linking only paddle_inference_c (#44126)" (#44149)

This reverts commit bbe99555.
Parent e35f0628
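The revert restores two pieces of the pre-#44126 CMake setup: the ON_INFER guard placed before the paddle_inference_c_shared library is created, and the platform-dependent linking of the C-API tests (paddle_inference_c_shared on Windows, paddle_inference_c elsewhere). A minimal sketch of the restored test-linking pattern follows; the test target, tester source, and model path variable are illustrative placeholders, not the repository's exact names:

  # Illustrative sketch only: test_capi_exp_example, capi_exp_example_tester.cc
  # and EXAMPLE_MODEL_DIR are placeholders for the real targets in the diff below.
  inference_analysis_test(
    test_capi_exp_example
    SRCS
    capi_exp_example_tester.cc
    EXTRA_DEPS
    ${INFERENCE_EXTRA_DEPS}
    ARGS
    --infer_model=${EXAMPLE_MODEL_DIR}/model)
  if(WIN32)
    # On Windows the test links against the shared C-API library ...
    target_link_libraries(test_capi_exp_example paddle_inference_c_shared)
  else()
    # ... on other platforms it links paddle_inference_c directly.
    target_link_libraries(test_capi_exp_example paddle_inference_c)
  endif()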
@@ -20,10 +20,6 @@ cc_library(
SRCS ${C_API_SRCS}
DEPS paddle_inference)
if(NOT ON_INFER)
return()
endif()
# Create inference capi shared library
cc_library(
paddle_inference_c_shared SHARED
......
@@ -20,10 +20,6 @@ cc_library(
SRCS ${C_API_SRCS}
DEPS paddle_inference)
if(NOT ON_INFER)
return()
endif()
# Create inference capi shared library
cc_library(
paddle_inference_c_shared SHARED
......
@@ -943,17 +943,28 @@ if(WITH_GPU AND TENSORRT_FOUND)
SRCS
analyzer_capi_exp_gpu_tester.cc
EXTRA_DEPS
paddle_inference_c
${INFERENCE_EXTRA_DEPS}
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c)
endif()
inference_analysis_test(
test_analyzer_capi_exp_xpu
SRCS
analyzer_capi_exp_xpu_tester.cc
EXTRA_DEPS
paddle_inference_c
${INFERENCE_EXTRA_DEPS}
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c)
endif()
set(TRT_MODEL_QUANT_RESNET_DIR
"${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model")
if(NOT EXISTS ${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model.tgz)
@@ -1099,27 +1110,44 @@ inference_analysis_test(
SRCS
analyzer_capi_exp_tester.cc
EXTRA_DEPS
paddle_inference_c
${INFERENCE_EXTRA_DEPS}
ARGS
--infer_model=${RESNET50_MODEL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp paddle_inference_c)
endif()
inference_analysis_test(
test_analyzer_capi_exp_pd_config
SRCS
analyzer_capi_exp_pd_config_tester.cc
EXTRA_DEPS
paddle_inference_c
${INFERENCE_EXTRA_DEPS}
ARGS
--infer_model=${MOBILENET_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_pd_config
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_pd_config paddle_inference_c)
endif()
inference_analysis_test(
test_analyzer_capi_exp_pd_tensor
SRCS
analyzer_capi_exp_pd_tensor_tester.cc
EXTRA_DEPS
paddle_inference_c
${INFERENCE_EXTRA_DEPS}
ARGS
--infer_model=${MOBILENET_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_pd_tensor
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_pd_tensor paddle_inference_c)
endif()
if(NOT APPLE AND NOT WIN32)
inference_analysis_test(
@@ -1127,9 +1155,15 @@ if(NOT APPLE AND NOT WIN32)
SRCS
analyzer_capi_exp_pd_threads_tester.cc
EXTRA_DEPS
paddle_inference_c
${INFERENCE_EXTRA_DEPS}
ARGS
--infer_model=${MOBILENET_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_pd_threads
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_pd_threads paddle_inference_c)
endif()
endif()
inference_analysis_test(
@@ -1171,9 +1205,14 @@ if(WITH_MKLDNN)
SRCS
analyzer_capi_exp_int_tester.cc
EXTRA_DEPS
paddle_inference_c
${INFERENCE_EXTRA_DEPS}
ARGS
--infer_model=${INT8_DATA_DIR}/resnet50/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c)
endif()
endif()
inference_analysis_test(
@@ -1181,9 +1220,14 @@ inference_analysis_test(
SRCS
analyzer_capi_exp_ner_tester.cc
EXTRA_DEPS
paddle_inference_c
${INFERENCE_EXTRA_DEPS}
ARGS
--infer_model=${CHINESE_NER_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c)
endif()
if(WITH_GPU)
inference_analysis_test(
......