未验证 提交 bbe99555 编写于 作者: S Sing_chan 提交者: GitHub

make inference_c test linking only paddle_inference_c (#44126)

上级 aa18ae11
@@ -20,6 +20,10 @@ cc_library(
   SRCS ${C_API_SRCS}
   DEPS paddle_inference)
+if(NOT ON_INFER)
+  return()
+endif()
+
 # Create inference capi shared library
 cc_library(
   paddle_inference_c_shared SHARED
......
@@ -20,6 +20,10 @@ cc_library(
   SRCS ${C_API_SRCS}
   DEPS paddle_inference)
+if(NOT ON_INFER)
+  return()
+endif()
+
 # Create inference capi shared library
 cc_library(
   paddle_inference_c_shared SHARED
......
@@ -943,28 +943,17 @@ if(WITH_GPU AND TENSORRT_FOUND)
     SRCS
     analyzer_capi_exp_gpu_tester.cc
     EXTRA_DEPS
-    ${INFERENCE_EXTRA_DEPS}
+    paddle_inference_c
     ARGS
     --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
-  if(WIN32)
-    target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c_shared)
-  else()
-    target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c)
-  endif()
   inference_analysis_test(
     test_analyzer_capi_exp_xpu
     SRCS
     analyzer_capi_exp_xpu_tester.cc
     EXTRA_DEPS
-    ${INFERENCE_EXTRA_DEPS}
+    paddle_inference_c
     ARGS
     --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
-  if(WIN32)
-    target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c_shared)
-  else()
-    target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c)
-  endif()
   set(TRT_MODEL_QUANT_RESNET_DIR
       "${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model")
   if(NOT EXISTS ${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model.tgz)
@@ -1110,44 +1099,27 @@ inference_analysis_test(
   SRCS
   analyzer_capi_exp_tester.cc
   EXTRA_DEPS
-  ${INFERENCE_EXTRA_DEPS}
+  paddle_inference_c
   ARGS
   --infer_model=${RESNET50_MODEL_DIR}/model)
-if(WIN32)
-  target_link_libraries(test_analyzer_capi_exp paddle_inference_c_shared)
-else()
-  target_link_libraries(test_analyzer_capi_exp paddle_inference_c)
-endif()
 inference_analysis_test(
   test_analyzer_capi_exp_pd_config
   SRCS
   analyzer_capi_exp_pd_config_tester.cc
   EXTRA_DEPS
-  ${INFERENCE_EXTRA_DEPS}
+  paddle_inference_c
   ARGS
   --infer_model=${MOBILENET_INSTALL_DIR}/model)
-if(WIN32)
-  target_link_libraries(test_analyzer_capi_exp_pd_config
-                        paddle_inference_c_shared)
-else()
-  target_link_libraries(test_analyzer_capi_exp_pd_config paddle_inference_c)
-endif()
 inference_analysis_test(
   test_analyzer_capi_exp_pd_tensor
   SRCS
   analyzer_capi_exp_pd_tensor_tester.cc
   EXTRA_DEPS
-  ${INFERENCE_EXTRA_DEPS}
+  paddle_inference_c
   ARGS
   --infer_model=${MOBILENET_INSTALL_DIR}/model)
-if(WIN32)
-  target_link_libraries(test_analyzer_capi_exp_pd_tensor
-                        paddle_inference_c_shared)
-else()
-  target_link_libraries(test_analyzer_capi_exp_pd_tensor paddle_inference_c)
-endif()
 
 if(NOT APPLE AND NOT WIN32)
   inference_analysis_test(
@@ -1155,15 +1127,9 @@ if(NOT APPLE AND NOT WIN32)
     SRCS
     analyzer_capi_exp_pd_threads_tester.cc
     EXTRA_DEPS
-    ${INFERENCE_EXTRA_DEPS}
+    paddle_inference_c
     ARGS
     --infer_model=${MOBILENET_INSTALL_DIR}/model)
-  if(WIN32)
-    target_link_libraries(test_analyzer_capi_exp_pd_threads
-                          paddle_inference_c_shared)
-  else()
-    target_link_libraries(test_analyzer_capi_exp_pd_threads paddle_inference_c)
-  endif()
 endif()
 
 inference_analysis_test(
@@ -1205,14 +1171,9 @@ if(WITH_MKLDNN)
     SRCS
     analyzer_capi_exp_int_tester.cc
     EXTRA_DEPS
-    ${INFERENCE_EXTRA_DEPS}
+    paddle_inference_c
     ARGS
     --infer_model=${INT8_DATA_DIR}/resnet50/model)
-  if(WIN32)
-    target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c_shared)
-  else()
-    target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c)
-  endif()
 endif()
 
 inference_analysis_test(
@@ -1220,14 +1181,9 @@ inference_analysis_test(
   SRCS
   analyzer_capi_exp_ner_tester.cc
   EXTRA_DEPS
-  ${INFERENCE_EXTRA_DEPS}
+  paddle_inference_c
   ARGS
   --infer_model=${CHINESE_NER_INSTALL_DIR}/model)
-if(WIN32)
-  target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c_shared)
-else()
-  target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c)
-endif()
 
 if(WITH_GPU)
   inference_analysis_test(
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册