diff --git a/paddle/fluid/inference/CMakeLists.txt b/paddle/fluid/inference/CMakeLists.txt
index 4e991a3013875744fc70fc01164c92cc067a4349..6ff46554296048f13d447db8e9d07f00c4a3c40a 100644
--- a/paddle/fluid/inference/CMakeLists.txt
+++ b/paddle/fluid/inference/CMakeLists.txt
@@ -119,6 +119,8 @@ cc_library(
 get_property(os_dependency_modules GLOBAL PROPERTY OS_DEPENDENCY_MODULES)
 target_link_libraries(paddle_inference_shared ${os_dependency_modules})
 if(WIN32)
+  set_property(TARGET paddle_inference_shared
+               PROPERTY WINDOWS_EXPORT_ALL_SYMBOLS ON)
   target_link_libraries(paddle_inference_shared gflags)
 endif()
 
diff --git a/paddle/fluid/inference/analysis/CMakeLists.txt b/paddle/fluid/inference/analysis/CMakeLists.txt
index 4b7bed65bab77e8f0627679a1b64abffa37691d2..c001f5eb8dfdc883432f11acc2a898d45382329a 100644
--- a/paddle/fluid/inference/analysis/CMakeLists.txt
+++ b/paddle/fluid/inference/analysis/CMakeLists.txt
@@ -49,10 +49,10 @@ function(inference_analysis_test_build TARGET)
       SRCS
       ${analysis_test_SRCS}
       DEPS
+      ${analysis_test_EXTRA_DEPS}
       analysis
       pass
-      ${GLOB_PASS_LIB}
-      ${analysis_test_EXTRA_DEPS})
+      ${GLOB_PASS_LIB})
   endif()
 endfunction()
 
@@ -80,10 +80,10 @@ function(inference_analysis_test TARGET)
       SRCS
       ${analysis_test_SRCS}
       DEPS
+      ${analysis_test_EXTRA_DEPS}
       analysis
       pass
-      ${GLOB_PASS_LIB}
-      ${analysis_test_EXTRA_DEPS})
+      ${GLOB_PASS_LIB})
     inference_base_test_run(${TARGET} COMMAND ${TARGET} ARGS
                             ${analysis_test_ARGS})
   endif()
diff --git a/paddle/fluid/inference/capi/CMakeLists.txt b/paddle/fluid/inference/capi/CMakeLists.txt
index 73ba41607aae8b96e1830b6848d5bb0f0efea4bc..25d8a39dc6374427193d93004f435c41d48dbabf 100644
--- a/paddle/fluid/inference/capi/CMakeLists.txt
+++ b/paddle/fluid/inference/capi/CMakeLists.txt
@@ -20,10 +20,6 @@ cc_library(
   SRCS ${C_API_SRCS}
   DEPS paddle_inference)
 
-if(NOT ON_INFER)
-  return()
-endif()
-
 # Create inference capi shared library
 cc_library(
   paddle_inference_c_shared SHARED
diff --git a/paddle/fluid/inference/capi_exp/CMakeLists.txt b/paddle/fluid/inference/capi_exp/CMakeLists.txt
index e35e14a0c0241dbd271636d686612e57a7a19428..56de57cbb9c85e832833663113c500ecee768f22 100644
--- a/paddle/fluid/inference/capi_exp/CMakeLists.txt
+++ b/paddle/fluid/inference/capi_exp/CMakeLists.txt
@@ -20,10 +20,6 @@ cc_library(
   SRCS ${C_API_SRCS}
   DEPS paddle_inference)
 
-if(NOT ON_INFER)
-  return()
-endif()
-
 # Create inference capi shared library
 cc_library(
   paddle_inference_c_shared SHARED
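
WINDOWS_EXPORT_ALL_SYMBOLS makes CMake generate a .def file exporting every function symbol from paddle_inference_shared, so consumers no longer depend on per-declaration __declspec annotations (this is also what lets the place.h change below drop PADDLE_API from two free functions). For reference, a minimal sketch of the conventional export-macro pattern the property replaces; the macro body and the PADDLE_DLL_EXPORT flag are illustrative assumptions, not Paddle's actual definition:

#include <cstddef>
#include <string>

// Hypothetical export macro for illustration only; Paddle defines its real
// PADDLE_API elsewhere and under different build conditions.
#if defined(_WIN32) && defined(PADDLE_DLL_EXPORT)
#define PADDLE_API __declspec(dllexport)  // set while building the DLL
#elif defined(_WIN32)
#define PADDLE_API __declspec(dllimport)  // set while consuming the DLL
#else
#define PADDLE_API  // no-op on non-Windows platforms
#endif

// With WINDOWS_EXPORT_ALL_SYMBOLS ON, a function declaration like this is
// exported even without the annotation (global data still needs explicit
// handling, which CMake's auto-generated .def file does not cover).
PADDLE_API std::string GetGlobalDeviceType(size_t device_type_id);
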
diff --git a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt
index 8261ce288cb97c9726e925e433bf90dc26d2f080..d6c4c18492afb9f073db3d8d5f376d579fc17a06 100644
--- a/paddle/fluid/inference/tests/api/CMakeLists.txt
+++ b/paddle/fluid/inference/tests/api/CMakeLists.txt
@@ -1,13 +1,4 @@
-if(NOT APPLE AND NOT WIN32)
-  set(INFERENCE_EXTRA_DEPS paddle_inference_shared)
-else()
-  set(INFERENCE_EXTRA_DEPS paddle_inference_api paddle_inference_io
-      ir_pass_manager analysis_predictor benchmark)
-endif()
-
-if(WITH_GPU AND TENSORRT_FOUND)
-  set(INFERENCE_EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} analysis ${analysis_deps})
-endif()
+set(INFERENCE_EXTRA_DEPS paddle_inference_shared)
 
 function(download_data install_dir data_file check_sum)
   string(REGEX MATCH "[^/\\]+$" file_name ${data_file})
@@ -948,18 +939,26 @@ if(WITH_GPU AND TENSORRT_FOUND)
     analyzer_capi_exp_gpu_tester.cc
     EXTRA_DEPS
     ${INFERENCE_EXTRA_DEPS}
-    paddle_inference_c
     ARGS
     --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
+  if(WIN32)
+    target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c_shared)
+  else()
+    target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c)
+  endif()
 
   inference_analysis_test(
     test_analyzer_capi_exp_xpu
     SRCS
     analyzer_capi_exp_xpu_tester.cc
     EXTRA_DEPS
     ${INFERENCE_EXTRA_DEPS}
-    paddle_inference_c
    ARGS
     --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
+  if(WIN32)
+    target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c_shared)
+  else()
+    target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c)
+  endif()
 
   set(TRT_MODEL_QUANT_RESNET_DIR "${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model")
@@ -1107,9 +1106,13 @@ inference_analysis_test(
   analyzer_capi_exp_tester.cc
   EXTRA_DEPS
   ${INFERENCE_EXTRA_DEPS}
-  paddle_inference_c
   ARGS
   --infer_model=${RESNET50_MODEL_DIR}/model)
+if(WIN32)
+  target_link_libraries(test_analyzer_capi_exp paddle_inference_c_shared)
+else()
+  target_link_libraries(test_analyzer_capi_exp paddle_inference_c)
+endif()
 
 inference_analysis_test(
   test_analyzer_capi_exp_pd_config
@@ -1117,9 +1120,14 @@ inference_analysis_test(
   analyzer_capi_exp_pd_config_tester.cc
   EXTRA_DEPS
   ${INFERENCE_EXTRA_DEPS}
-  paddle_inference_c
   ARGS
   --infer_model=${MOBILENET_INSTALL_DIR}/model)
+if(WIN32)
+  target_link_libraries(test_analyzer_capi_exp_pd_config
+                        paddle_inference_c_shared)
+else()
+  target_link_libraries(test_analyzer_capi_exp_pd_config paddle_inference_c)
+endif()
 
 inference_analysis_test(
   test_analyzer_capi_exp_pd_tensor
@@ -1127,9 +1135,14 @@ inference_analysis_test(
   analyzer_capi_exp_pd_tensor_tester.cc
   EXTRA_DEPS
   ${INFERENCE_EXTRA_DEPS}
-  paddle_inference_c
   ARGS
   --infer_model=${MOBILENET_INSTALL_DIR}/model)
+if(WIN32)
+  target_link_libraries(test_analyzer_capi_exp_pd_tensor
+                        paddle_inference_c_shared)
+else()
+  target_link_libraries(test_analyzer_capi_exp_pd_tensor paddle_inference_c)
+endif()
 
 if(NOT APPLE AND NOT WIN32)
   inference_analysis_test(
@@ -1138,10 +1151,16 @@ if(NOT APPLE AND NOT WIN32)
     analyzer_capi_exp_pd_threads_tester.cc
     EXTRA_DEPS
     ${INFERENCE_EXTRA_DEPS}
-    paddle_inference_c
     ARGS
     --infer_model=${MOBILENET_INSTALL_DIR}/model)
+  if(WIN32)
+    target_link_libraries(test_analyzer_capi_exp_pd_threads
+                          paddle_inference_c_shared)
+  else()
+    target_link_libraries(test_analyzer_capi_exp_pd_threads paddle_inference_c)
+  endif()
 endif()
+
 inference_analysis_test(
   test_analyzer_zerocopytensor_tensor
   SRCS
@@ -1182,9 +1201,13 @@ if(WITH_MKLDNN)
     analyzer_capi_exp_int_tester.cc
     EXTRA_DEPS
     ${INFERENCE_EXTRA_DEPS}
-    paddle_inference_c
     ARGS
     --infer_model=${INT8_DATA_DIR}/resnet50/model)
+  if(WIN32)
+    target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c_shared)
+  else()
+    target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c)
+  endif()
 endif()
 
 inference_analysis_test(
@@ -1193,9 +1216,13 @@ inference_analysis_test(
   analyzer_capi_exp_ner_tester.cc
   EXTRA_DEPS
   ${INFERENCE_EXTRA_DEPS}
-  paddle_inference_c
   ARGS
   --infer_model=${CHINESE_NER_INSTALL_DIR}/model)
+if(WIN32)
+  target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c_shared)
+else()
+  target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c)
+endif()
 
 if(WITH_GPU)
   inference_analysis_test(
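
On Windows the capi_exp tests now link the DLL (paddle_inference_c_shared) instead of the static paddle_inference_c, which is why paddle_build.bat below also adds the capi_exp build directory to PATH. For context, a hedged sketch of the C-API flow these analyzer_capi_exp tests exercise, assuming the PD_Config/PD_Predictor entry points declared in pd_inference_api.h; verify the exact names and signatures against that header:

#include "pd_inference_api.h"  // capi_exp umbrella header; include path assumed

int main() {
  // Build a config, point it at a directory-format model, create a predictor.
  PD_Config* config = PD_ConfigCreate();
  PD_ConfigSetModelDir(config, "./model");
  // The predictor is assumed to take ownership of the config here.
  PD_Predictor* predictor = PD_PredictorCreate(config);
  PD_PredictorRun(predictor);  // input/output tensor handling elided
  PD_PredictorDestroy(predictor);
  return 0;
}
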
diff --git a/paddle/fluid/inference/tests/api/analyzer_image_classification_tester.cc b/paddle/fluid/inference/tests/api/analyzer_image_classification_tester.cc
index 0df36592cc39e22ca84522b070a9bad4acebc9f9..dc8921ef7311ebc6b458f4590b60b2c052360d94 100644
--- a/paddle/fluid/inference/tests/api/analyzer_image_classification_tester.cc
+++ b/paddle/fluid/inference/tests/api/analyzer_image_classification_tester.cc
@@ -66,11 +66,6 @@ void profile(bool use_mkldnn = false) {
                  FLAGS_num_threads);
 }
 
-TEST(Analyzer_resnet50, profile) { profile(); }
-#ifdef PADDLE_WITH_MKLDNN
-TEST(Analyzer_resnet50, profile_mkldnn) { profile(true /* use_mkldnn */); }
-#endif
-
 // Check the fuse status
 TEST(Analyzer_resnet50, fuse_statis) {
   AnalysisConfig cfg;
@@ -82,6 +77,11 @@ TEST(Analyzer_resnet50, fuse_statis) {
   LOG(INFO) << "num_ops: " << num_ops;
 }
 
+TEST(Analyzer_resnet50, profile) { profile(); }
+#ifdef PADDLE_WITH_MKLDNN
+TEST(Analyzer_resnet50, profile_mkldnn) { profile(true /* use_mkldnn */); }
+#endif
+
 // Compare result of NativeConfig and AnalysisConfig
 void compare(bool use_mkldnn = false) {
   AnalysisConfig cfg;
diff --git a/paddle/fluid/inference/tests/api/trt_fc_prelu_test.cc b/paddle/fluid/inference/tests/api/trt_fc_prelu_test.cc
index 93d4a88383c33fb029c610e05f54d7b035420d3c..70c1eb8bab2535836912bde51a522062b9fef1a5 100644
--- a/paddle/fluid/inference/tests/api/trt_fc_prelu_test.cc
+++ b/paddle/fluid/inference/tests/api/trt_fc_prelu_test.cc
@@ -23,6 +23,11 @@ namespace inference {
 
 TEST(TensorRT_fc, compare) {
   std::string model_dir = FLAGS_infer_model + "/fc_uint8";
+  AnalysisConfig config;
+  config.EnableUseGpu(100, 0);
+  config.SetModel(model_dir);
+  config.DisableGlogInfo();
+  auto predictor = CreatePaddlePredictor(config);
   compare(model_dir, /* use_tensorrt */ true);
   // Open it when need.
   // profile(model_dir, /* use_analysis */ true, FLAGS_use_tensorrt);
diff --git a/paddle/fluid/inference/tests/api/trt_mobilenet_test.cc b/paddle/fluid/inference/tests/api/trt_mobilenet_test.cc
index 3b25c32fc75147cdd1ceaef7fa1a5bf0ee4e0993..45c14f4fc8b37e78b2dc63a7b030eee1d1479b14 100644
--- a/paddle/fluid/inference/tests/api/trt_mobilenet_test.cc
+++ b/paddle/fluid/inference/tests/api/trt_mobilenet_test.cc
@@ -23,6 +23,11 @@ namespace inference {
 
 TEST(TensorRT_mobilenet, compare) {
   std::string model_dir = FLAGS_infer_model + "/mobilenet";
+  AnalysisConfig config;
+  config.EnableUseGpu(100, 0);
+  config.SetModel(model_dir);
+  config.DisableGlogInfo();
+  auto predictor = CreatePaddlePredictor(config);
   compare(model_dir, /* use_tensorrt */ true);
   // Open it when need.
   // profile(model_dir, /* use_analysis */ true, FLAGS_use_tensorrt);
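
Each TensorRT test above (and trt_resnext_test.cc below) now constructs a plain GPU AnalysisConfig predictor before running the TensorRT comparison, initializing the device and analysis passes once up front. A standalone sketch of that pattern; the installed header name and the helper function are assumptions, while EnableUseGpu's arguments are the initial GPU memory pool in MB and the device id:

#include <memory>
#include <string>

#include "paddle_inference_api.h"  // header name as installed; path assumed

// Hypothetical helper mirroring the warm-up block added in the tests above.
std::unique_ptr<paddle::PaddlePredictor> MakeWarmupPredictor(
    const std::string& model_dir) {
  paddle::AnalysisConfig config;
  config.EnableUseGpu(100, 0);  // 100 MB initial pool on GPU 0
  config.SetModel(model_dir);   // directory holding the saved model
  config.DisableGlogInfo();     // keep test logs quiet
  return paddle::CreatePaddlePredictor(config);
}
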
diff --git a/paddle/fluid/inference/tests/api/trt_resnext_test.cc b/paddle/fluid/inference/tests/api/trt_resnext_test.cc
index 374074957c870a6b747eac5c20bb105db7b2f32d..8d4e331fa9730f326217fc1c97a2a889f5b4c7bd 100644
--- a/paddle/fluid/inference/tests/api/trt_resnext_test.cc
+++ b/paddle/fluid/inference/tests/api/trt_resnext_test.cc
@@ -23,6 +23,11 @@ namespace inference {
 
 TEST(TensorRT_resnext50, compare) {
   std::string model_dir = FLAGS_infer_model + "/resnext50";
+  AnalysisConfig config;
+  config.EnableUseGpu(100, 0);
+  config.SetModel(model_dir);
+  config.DisableGlogInfo();
+  auto predictor = CreatePaddlePredictor(config);
   compare(model_dir, /* use_tensorrt */ true);
 }
 
diff --git a/paddle/phi/common/place.h b/paddle/phi/common/place.h
index cbc1faf94f07c84c7701c6b6d6b3d6d5532a1791..ead3e463c28031f5be1e824b1f1969db671eb147 100644
--- a/paddle/phi/common/place.h
+++ b/paddle/phi/common/place.h
@@ -39,10 +39,9 @@ enum class AllocationType : int8_t {
 
 const char* AllocationTypeStr(AllocationType type);
 
-PADDLE_API size_t
-GetOrRegisterGlobalDeviceTypeId(const std::string& device_type);
+size_t GetOrRegisterGlobalDeviceTypeId(const std::string& device_type);
 
-PADDLE_API std::string GetGlobalDeviceType(size_t device_type_id_);
+std::string GetGlobalDeviceType(size_t device_type_id_);
 
 /// \brief The place is used to specify where the data is stored.
 class PADDLE_API Place {
diff --git a/paddle/scripts/paddle_build.bat b/paddle/scripts/paddle_build.bat
index d87915d172bb7fb2cb3105dee19e6828068800c9..9680ec234b3b4e38933c392e79b91a1a4dbbd9d3 100644
--- a/paddle/scripts/paddle_build.bat
+++ b/paddle/scripts/paddle_build.bat
@@ -685,7 +685,8 @@ set PATH=%THIRD_PARTY_PATH:/=\%\install\openblas\lib;%THIRD_PARTY_PATH:/=\%\inst
 %THIRD_PARTY_PATH:/=\%\install\zlib\bin;%THIRD_PARTY_PATH:/=\%\install\mklml\lib;^
 %THIRD_PARTY_PATH:/=\%\install\mkldnn\bin;%THIRD_PARTY_PATH:/=\%\install\warpctc\bin;^
 %THIRD_PARTY_PATH:/=\%\install\onnxruntime\lib;%THIRD_PARTY_PATH:/=\%\install\paddle2onnx\lib;^
-%work_dir%\%BUILD_DIR%\paddle\fluid\inference;%PATH%
+%work_dir%\%BUILD_DIR%\paddle\fluid\inference;%work_dir%\%BUILD_DIR%\paddle\fluid\inference\capi_exp;^
+%PATH%
 
 REM TODO: make ut find .dll in install\onnxruntime\lib
 xcopy %THIRD_PARTY_PATH:/=\%\install\onnxruntime\lib\onnxruntime.dll %work_dir%\%BUILD_DIR%\paddle\fluid\inference\tests\api\ /Y
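
Two closing details tie the diff together: place.h can drop PADDLE_API from its two free functions because WINDOWS_EXPORT_ALL_SYMBOLS now exports them anyway (the class-level annotation on Place stays), and paddle_build.bat adds the capi_exp build directory to PATH so tests linked against paddle_inference_c_shared can locate the DLL at runtime. A hedged usage sketch for the two functions, assuming they live in namespace phi:

#include <cstddef>
#include <iostream>
#include <string>

#include "paddle/phi/common/place.h"

int main() {
  // Register (or look up) a device type, then map the id back to its name.
  size_t id = phi::GetOrRegisterGlobalDeviceTypeId("my_custom_device");
  std::cout << phi::GetGlobalDeviceType(id) << std::endl;
  return 0;
}
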