From 05b7ef8d78438eca9237cefbc7696d22783d8b49 Mon Sep 17 00:00:00 2001
From: Sing_chan <51314274+betterpig@users.noreply.github.com>
Date: Thu, 7 Jul 2022 19:39:56 +0800
Subject: [PATCH] [Windows CI] copy onnxruntime.dll to c++ test folder in
 windows (#44121)

* copy onnxruntime.dll to c++ test folder in windows

* remove ut that failed due to onnxruntime.dll

* test_api_impl failed of diff

* use TARGET to make sure if the test exists; use POST_BUILD to add copy
  command
---
 cmake/external/onnxruntime.cmake              | 12 ++++++++++++
 paddle/fluid/framework/ir/CMakeLists.txt      |  7 +++++++
 .../fluid/inference/analysis/CMakeLists.txt   |  5 +++++
 paddle/fluid/inference/api/CMakeLists.txt     |  6 ++++++
 .../inference/api/details/CMakeLists.txt      |  6 ++++++
 .../fluid/inference/tensorrt/CMakeLists.txt   |  7 +++++++
 .../inference/tensorrt/convert/CMakeLists.txt |  6 ++++++
 paddle/fluid/inference/utils/CMakeLists.txt   |  7 +++++++
 .../fluid/operators/benchmark/CMakeLists.txt  |  6 ++++++
 .../fluid/operators/tensorrt/CMakeLists.txt   |  6 ++++++
 tools/windows/run_unittests.sh                | 19 ++-----------------
 11 files changed, 70 insertions(+), 17 deletions(-)

diff --git a/cmake/external/onnxruntime.cmake b/cmake/external/onnxruntime.cmake
index b52b2c00d9c..15901568ae1 100644
--- a/cmake/external/onnxruntime.cmake
+++ b/cmake/external/onnxruntime.cmake
@@ -134,3 +134,15 @@ endif()
 add_library(onnxruntime STATIC IMPORTED GLOBAL)
 set_property(TARGET onnxruntime PROPERTY IMPORTED_LOCATION ${ONNXRUNTIME_LIB})
 add_dependencies(onnxruntime ${ONNXRUNTIME_PROJECT})
+
+function(copy_onnx TARGET_NAME)
+  # If error of Exitcode0xc000007b happened when a .exe running, copy
+  # onnxruntime.dll to the .exe folder.
+  if(TARGET ${TARGET_NAME})
+    add_custom_command(
+      TARGET ${TARGET_NAME}
+      POST_BUILD
+      COMMAND ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SHARED_LIB}
+              ${CMAKE_CURRENT_BINARY_DIR} DEPENDS onnxruntime)
+  endif()
+endfunction()
diff --git a/paddle/fluid/framework/ir/CMakeLists.txt b/paddle/fluid/framework/ir/CMakeLists.txt
index 8569a3bb615..2e4b73c6ac1 100755
--- a/paddle/fluid/framework/ir/CMakeLists.txt
+++ b/paddle/fluid/framework/ir/CMakeLists.txt
@@ -473,6 +473,13 @@ if(WITH_MKLDNN)
     test_compute_propagate_scales_mkldnn_pass
     SRCS mkldnn/compute_propagate_scales_mkldnn_pass_tester.cc
     DEPS compute_propagate_scales_mkldnn_pass naive_executor)
+
+  if(WITH_ONNXRUNTIME AND WIN32)
+    # Copy onnxruntime for some c++ test in Windows, since the test will
+    # be built only in CI, so suppose the generator in Windows is Ninja.
+    copy_onnx(test_compute_propagate_scales_mkldnn_pass)
+  endif()
+
   cc_test(
     test_cpu_quantize_placement_pass
     SRCS mkldnn/cpu_quantize_placement_pass_tester.cc
diff --git a/paddle/fluid/inference/analysis/CMakeLists.txt b/paddle/fluid/inference/analysis/CMakeLists.txt
index c001f5eb8df..67f0e3212db 100644
--- a/paddle/fluid/inference/analysis/CMakeLists.txt
+++ b/paddle/fluid/inference/analysis/CMakeLists.txt
@@ -109,4 +109,9 @@ elseif(WIN32)
       paddle_inference_api
     ARGS
     --inference_model_dir=${WORD2VEC_MODEL_DIR})
+  if(WITH_ONNXRUNTIME AND WIN32)
+    # Copy onnxruntime for some c++ test in Windows, since the test will
+    # be built only in CI, so suppose the generator in Windows is Ninja.
+    copy_onnx(test_analyzer)
+  endif()
 endif()
diff --git a/paddle/fluid/inference/api/CMakeLists.txt b/paddle/fluid/inference/api/CMakeLists.txt
index 0d55b9c6641..9e601df8088 100755
--- a/paddle/fluid/inference/api/CMakeLists.txt
+++ b/paddle/fluid/inference/api/CMakeLists.txt
@@ -99,6 +99,12 @@ cc_test(
   SRCS api_tester.cc
   DEPS paddle_inference_api)
 
+if(WITH_ONNXRUNTIME AND WIN32)
+  # Copy onnxruntime for some c++ test in Windows, since the test will
+  # be built only in CI, so suppose the generator in Windows is Ninja.
+  copy_onnx(test_paddle_inference_api)
+endif()
+
 if(WITH_TESTING)
   if(NOT APPLE AND NOT WIN32)
     if(WITH_GPU)
diff --git a/paddle/fluid/inference/api/details/CMakeLists.txt b/paddle/fluid/inference/api/details/CMakeLists.txt
index 2acd96b3fb9..02d5f91d630 100644
--- a/paddle/fluid/inference/api/details/CMakeLists.txt
+++ b/paddle/fluid/inference/api/details/CMakeLists.txt
@@ -38,3 +38,9 @@ cc_test(
   zero_copy_tensor_test
   SRCS zero_copy_tensor_test.cc
   DEPS paddle_inference_api)
+
+if(WITH_ONNXRUNTIME AND WIN32)
+  # Copy onnxruntime for some c++ test in Windows, since the test will
+  # be built only in CI, so suppose the generator in Windows is Ninja.
+  copy_onnx(zero_copy_tensor_test)
+endif()
diff --git a/paddle/fluid/inference/tensorrt/CMakeLists.txt b/paddle/fluid/inference/tensorrt/CMakeLists.txt
index 0f1350459ef..cd03dce1795 100644
--- a/paddle/fluid/inference/tensorrt/CMakeLists.txt
+++ b/paddle/fluid/inference/tensorrt/CMakeLists.txt
@@ -24,5 +24,12 @@ nv_test(
   test_tensorrt_engine
   SRCS test_engine.cc test_dynamic_engine.cc
   DEPS dynload_cuda tensorrt_engine tensorrt_plugin)
+
+if(WITH_ONNXRUNTIME AND WIN32)
+  # Copy onnxruntime for some c++ test in Windows, since the test will
+  # be built only in CI, so suppose the generator in Windows is Ninja.
+  copy_onnx(test_tensorrt_engine)
+endif()
+
 add_subdirectory(plugin)
 add_subdirectory(convert)
diff --git a/paddle/fluid/inference/tensorrt/convert/CMakeLists.txt b/paddle/fluid/inference/tensorrt/convert/CMakeLists.txt
index c999c009605..90089fcbfd8 100644
--- a/paddle/fluid/inference/tensorrt/convert/CMakeLists.txt
+++ b/paddle/fluid/inference/tensorrt/convert/CMakeLists.txt
@@ -85,3 +85,9 @@ nv_test(
   SRCS test_op_converter.cc
   DEPS paddle_framework ${GLOB_OPERATOR_DEPS} tensorrt_engine
        tensorrt_converter)
+
+if(WITH_ONNXRUNTIME AND WIN32)
+  # Copy onnxruntime for some c++ test in Windows, since the test will
+  # be built only in CI, so suppose the generator in Windows is Ninja.
+  copy_onnx(test_op_converter)
+endif()
diff --git a/paddle/fluid/inference/utils/CMakeLists.txt b/paddle/fluid/inference/utils/CMakeLists.txt
index 9ab07633e0f..f165002f353 100644
--- a/paddle/fluid/inference/utils/CMakeLists.txt
+++ b/paddle/fluid/inference/utils/CMakeLists.txt
@@ -18,6 +18,13 @@ cc_test(
   infer_io_utils_tester
   SRCS io_utils_tester.cc
   DEPS infer_io_utils)
+
+if(WITH_ONNXRUNTIME AND WIN32)
+  # Copy onnxruntime for some c++ test in Windows, since the test will
+  # be built only in CI, so suppose the generator in Windows is Ninja.
+  copy_onnx(infer_io_utils_tester)
+endif()
+
 cc_library(table_printer SRCS table_printer.cc)
 cc_test(
   test_table_printer
diff --git a/paddle/fluid/operators/benchmark/CMakeLists.txt b/paddle/fluid/operators/benchmark/CMakeLists.txt
index e05011eaf6b..b0a1c488f04 100644
--- a/paddle/fluid/operators/benchmark/CMakeLists.txt
+++ b/paddle/fluid/operators/benchmark/CMakeLists.txt
@@ -12,3 +12,9 @@ cc_test(
   ${GLOB_OP_LIB}
   ${GLOB_OPERATOR_DEPS}
   eigen_function)
+
+if(WITH_ONNXRUNTIME AND WIN32)
+  # Copy onnxruntime for some c++ test in Windows, since the test will
+  # be built only in CI, so suppose the generator in Windows is Ninja.
+  copy_onnx(op_tester)
+endif()
diff --git a/paddle/fluid/operators/tensorrt/CMakeLists.txt b/paddle/fluid/operators/tensorrt/CMakeLists.txt
index e0fed2804a9..0d731b14c6a 100644
--- a/paddle/fluid/operators/tensorrt/CMakeLists.txt
+++ b/paddle/fluid/operators/tensorrt/CMakeLists.txt
@@ -4,3 +4,9 @@ nv_test(
   test_tensorrt_engine_op
   SRCS tensorrt_engine_op_test.cc
   DEPS tensorrt_engine_op analysis)
+
+if(WITH_ONNXRUNTIME AND WIN32)
+  # Copy onnxruntime for some c++ test in Windows, since the test will
+  # be built only in CI, so suppose the generator in Windows is Ninja.
+  copy_onnx(test_tensorrt_engine_op)
+endif()
diff --git a/tools/windows/run_unittests.sh b/tools/windows/run_unittests.sh
index 23a0b4d3282..7af1cd81391 100644
--- a/tools/windows/run_unittests.sh
+++ b/tools/windows/run_unittests.sh
@@ -87,8 +87,6 @@ disable_win_inference_test="^trt_quant_int8_yolov3_r50_test$|\
 ^lite_mul_model_test$|\
 ^trt_split_converter_test$|\
 ^paddle_infer_api_copy_tensor_tester$|\
-^test_tensorrt_engine_op$|\
-^test_tensorrt_engine$|\
 ^test_trt_deformable_conv$|\
 ^test_imperative_triple_grad$|\
 ^test_full_name_usage$|\
@@ -103,7 +101,6 @@ disable_win_inference_test="^trt_quant_int8_yolov3_r50_test$|\
 ^test_tensor_scalar_type_promotion_static$|\
 ^test_matrix_power_op$|\
 ^test_deformable_conv_v1_op$|\
-^zero_copy_tensor_test$|\
 ^test_where_index$|\
 ^test_custom_grad_input$|\
 ^test_conv3d_transpose_op$|\
@@ -116,16 +113,6 @@ disable_win_inference_test="^trt_quant_int8_yolov3_r50_test$|\
 ^test_basic_api_transformation$|\
 ^test_deformable_conv_op$|\
 ^test_variable$|\
-^test_conv_bias_mkldnn_fuse_pass_cc$|\
-^test_conv_batch_norm_mkldnn_fuse_pass$|\
-^test_compute_propagate_scales_mkldnn_pass$|\
-^test_cpu_quantize_pass$|\
-^test_cpu_quantize_squash_pass$|\
-^op_tester$|\
-^test_analyzer$|\
-^infer_io_utils_tester$|\
-^test_paddle_inference_api$|\
-^test_mkldnn_quantizer$|\
 ^test_mkldnn_conv_hard_sigmoid_fuse_pass$|\
 ^test_mkldnn_conv_hard_swish_fuse_pass$|\
 ^test_conv_act_mkldnn_fuse_pass$|\
@@ -147,11 +134,9 @@ disable_win_inference_test="^trt_quant_int8_yolov3_r50_test$|\
 ^test_slice$|\
 ^test_conv_elementwise_add_fuse_pass$|\
 ^test_executor_and_mul$|\
-^test_op_converter$|\
 ^test_analyzer_int8_resnet50$|\
 ^test_analyzer_int8_mobilenetv1$|\
 ^test_trt_conv_pass$|\
-^test_analysis_predictor$|\
 ^test_roll_op$|\
 ^test_lcm$|\
 ^test_elementwise_floordiv_op$|\
@@ -160,7 +145,6 @@ disable_win_inference_test="^trt_quant_int8_yolov3_r50_test$|\
 ^test_trt_convert_deformable_conv$|\
 ^test_conv_elementwise_add2_act_fuse_pass$|\
 ^test_tensor_scalar_type_promotion_dynamic$|\
-^test_api_impl$|\
 ^test_model$|\
 ^test_py_reader_combination$|\
 ^test_trt_convert_flatten$|\
@@ -198,7 +182,8 @@ disable_win_inference_test="^trt_quant_int8_yolov3_r50_test$|\
 ^test_trt_fc_fuse_quant_dequant_pass$|\
 ^test_unsqueeze2_eltwise_fuse_pass$|\
 ^test_parallel_executor_seresnext_with_fuse_all_reduce_gpu$|\
-^test_parallel_executor_seresnext_with_reduce_gpu$"
+^test_parallel_executor_seresnext_with_reduce_gpu$|\
+^test_api_impl$"
 
 # /*==========Fixed Disabled Windows CPU OPENBLAS((PR-CI-Windows-OPENBLAS)) unittests==============================*/
 
-- 
GitLab