From 008debe7f1c8183555a4bf9add762f4f003110e3 Mon Sep 17 00:00:00 2001
From: tianshuo78520a <707759223@qq.com>
Date: Fri, 21 Apr 2023 15:48:13 +0800
Subject: [PATCH] Mv inference ut (#52987)

* mv inference/api infer_ut
* mv test
* merge develop fix error
* fix
* fix build error
* fix build error
* fix bug
* fix tester_helper.h
* fix analyzer_transformer_profile_tester.cc
* fix
* fix mac
* fix mac
* fix error
* fix
* fix
---
 paddle/fluid/inference/CMakeLists.txt | 8 -
 .../fluid/inference/analysis/CMakeLists.txt | 81 -
 paddle/fluid/inference/api/CMakeLists.txt | 77 -
 .../api/onnxruntime_predictor_tester.cc | 2 +-
 .../fluid/inference/tests/api/CMakeLists.txt | 1411 ---------------
 paddle/scripts/paddle_build.sh | 2 +-
 test/cpp/CMakeLists.txt | 1 +
 test/cpp/inference/CMakeLists.txt | 6 +
 test/cpp/inference/analysis/CMakeLists.txt | 80 +
 .../inference/analysis/analyzer_tester.cc | 0
 test/cpp/inference/api/CMakeLists.txt | 1517 +++++++++++++++++
 .../api/analysis_predictor_tester.cc | 2 +-
 .../inference}/api/analyzer_bert_tester.cc | 2 +-
 ...er_bfloat16_image_classification_tester.cc | 2 +-
 .../api/analyzer_capi_exp_gpu_tester.cc | 2 +-
 .../api/analyzer_capi_exp_int_tester.cc | 2 +-
 .../api/analyzer_capi_exp_ner_tester.cc | 2 +-
 .../api/analyzer_capi_exp_pd_config_tester.cc | 2 +-
 .../api/analyzer_capi_exp_pd_tensor_tester.cc | 2 +-
 .../analyzer_capi_exp_pd_threads_tester.cc | 2 +-
 .../api/analyzer_capi_exp_tester.cc | 2 +-
 .../api/analyzer_capi_exp_xpu_tester.cc | 2 +-
 .../api/analyzer_capi_gpu_tester.cc | 2 +-
 .../api/analyzer_capi_int_tester.cc | 2 +-
 .../api/analyzer_capi_ner_tester.cc | 2 +-
 .../api/analyzer_capi_pd_tensor_tester.cc | 2 +-
 .../inference}/api/analyzer_capi_tester.cc | 2 +-
 .../api/analyzer_capi_xpu_tester.cc | 2 +-
 .../cpp/inference}/api/analyzer_dam_tester.cc | 2 +-
 ...nalyzer_detect_functional_mkldnn_tester.cc | 2 +-
 .../inference}/api/analyzer_detect_tester.cc | 2 +-
 .../api/analyzer_dist_model_tester.cc | 2 +-
 .../api/analyzer_dist_model_xpu_tester.cc | 2 +-
 .../api/analyzer_ernie_int8_tester.cc | 2 +-
 .../inference}/api/analyzer_ernie_tester.cc | 2 +-
 .../inference}/api/analyzer_ernie_tester.h | 2 +-
 .../analyzer_image_classification_tester.cc | 2 +-
 ...alyzer_int8_image_classification_tester.cc | 2 +-
 .../analyzer_int8_object_detection_tester.cc | 2 +-
 .../cpp/inference}/api/analyzer_lac_tester.cc | 2 +-
 .../analyzer_lexical_analysis_gru_tester.cc | 2 +-
 .../cpp/inference}/api/analyzer_mmp_tester.cc | 6 +-
 .../cpp/inference}/api/analyzer_ner_tester.cc | 2 +-
 .../api/analyzer_paddle_tensor_tester.cc | 2 +-
 .../api/analyzer_pyramid_dnn_tester.cc | 2 +-
 ...lyzer_quant_image_classification_tester.cc | 2 +-
 .../inference}/api/analyzer_rnn1_tester.cc | 2 +-
 .../inference}/api/analyzer_rnn2_tester.cc | 2 +-
 .../api/analyzer_save_model_tester.cc | 2 +-
 .../api/analyzer_seq_conv1_tester.cc | 2 +-
 ...yzer_seq_pool1_compare_determine_tester.cc | 4 +-
 .../api/analyzer_seq_pool1_compare_tester.cc | 4 +-
 ...seq_pool1_fuse_compare_zero_copy_tester.cc | 4 +-
 .../analyzer_seq_pool1_fuse_statis_tester.cc | 4 +-
 .../api/analyzer_seq_pool1_profile_tester.cc | 4 +-
 .../api/analyzer_seq_pool1_tester_helper.h | 2 +-
 .../analyzer_text_classification_tester.cc | 2 +-
 .../analyzer_transformer_compare_tester.cc | 2 +-
 .../api/analyzer_transformer_fuse_tester.cc | 2 +-
 .../analyzer_transformer_profile_tester.cc | 2 +-
 .../api/analyzer_transformer_tester_helper.h | 2 +-
 .../cpp/inference}/api/analyzer_vis_tester.cc | 2 +-
 .../inference}/api/analyzer_vit_ocr_tester.cc | 2 +-
.../api/analyzer_zerocopy_tensor_tester.cc | 2 +- .../cpp}/inference/api/api_impl_tester.cc | 2 +- .../cpp}/inference/api/api_tester.cc | 0 .../cpp/inference}/api/config_printer.h | 0 .../api/full_ILSVRC2012_val_preprocess.py | 0 .../api/full_pascalvoc_test_preprocess.py | 0 .../cpp/inference}/api/gpu_ernie_half_test.cc | 2 +- .../cpp}/inference/api/helper_test.cc | 0 .../api/int8_mkldnn_quantization.md | 0 .../cpp/inference}/api/ipu_ernie_fp16_test.cc | 2 +- .../cpp/inference}/api/ipu_ernie_test.cc | 2 +- .../inference}/api/ipu_multi_model_profile.cc | 2 +- .../inference}/api/ipu_resnet50_fp16_test.cc | 2 +- .../cpp/inference}/api/ipu_resnet50_test.cc | 2 +- .../cpp/inference}/api/ipu_word2vec_sample.cc | 0 .../cpp/inference}/api/lite_mul_model_test.cc | 2 +- .../cpp/inference}/api/lite_resnet50_test.cc | 2 +- .../api/mkldnn_quantizer_config_tester.cc | 2 +- .../inference/api/mkldnn_quantizer_tester.cc | 0 .../paddle_infer_api_copy_tensor_tester.cc | 2 +- .../api/paddle_infer_api_errors_tester.cc | 0 .../inference}/api/paddle_infer_api_test.cc | 2 +- .../api/test_detection_dataset_preprocess.py | 0 .../cpp/inference}/api/tester_helper.h | 4 +- .../inference}/api/trt_cascade_rcnn_test.cc | 2 +- ...e_ernie_fp16_serialize_deserialize_test.cc | 2 +- ..._shape_ernie_serialize_deserialize_test.cc | 2 +- ...c_shape_ernie_serialize_deserialize_test.h | 2 +- .../api/trt_dynamic_shape_ernie_test.cc | 2 +- .../inference}/api/trt_dynamic_shape_test.cc | 2 +- ...rt_dynamic_shape_transformer_prune_test.cc | 2 +- .../cpp/inference}/api/trt_fc_prelu_test.cc | 2 +- .../api/trt_instance_norm_converter_test.cc | 2 +- .../cpp/inference}/api/trt_mobilenet_test.cc | 2 +- .../cpp/inference}/api/trt_quant_int8_test.cc | 2 +- .../api/trt_quant_int8_yolov3_r50_test.cc | 2 +- .../cpp/inference}/api/trt_resnet50_test.cc | 2 +- .../cpp/inference}/api/trt_resnext_test.cc | 2 +- .../api/trt_split_converter_test.cc | 2 +- .../cpp/inference}/api/trt_test_helper.h | 2 +- .../cpp/inference}/infer_ut/CMakeLists.txt | 0 .../cpp/inference}/infer_ut/README.md | 0 .../infer_ut/external-cmake/gtest-cpp.cmake | 0 .../cpp/inference}/infer_ut/run.sh | 0 .../cpp/inference}/infer_ut/test_LeViT.cc | 0 .../inference}/infer_ut/test_det_mv3_db.cc | 0 .../infer_ut/test_ernie_text_cls.cc | 0 .../infer_ut/test_ernie_xnli_int8.cc | 0 .../cpp/inference}/infer_ut/test_helper.h | 0 .../inference}/infer_ut/test_mobilnetv1.cc | 0 .../inference}/infer_ut/test_ppyolo_mbv3.cc | 0 .../infer_ut/test_ppyolov2_r50vd.cc | 0 .../cpp/inference}/infer_ut/test_resnet50.cc | 0 .../infer_ut/test_resnet50_quant.cc | 0 .../cpp/inference}/infer_ut/test_suite.h | 0 .../cpp/inference}/infer_ut/test_yolov3.cc | 0 .../tests => test/cpp/inference}/test.cmake | 0 .../cpp/inference}/test_helper.h | 0 121 files changed, 1697 insertions(+), 1668 deletions(-) delete mode 100644 paddle/fluid/inference/tests/api/CMakeLists.txt create mode 100644 test/cpp/inference/CMakeLists.txt create mode 100644 test/cpp/inference/analysis/CMakeLists.txt rename {paddle/fluid => test/cpp}/inference/analysis/analyzer_tester.cc (100%) create mode 100644 test/cpp/inference/api/CMakeLists.txt rename {paddle/fluid => test/cpp}/inference/api/analysis_predictor_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_bert_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_bfloat16_image_classification_tester.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_exp_gpu_tester.cc (99%) rename 
{paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_exp_int_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_exp_ner_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_exp_pd_config_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_exp_pd_tensor_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_exp_pd_threads_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_exp_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_exp_xpu_tester.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_gpu_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_int_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_ner_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_pd_tensor_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_capi_xpu_tester.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_dam_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_detect_functional_mkldnn_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_detect_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_dist_model_tester.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_dist_model_xpu_tester.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_ernie_int8_tester.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_ernie_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_ernie_tester.h (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_image_classification_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_int8_image_classification_tester.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_int8_object_detection_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_lac_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_lexical_analysis_gru_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_mmp_tester.cc (96%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_ner_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_paddle_tensor_tester.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_pyramid_dnn_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_quant_image_classification_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_rnn1_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_rnn2_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_save_model_tester.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_seq_conv1_tester.cc (99%) rename 
{paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_seq_pool1_compare_determine_tester.cc (89%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_seq_pool1_compare_tester.cc (89%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_seq_pool1_fuse_compare_zero_copy_tester.cc (91%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_seq_pool1_fuse_statis_tester.cc (91%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_seq_pool1_profile_tester.cc (90%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_seq_pool1_tester_helper.h (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_text_classification_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_transformer_compare_tester.cc (94%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_transformer_fuse_tester.cc (93%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_transformer_profile_tester.cc (94%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_transformer_tester_helper.h (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_vis_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_vit_ocr_tester.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/analyzer_zerocopy_tensor_tester.cc (97%) rename {paddle/fluid => test/cpp}/inference/api/api_impl_tester.cc (99%) rename {paddle/fluid => test/cpp}/inference/api/api_tester.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/config_printer.h (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/full_ILSVRC2012_val_preprocess.py (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/full_pascalvoc_test_preprocess.py (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/gpu_ernie_half_test.cc (99%) rename {paddle/fluid => test/cpp}/inference/api/helper_test.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/int8_mkldnn_quantization.md (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/ipu_ernie_fp16_test.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/ipu_ernie_test.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/ipu_multi_model_profile.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/ipu_resnet50_fp16_test.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/ipu_resnet50_test.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/ipu_word2vec_sample.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/lite_mul_model_test.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/lite_resnet50_test.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/mkldnn_quantizer_config_tester.cc (98%) rename {paddle/fluid => test/cpp}/inference/api/mkldnn_quantizer_tester.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/paddle_infer_api_copy_tensor_tester.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/paddle_infer_api_errors_tester.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/paddle_infer_api_test.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/test_detection_dataset_preprocess.py (100%) rename 
{paddle/fluid/inference/tests => test/cpp/inference}/api/tester_helper.h (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_cascade_rcnn_test.cc (96%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_dynamic_shape_ernie_fp16_serialize_deserialize_test.cc (91%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_dynamic_shape_ernie_serialize_deserialize_test.cc (92%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_dynamic_shape_ernie_test.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_dynamic_shape_test.cc (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_dynamic_shape_transformer_prune_test.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_fc_prelu_test.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_instance_norm_converter_test.cc (96%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_mobilenet_test.cc (98%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_quant_int8_test.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_quant_int8_yolov3_r50_test.cc (97%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_resnet50_test.cc (93%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_resnext_test.cc (94%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_split_converter_test.cc (96%) rename {paddle/fluid/inference/tests => test/cpp/inference}/api/trt_test_helper.h (99%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/CMakeLists.txt (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/README.md (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/external-cmake/gtest-cpp.cmake (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/run.sh (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_LeViT.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_det_mv3_db.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_ernie_text_cls.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_ernie_xnli_int8.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_helper.h (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_mobilnetv1.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_ppyolo_mbv3.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_ppyolov2_r50vd.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_resnet50.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_resnet50_quant.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_suite.h (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/infer_ut/test_yolov3.cc (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/test.cmake (100%) rename {paddle/fluid/inference/tests => test/cpp/inference}/test_helper.h (100%) diff --git a/paddle/fluid/inference/CMakeLists.txt b/paddle/fluid/inference/CMakeLists.txt index d7170686650..59035ab8cd0 100644 --- a/paddle/fluid/inference/CMakeLists.txt +++ 
b/paddle/fluid/inference/CMakeLists.txt @@ -13,10 +13,6 @@ # limitations under the License. # -if(WITH_TESTING) - include(tests/test.cmake) # some generic cmake function for inference -endif() - cc_library( paddle_inference_io SRCS io.cc @@ -91,10 +87,6 @@ endif() # C inference API add_subdirectory(capi_exp) -if(WITH_TESTING AND WITH_INFERENCE_API_TEST) - add_subdirectory(tests/api) -endif() - set(SHARED_INFERENCE_SRCS io.cc ${CMAKE_CURRENT_SOURCE_DIR}/../framework/data_feed.cc diff --git a/paddle/fluid/inference/analysis/CMakeLists.txt b/paddle/fluid/inference/analysis/CMakeLists.txt index 06c4a55c5c9..6453aba9aa3 100644 --- a/paddle/fluid/inference/analysis/CMakeLists.txt +++ b/paddle/fluid/inference/analysis/CMakeLists.txt @@ -36,84 +36,3 @@ cc_library( analysis SRCS analyzer.cc DEPS ${analysis_deps} analysis_helper analysis_pass ${INFER_IR_PASSES}) - -function(inference_analysis_test_build TARGET) - if(WITH_TESTING) - set(options "") - set(oneValueArgs "") - set(multiValueArgs SRCS EXTRA_DEPS) - cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}" - "${multiValueArgs}" ${ARGN}) - inference_base_test_build( - ${TARGET} - SRCS - ${analysis_test_SRCS} - DEPS - ${analysis_test_EXTRA_DEPS} - analysis - pass - ${GLOB_PASS_LIB}) - endif() -endfunction() - -function(inference_analysis_test_run TARGET) - if(WITH_TESTING) - set(options "") - set(oneValueArgs "") - set(multiValueArgs COMMAND ARGS) - cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}" - "${multiValueArgs}" ${ARGN}) - inference_base_test_run(${TARGET} COMMAND ${analysis_test_COMMAND} ARGS - ${analysis_test_ARGS}) - set_tests_properties(${TARGET} PROPERTIES LABELS "RUN_TYPE=INFER") - endif() -endfunction() - -function(inference_analysis_test TARGET) - if(WITH_TESTING) - set(options "") - set(oneValueArgs "") - set(multiValueArgs SRCS ARGS EXTRA_DEPS) - cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}" - "${multiValueArgs}" ${ARGN}) - inference_base_test_build( - ${TARGET} - SRCS - ${analysis_test_SRCS} - DEPS - ${analysis_test_EXTRA_DEPS} - analysis - pass - ${GLOB_PASS_LIB}) - inference_base_test_run(${TARGET} COMMAND ${TARGET} ARGS - ${analysis_test_ARGS}) - set_tests_properties(${TARGET} PROPERTIES LABELS "RUN_TYPE=INFER") - endif() -endfunction() - -if(NOT APPLE AND NOT WIN32) - inference_analysis_test( - test_analyzer - SRCS - analyzer_tester.cc - EXTRA_DEPS - reset_tensor_array - paddle_inference_shared - ARGS - --inference_model_dir=${WORD2VEC_MODEL_DIR}) -elseif(WIN32) - inference_analysis_test( - test_analyzer - SRCS - analyzer_tester.cc - EXTRA_DEPS - reset_tensor_array - paddle_inference_api - ARGS - --inference_model_dir=${WORD2VEC_MODEL_DIR}) - if(WITH_ONNXRUNTIME AND WIN32) - # Copy onnxruntime for some c++ test in Windows, since the test will - # be build only in CI, so suppose the generator in Windows is Ninja. 
- copy_onnx(test_analyzer) - endif() -endif() diff --git a/paddle/fluid/inference/api/CMakeLists.txt b/paddle/fluid/inference/api/CMakeLists.txt index c3eee6888a7..f07e5406a00 100755 --- a/paddle/fluid/inference/api/CMakeLists.txt +++ b/paddle/fluid/inference/api/CMakeLists.txt @@ -97,85 +97,8 @@ else() infer_io_utils model_utils) endif() -cc_test( - test_paddle_inference_api - SRCS api_tester.cc - DEPS paddle_inference_api) - -cc_test( - inference_api_helper_test - SRCS helper_test.cc - DEPS paddle_inference_api) - if(WITH_ONNXRUNTIME AND WIN32) # Copy onnxruntime for some c++ test in Windows, since the test will # be build only in CI, so suppose the generator in Windows is Ninja. copy_onnx(test_paddle_inference_api) endif() - -if(WITH_TESTING) - if(NOT APPLE AND NOT WIN32) - inference_base_test( - test_api_impl - SRCS - api_impl_tester.cc - DEPS - paddle_inference_shared - ARGS - --word2vec_dirname=${WORD2VEC_MODEL_DIR} - --book_dirname=${IMG_CLS_RESNET_INSTALL_DIR}) - elseif(WIN32) - inference_base_test( - test_api_impl - SRCS - api_impl_tester.cc - DEPS - ${inference_deps} - ARGS - --word2vec_dirname=${WORD2VEC_MODEL_DIR} - --book_dirname=${IMG_CLS_RESNET_INSTALL_DIR}) - endif() -endif() - -if(NOT APPLE AND NOT WIN32) - cc_test_old( - test_analysis_predictor - SRCS - analysis_predictor_tester.cc - DEPS - paddle_inference_shared - ARGS - --dirname=${WORD2VEC_MODEL_DIR}) -elseif(WIN32) - cc_test_old( - test_analysis_predictor - SRCS - analysis_predictor_tester.cc - DEPS - analysis_predictor - benchmark - ${inference_deps} - ARGS - --dirname=${WORD2VEC_MODEL_DIR}) -endif() - -if(WITH_TESTING AND WITH_MKLDNN) - if(NOT APPLE AND NOT WIN32) - cc_test( - test_mkldnn_quantizer - SRCS mkldnn_quantizer_tester.cc - DEPS paddle_inference_shared ARGS --dirname=${WORD2VEC_MODEL_DIR}) - elseif(WIN32) - cc_test( - test_mkldnn_quantizer - SRCS mkldnn_quantizer_tester.cc - DEPS analysis_predictor benchmark ${inference_deps} ARGS - --dirname=${WORD2VEC_MODEL_DIR}) - endif() -endif() - -if(WITH_TESTING AND TEST test_api_impl) - if(NOT APPLE) - set_tests_properties(test_api_impl PROPERTIES TIMEOUT 120) - endif() -endif() diff --git a/paddle/fluid/inference/api/onnxruntime_predictor_tester.cc b/paddle/fluid/inference/api/onnxruntime_predictor_tester.cc index deb9f11486e..7b8e4b2ec9f 100644 --- a/paddle/fluid/inference/api/onnxruntime_predictor_tester.cc +++ b/paddle/fluid/inference/api/onnxruntime_predictor_tester.cc @@ -25,9 +25,9 @@ #include "paddle/fluid/inference/api/onnxruntime_predictor.h" #include "paddle/fluid/inference/api/paddle_api.h" #include "paddle/fluid/inference/api/paddle_inference_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "paddle/fluid/inference/utils/io_utils.h" #include "paddle/phi/backends/cpu/cpu_info.h" +#include "test/cpp/inference/api/tester_helper.h" DEFINE_string(dirname, "", "dirname to tests."); diff --git a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt deleted file mode 100644 index cc8e6b1c6a1..00000000000 --- a/paddle/fluid/inference/tests/api/CMakeLists.txt +++ /dev/null @@ -1,1411 +0,0 @@ -# In Windows, c_api test link must link both 2 shared to avoid symbols redefinition, -# in Linux, c_api test cant do like this or graph_to_program register more than once. -# Both Windows and Linux can only use paddle_inference_c, but this will increase size -# of build folder by 30G. 
-if(WIN32) - set(INFERENCE_C_EXTRA_DEPS paddle_inference_shared paddle_inference_c_shared) -else() - set(INFERENCE_C_EXTRA_DEPS paddle_inference_shared paddle_inference_c) -endif() - -function(download_data install_dir data_file check_sum) - string(REGEX MATCH "[^/\\]+$" file_name ${data_file}) - if(NOT EXISTS ${install_dir}/${file_name}) - inference_download_and_uncompress(${install_dir} ${INFERENCE_URL} - ${data_file} ${check_sum}) - endif() -endfunction() - -function(download_data_without_verify install_dir data_file) - string(REGEX MATCH "[^/\\]+$" file_name ${data_file}) - if(NOT EXISTS ${install_dir}/${file_name}) - inference_download_and_uncompress_without_verify( - ${install_dir} ${INFERENCE_URL} ${data_file}) - endif() -endfunction() - -function(download_int8_data install_dir data_file check_sum) - if(NOT EXISTS ${install_dir}/${data_file}) - inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/int8 - ${data_file} ${check_sum}) - endif() -endfunction() - -function(download_int8_data_without_verify install_dir data_file) - if(NOT EXISTS ${install_dir}/${data_file}) - inference_download_and_uncompress_without_verify( - ${install_dir} ${INFERENCE_URL}/int8 ${data_file}) - endif() -endfunction() - -function(download_bfloat16_data install_dir data_file check_sum) - if(NOT EXISTS ${install_dir}/${data_file}) - inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/bfloat16 - ${data_file} ${check_sum}) - endif() -endfunction() - -function(download_bfloat16_data_without_verify install_dir data_file) - if(NOT EXISTS ${install_dir}/${data_file}) - inference_download_and_uncompress_without_verify( - ${install_dir} ${INFERENCE_URL}/bfloat16 ${data_file}) - endif() -endfunction() - -function(download_GRU_data install_dir data_file check_sum) - if(NOT EXISTS ${install_dir}/${data_file}) - inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/gru - ${data_file} ${check_sum}) - endif() -endfunction() - -function(download_GRU_data_without_verify install_dir data_file) - if(NOT EXISTS ${install_dir}/${data_file}) - inference_download_and_uncompress_without_verify( - ${install_dir} ${INFERENCE_URL}/gru ${data_file}) - endif() -endfunction() - -function(download_quant_data install_dir data_file check_sum) - if(NOT EXISTS ${install_dir}/${data_file}) - inference_download_and_uncompress( - ${install_dir} ${INFERENCE_URL}/int8/QAT_models ${data_file} ${check_sum}) - endif() -endfunction() - -function(download_quant_data_without_verify install_dir data_file) - if(NOT EXISTS ${install_dir}/${data_file}) - inference_download_and_uncompress_without_verify( - ${install_dir} ${INFERENCE_URL}/int8/QAT_models ${data_file}) - endif() -endfunction() - -function(download_model_and_data install_dir model_name model_check_sum - data_name data_check_sum) - download_data(${install_dir} ${model_name} ${model_check_sum}) - download_data(${install_dir} ${data_name} ${data_check_sum}) -endfunction() - -function(download_model_and_data_without_verify install_dir model_name - data_name) - download_data_without_verify(${install_dir} ${model_name}) - download_data_without_verify(${install_dir} ${data_name}) -endfunction() - -function(download_result install_dir result_name check_sum) - download_data(${install_dir} ${result_name} ${check_sum}) -endfunction() - -function(download_result_without_verify install_dir result_name) - download_data_without_verify(${install_dir} ${result_name}) -endfunction() - -function(inference_analysis_api_test target install_dir filename) - 
inference_analysis_test( - ${target} - SRCS - ${filename} - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${install_dir}/model - --infer_data=${install_dir}/data.txt - --refer_result=${install_dir}/result.txt) -endfunction() - -function(inference_analysis_api_int8_test target install_dir filename) - inference_analysis_test( - ${target} - SRCS - ${filename} - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${install_dir}/model - --infer_data=${install_dir}/data.txt - --refer_result=${install_dir}/result.txt - --accuracy=0.8 - --batch_size=5 - --enable_int8_ptq=true) -endfunction() - -function(inference_multiple_models_analysis_api_test target install_dir - filename) - inference_analysis_test( - ${target} - SRCS - ${filename} - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${install_dir}/mobilenet_v2_models/1 - --infer_model2=${install_dir}/mobilenet_v2_models/xx - --infer_model3=${install_dir}/mobilenet_v2_models/3) -endfunction() - -function(inference_analysis_api_test_build TARGET_NAME filename) - inference_analysis_test_build(${TARGET_NAME} SRCS ${filename} EXTRA_DEPS - paddle_inference_shared) -endfunction() - -function(inference_analysis_api_int8_test_run TARGET_NAME test_binary model_dir - data_path) - inference_analysis_test_run( - ${TARGET_NAME} - COMMAND - ${test_binary} - ARGS - --infer_model=${model_dir}/model - --infer_data=${data_path} - --warmup_batch_size=${WARMUP_BATCH_SIZE} - --batch_size=50 - --enable_int8_ptq=true - --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} - --iterations=2) -endfunction() - -function(inference_analysis_api_int8_test_run_custom_warmup_batch_size - TARGET_NAME test_binary model_dir data_path warmup_batch_size) - set(WARMUP_BATCH_SIZE ${warmup_batch_size}) - inference_analysis_api_int8_test_run(${TARGET_NAME} ${test_binary} - ${model_dir} ${data_path}) -endfunction() - -function(inference_analysis_api_bfloat16_test_run TARGET_NAME test_binary - model_dir data_path) - inference_analysis_test_run( - ${TARGET_NAME} - COMMAND - ${test_binary} - ARGS - --infer_model=${model_dir}/model - --infer_data=${data_path} - --batch_size=50 - --enable_bf16=true - --paddle_num_threads=${CPU_NUM_THREADS_ON_CI} - --iterations=2) -endfunction() - -function(inference_analysis_api_object_dection_int8_test_run TARGET_NAME - test_binary model_dir data_path) - inference_analysis_test_run( - ${TARGET_NAME} - COMMAND - ${test_binary} - ARGS - --infer_model=${model_dir}/model - --infer_data=${data_path} - --warmup_batch_size=10 - --batch_size=300 - --enable_int8_ptq=true - --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} - --iterations=1) -endfunction() - -function(inference_analysis_api_test_with_fake_data_build TARGET_NAME filename) - inference_analysis_test_build(${TARGET_NAME} SRCS ${filename} EXTRA_DEPS - paddle_inference_shared) -endfunction() - -function(inference_analysis_api_test_with_fake_data_run TARGET_NAME test_binary - model_dir disable_fc) - inference_analysis_test_run( - ${TARGET_NAME} COMMAND ${test_binary} ARGS --infer_model=${model_dir}/model - --disable_mkldnn_fc=${disable_fc}) -endfunction() - -function( - inference_analysis_api_quant_test_run - TARGET_NAME - test_binary - fp32_model_dir - int8_model_dir - data_path - enable_int8_qat) - inference_analysis_test_run( - ${TARGET_NAME} - COMMAND - ${test_binary} - ARGS - --fp32_model=${fp32_model_dir} - --int8_model=${int8_model_dir} - --infer_data=${data_path} - --batch_size=50 - --enable_int8_qat=${enable_int8_qat} - --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} - 
--with_accuracy_layer=false - --iterations=2) -endfunction() - -function(inference_analysis_api_lexical_test_run TARGET_NAME test_binary - infer_model data_path) - inference_analysis_test_run( - ${TARGET_NAME} - COMMAND - ${test_binary} - ARGS - --infer_model=${infer_model} - --infer_data=${data_path} - --batch_size=50 - --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} - --with_accuracy_layer=true - --use_analysis=true - --iterations=2) -endfunction() - -function(inference_analysis_api_lexical_bfloat16_test_run TARGET_NAME - test_binary infer_model data_path) - inference_analysis_test_run( - ${TARGET_NAME} - COMMAND - ${test_binary} - ARGS - --infer_model=${infer_model} - --infer_data=${data_path} - --batch_size=50 - --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} - --with_accuracy_layer=true - --use_analysis=true - --enable_bf16=true - --iterations=2) -endfunction() - -function( - inference_analysis_api_lexical_int8_test_run - TARGET_NAME - test_binary - infer_model - data_path - enable_int8_ptq - enable_int8_qat - fuse_multi_gru) - inference_analysis_test_run( - ${TARGET_NAME} - COMMAND - ${test_binary} - ARGS - --infer_model=${infer_model} - --infer_data=${data_path} - --batch_size=100 - --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} - --with_accuracy_layer=true - --use_analysis=true - --enable_int8_ptq=${enable_int8_ptq} - --enable_int8_qat=${enable_int8_qat} - --quantized_accuracy=0.015 - --fuse_multi_gru=${fuse_multi_gru} - --iterations=4) -endfunction() - -function(preprocess_data2bin_test_run target py_script_source data_dir - output_file) - py_test(${target} - SRCS ${CMAKE_CURRENT_SOURCE_DIR}/${py_script_source} ARGS - --data_dir=${data_dir} --output_file=${output_file} --local) -endfunction() - -if(NOT APPLE AND WITH_MKLML) - # RNN1 - set(RNN1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/rnn1") - download_model_and_data_without_verify( - ${RNN1_INSTALL_DIR} "rnn1/model.tar.gz" "rnn1/data.txt.tar.gz") - inference_analysis_api_test(test_analyzer_rnn1 ${RNN1_INSTALL_DIR} - analyzer_rnn1_tester.cc) - - # seq_pool1 - set(SEQ_POOL1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/seq_pool") - download_model_and_data_without_verify( - ${SEQ_POOL1_INSTALL_DIR} "seq_pool1_model_.tar.gz" - "seq_pool1_data.txt.tar.gz") - inference_analysis_api_test( - test_analyzer_seq_pool1_compare_determine ${SEQ_POOL1_INSTALL_DIR} - analyzer_seq_pool1_compare_determine_tester.cc) - inference_analysis_api_test(test_analyzer_seq_pool1 ${SEQ_POOL1_INSTALL_DIR} - analyzer_seq_pool1_compare_tester.cc) - inference_analysis_api_test( - test_analyzer_seq_pool1_fuse_compare_zero_copy ${SEQ_POOL1_INSTALL_DIR} - analyzer_seq_pool1_fuse_compare_zero_copy_tester.cc) - inference_analysis_api_test( - test_analyzer_seq_pool1_fuse_statis ${SEQ_POOL1_INSTALL_DIR} - analyzer_seq_pool1_fuse_statis_tester.cc) - inference_analysis_api_test( - test_analyzer_seq_pool1_profile ${SEQ_POOL1_INSTALL_DIR} - analyzer_seq_pool1_profile_tester.cc) - if(NOT WIN32) - set_tests_properties(test_analyzer_seq_pool1_compare_determine - PROPERTIES TIMEOUT 120) - set_tests_properties(test_analyzer_seq_pool1 PROPERTIES TIMEOUT 120) - set_tests_properties(test_analyzer_seq_pool1_fuse_compare_zero_copy - PROPERTIES TIMEOUT 120) - set_tests_properties(test_analyzer_seq_pool1_fuse_statis PROPERTIES TIMEOUT - 120) - set_tests_properties(test_analyzer_seq_pool1_profile PROPERTIES TIMEOUT 120) - endif() -else() - # TODO: fix this test on MACOS and OPENBLAS, the reason is that - # fusion_seqexpand_concat_fc_op is not supported on MACOS and OPENBLAS - message( - WARNING - 
"These tests has been disabled in OSX or WITH_MKL=OFF before being fixed: \n test_analyzer_rnn1" - ) - message( - WARNING - "These tests has been disabled in OSX or WITH_MKL=OFF before being fixed: \n test_analyzer_seq_pool1" - ) -endif() - -# RNN2 -set(RNN2_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/rnn2") -download_model_and_data_without_verify(${RNN2_INSTALL_DIR} "rnn2_model.tar.gz" - "rnn2_data.txt.tar.gz") -inference_analysis_api_test(test_analyzer_rnn2 ${RNN2_INSTALL_DIR} - analyzer_rnn2_tester.cc) - -# TODO(luotao, Superjom) Disable DAM test, temporarily fix -# https://github.com/PaddlePaddle/Paddle/issues/15032#issuecomment-455990914. -# After inference framework refactor, will reopen it. -# normal DAM -set(DAM_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/dam") -download_model_and_data_without_verify(${DAM_INSTALL_DIR} "DAM_model.tar.gz" - "DAM_data.txt.tar.gz") -#inference_analysis_api_test(test_analyzer_dam ${DAM_INSTALL_DIR} analyzer_dam_tester.cc EXTRA_DEPS legacy_allocator) - -# small DAM -set(DAM_SMALL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/small_dam") -download_model_and_data_without_verify( - ${DAM_SMALL_INSTALL_DIR} "dam_small_model.tar.gz" "dam_small_data.txt.tar.gz") -inference_analysis_test( - test_analyzer_small_dam - SRCS - analyzer_dam_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${DAM_SMALL_INSTALL_DIR}/model - --infer_data=${DAM_SMALL_INSTALL_DIR}/data.txt) - -#save model -inference_analysis_api_test(test_analyzer_save_model ${DAM_SMALL_INSTALL_DIR} - analyzer_save_model_tester.cc) - -# chinese_ner -set(CHINESE_NER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/chinese_ner") -download_model_and_data_without_verify( - ${CHINESE_NER_INSTALL_DIR} "chinese_ner_model.tar.gz" - "chinese_ner-data.txt.tar.gz") -inference_analysis_api_test(test_analyzer_ner ${CHINESE_NER_INSTALL_DIR} - analyzer_ner_tester.cc) - -# lac -set(LAC_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/lac") -download_model_and_data( - ${LAC_INSTALL_DIR} "lac_model.tar.gz" 419ca6eb85f57a01bfe173591910aec5 - "lac_data.txt.tar.gz" 9983539cd6b34fbdc411e43422776bfd) -inference_analysis_api_test(test_analyzer_lac ${LAC_INSTALL_DIR} - analyzer_lac_tester.cc) - -# Pyramid DNN -set(PYRAMID_DNN_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/pyramid_dnn") -download_model_and_data_without_verify( - ${PYRAMID_DNN_INSTALL_DIR} "PyramidDNN_model.tar.gz" - "PyramidDNN_data.txt.tar.gz") -inference_analysis_api_test( - test_analyzer_pyramid_dnn ${PYRAMID_DNN_INSTALL_DIR} - analyzer_pyramid_dnn_tester.cc) - -# Ernie -set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie") -download_model_and_data( - ${ERNIE_INSTALL_DIR} "Ernie_model.tar.gz" aa59192dd41ed377f9f168e3a1309fa6 - "Ernie_data.txt.tar.gz" 5396e63548edad7ca561e7e26a9476d1) -download_result(${ERNIE_INSTALL_DIR} "Ernie_result.txt.tar.gz" - 73beea65abda2edb61c1662cd3180c62) -if(WITH_GPU) - inference_analysis_api_test(test_analyzer_ernie ${ERNIE_INSTALL_DIR} - analyzer_ernie_tester.cc) - inference_analysis_api_test(gpu_ernie_half_test ${ERNIE_INSTALL_DIR} - gpu_ernie_half_test.cc) - set_tests_properties(gpu_ernie_half_test PROPERTIES TIMEOUT 60) -endif() -inference_analysis_api_int8_test(test_analyzer_ernie_int8 ${ERNIE_INSTALL_DIR} - analyzer_ernie_int8_tester.cc) - -# Ernie large -set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie_Large") -download_model_and_data( - ${ERNIE_INSTALL_DIR} "Ernie_large_model.tar.gz" - af7715245ed32cc77374625d4c80f7ef "Ernie_large_data.txt.tar.gz" - edb2113eec93783cad56ed76d47ba57f) 
-download_result(${ERNIE_INSTALL_DIR} "Ernie_large_result.txt.tar.gz" - 1facda98eef1085dc9d435ebf3f23a73) -inference_analysis_test( - test_analyzer_ernie_large - SRCS - analyzer_ernie_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${ERNIE_INSTALL_DIR}/model - --infer_data=${ERNIE_INSTALL_DIR}/data.txt - --refer_result=${ERNIE_INSTALL_DIR}/result.txt - --ernie_large=true) -if(NOT WIN32 - AND NOT APPLE - AND TEST test_analyzer_ernie_large) - set_tests_properties(test_analyzer_ernie_large PROPERTIES TIMEOUT 150 LABELS - "RUN_TYPE=NIGHTLY") -endif() -if(WIN32 AND TEST test_analyzer_ernie_large) - set_tests_properties(test_analyzer_ernie_large PROPERTIES TIMEOUT 200) -endif() - -# text_classification -set(TEXT_CLASSIFICATION_INSTALL_DIR - "${INFERENCE_DEMO_INSTALL_DIR}/text_classification") -download_model_and_data( - ${TEXT_CLASSIFICATION_INSTALL_DIR} "text-classification-Senta.tar.gz" - 3f0f440313ca50e26184e65ffd5809ab "text_classification_data.txt.tar.gz" - 36ae620020cc3377f45ed330dd36238f) -inference_analysis_api_test( - test_analyzer_text_classification ${TEXT_CLASSIFICATION_INSTALL_DIR} - analyzer_text_classification_tester.cc) - -# seq_conv1 -set(SEQ_CONV1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/seq_conv1") -download_model_and_data_without_verify( - ${SEQ_CONV1_INSTALL_DIR} "seq_conv1_model.tar.gz" "seq_conv1_data.txt.tar.gz") -inference_analysis_api_test(test_analyzer_seq_conv1 ${SEQ_CONV1_INSTALL_DIR} - analyzer_seq_conv1_tester.cc) - -# transformer, the dataset only works on batch_size=8 now -set(TRANSFORMER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/transformer") -download_model_and_data_without_verify( - ${TRANSFORMER_INSTALL_DIR} "temp/transformer_model.tar.gz" - "temp/transformer_data.txt.tar.gz") -inference_analysis_test( - test_analyzer_transformer - SRCS - analyzer_transformer_compare_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TRANSFORMER_INSTALL_DIR}/model - --infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt - --batch_size=8 - --cpu_num_threads=${CPU_NUM_THREADS_ON_CI}) -inference_analysis_test( - test_analyzer_transformer_fuse - SRCS - analyzer_transformer_fuse_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TRANSFORMER_INSTALL_DIR}/model - --infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt - --batch_size=8 - --cpu_num_threads=${CPU_NUM_THREADS_ON_CI}) -inference_analysis_test( - test_analyzer_transformer_profile - SRCS - analyzer_transformer_profile_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TRANSFORMER_INSTALL_DIR}/model - --infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt - --batch_size=8 - --cpu_num_threads=${CPU_NUM_THREADS_ON_CI}) - -# VIT-OCR -set(VIT_OCR_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/vit") -if(NOT EXISTS ${VIT_OCR_INSTALL_DIR}/vit_ocr.tgz) - inference_download_and_uncompress_without_verify( - ${VIT_OCR_INSTALL_DIR} ${INFERENCE_URL} "ocr/vit_ocr.tgz") -endif() -inference_analysis_test( - test_analyzer_vit_ocr - SRCS - analyzer_vit_ocr_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${VIT_OCR_INSTALL_DIR}/vit_ocr/model - --infer_data=${VIT_OCR_INSTALL_DIR}/vit_ocr/datavit.txt) - -# ocr -set(OCR_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/ocr") -if(NOT EXISTS ${OCR_INSTALL_DIR}/ocr.tar.gz) - inference_download_and_uncompress_without_verify( - ${OCR_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/" - "inference-vis-demos/ocr.tar.gz") -endif() -inference_analysis_api_test(test_analyzer_ocr ${OCR_INSTALL_DIR} - 
analyzer_vis_tester.cc) - -# densebox -set(DENSEBOX_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/densebox") -download_data_without_verify(${DENSEBOX_INSTALL_DIR} "densebox.tar.gz") -inference_analysis_test( - test_analyzer_detect_functional_mkldnn - SRCS - analyzer_detect_functional_mkldnn_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${DENSEBOX_INSTALL_DIR}/model - --infer_data=${DENSEBOX_INSTALL_DIR}/detect_input_50.txt - --infer_shape=${DENSEBOX_INSTALL_DIR}/shape_50.txt) - -# mobilenet with transpose op -set(MOBILENET_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet") -if(NOT EXISTS ${MOBILENET_INSTALL_DIR}/mobilenet.tar.gz) - inference_download_and_uncompress_without_verify( - ${MOBILENET_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/" - "inference-vis-demos/mobilenet.tar.gz") -endif() -inference_analysis_api_test(test_analyzer_mobilenet_transpose - ${MOBILENET_INSTALL_DIR} analyzer_vis_tester.cc) - -### Image classification tests with fake data -set(IMG_CLASS_TEST_APP "test_analyzer_image_classification") -set(IMG_CLASS_TEST_APP_SRC "analyzer_image_classification_tester.cc") - -# build test binary to be used in subsequent tests -inference_analysis_api_test_with_fake_data_build(${IMG_CLASS_TEST_APP} - ${IMG_CLASS_TEST_APP_SRC}) - -# googlenet -set(GOOGLENET_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/googlenet") -download_data_without_verify(${GOOGLENET_MODEL_DIR} "googlenet.tar.gz") -inference_analysis_api_test_with_fake_data_run( - test_analyzer_googlenet ${IMG_CLASS_TEST_APP} ${GOOGLENET_MODEL_DIR} false) - -# resnet50 -set(RESNET50_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/resnet50") -download_data_without_verify(${RESNET50_MODEL_DIR} "resnet50_model.tar.gz") -inference_analysis_api_test_with_fake_data_run( - test_analyzer_resnet50 ${IMG_CLASS_TEST_APP} ${RESNET50_MODEL_DIR} true) -if(WIN32) - set_tests_properties(test_analyzer_resnet50 PROPERTIES TIMEOUT 200) -endif() - -# mobilenet with depthwise_conv op -set(MOBILENET_MODEL_DIR - "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet_depthwise_conv") -download_data_without_verify(${MOBILENET_MODEL_DIR} "mobilenet_model.tar.gz") -inference_analysis_api_test_with_fake_data_run( - test_analyzer_mobilenet_depthwise_conv ${IMG_CLASS_TEST_APP} - ${MOBILENET_MODEL_DIR} false) - -if(WITH_MKLDNN) - - ### INT8 tests - - set(INT8_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/int8v2") - - ## Image classification models - - # ImageNet small dataset - # It may be already downloaded for Quant & INT8 unit tests - set(IMAGENET_DATA_ARCHIVE "imagenet_val_100_tail.tar.gz") - set(IMAGENET_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/imagenet") - set(IMAGENET_DATA_PATH "${IMAGENET_DATA_DIR}/data.bin") - download_int8_data_without_verify(${IMAGENET_DATA_DIR} - ${IMAGENET_DATA_ARCHIVE}) - - # build test binary to be used in subsequent tests - set(INT8_IMG_CLASS_TEST_APP "test_analyzer_int8_image_classification") - set(INT8_IMG_CLASS_TEST_APP_SRC - "analyzer_int8_image_classification_tester.cc") - inference_analysis_api_test_build(${INT8_IMG_CLASS_TEST_APP} - ${INT8_IMG_CLASS_TEST_APP_SRC}) - - # resnet50 int8 - set(INT8_RESNET50_MODEL_DIR "${INT8_DATA_DIR}/resnet50") - download_int8_data_without_verify(${INT8_RESNET50_MODEL_DIR} - "resnet50_int8_model.tar.gz") - inference_analysis_api_int8_test_run( - test_analyzer_int8_resnet50 ${INT8_IMG_CLASS_TEST_APP} - ${INT8_RESNET50_MODEL_DIR} ${IMAGENET_DATA_PATH}) - - # mobilenetv1 int8 - set(INT8_MOBILENETV1_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv1") - 
download_int8_data_without_verify(${INT8_MOBILENETV1_MODEL_DIR} - "mobilenetv1_int8_model.tar.gz") - inference_analysis_api_int8_test_run( - test_analyzer_int8_mobilenetv1 ${INT8_IMG_CLASS_TEST_APP} - ${INT8_MOBILENETV1_MODEL_DIR} ${IMAGENET_DATA_PATH}) - - # mobilenetv2 int8 - set(INT8_MOBILENETV2_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv2") - download_int8_data_without_verify(${INT8_MOBILENETV2_MODEL_DIR} - "mobilenet_v2_int8_model.tar.gz") - inference_analysis_api_int8_test_run( - test_analyzer_int8_mobilenetv2 ${INT8_IMG_CLASS_TEST_APP} - ${INT8_MOBILENETV2_MODEL_DIR} ${IMAGENET_DATA_PATH}) - - # resnet101 int8 - set(INT8_RESNET101_MODEL_DIR "${INT8_DATA_DIR}/resnet101") - download_int8_data_without_verify(${INT8_RESNET101_MODEL_DIR} - "Res101_int8_model.tar.gz") - # inference_analysis_api_int8_test_run(test_analyzer_int8_resnet101 ${INT8_IMG_CLASS_TEST_APP} ${INT8_RESNET101_MODEL_DIR} ${IMAGENET_DATA_PATH}) - - # vgg16 int8 - set(INT8_VGG16_MODEL_DIR "${INT8_DATA_DIR}/vgg16") - download_int8_data_without_verify(${INT8_VGG16_MODEL_DIR} - "VGG16_int8_model.tar.gz") - # inference_analysis_api_int8_test_run(test_analyzer_int8_vgg16 ${INT8_IMG_CLASS_TEST_APP} ${INT8_VGG16_MODEL_DIR} ${IMAGENET_DATA_PATH}) - - # vgg19 int8 - set(INT8_VGG19_MODEL_DIR "${INT8_DATA_DIR}/vgg19") - download_int8_data_without_verify(${INT8_VGG19_MODEL_DIR} - "VGG19_int8_model.tar.gz") - # inference_analysis_api_int8_test_run(test_analyzer_int8_vgg19 ${INT8_IMG_CLASS_TEST_APP} ${INT8_VGG19_MODEL_DIR} ${IMAGENET_DATA_PATH}) - - # googlenet int8 - set(INT8_GOOGLENET_MODEL_DIR "${INT8_DATA_DIR}/googlenet") - download_int8_data_without_verify(${INT8_GOOGLENET_MODEL_DIR} - "GoogleNet_int8_model.tar.gz") - inference_analysis_api_int8_test_run_custom_warmup_batch_size( - test_analyzer_int8_googlenet ${INT8_IMG_CLASS_TEST_APP} - ${INT8_GOOGLENET_MODEL_DIR} ${IMAGENET_DATA_PATH} 10) - - # mobilenetv3_large_x1_0 int8 - set(INT8_MOBILENETV3_LARGE_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv3_large") - set(INT8_MOBILENETV3_FILE_NAME "MobileNetV3_large_x1_0_infer.tar") - if(NOT EXISTS - ${INT8_MOBILENETV3_LARGE_MODEL_DIR}/${INT8_MOBILENETV3_FILE_NAME}) - inference_download_and_uncompress_without_verify( - ${INT8_MOBILENETV3_LARGE_MODEL_DIR} - "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/" - ${INT8_MOBILENETV3_FILE_NAME}) - endif() - inference_analysis_test_run( - test_analyzer_int8_mobilenetv3_large - COMMAND - ${INT8_IMG_CLASS_TEST_APP} - ARGS - --infer_model=${INT8_MOBILENETV3_LARGE_MODEL_DIR}/MobileNetV3_large_x1_0_infer - --infer_data=${IMAGENET_DATA_PATH} - --warmup_batch_size=50 - --batch_size=1 - --enable_int8_ptq=true - --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} - --iterations=100 - --with_accuracy_layer=false) - - ### BFLOAT16 tests - - # build test binary to be used in subsequent tests - set(BF16_IMG_CLASS_TEST_APP "test_analyzer_bfloat16_image_classification") - set(BF16_IMG_CLASS_TEST_APP_SRC - "analyzer_bfloat16_image_classification_tester.cc") - inference_analysis_api_test_build(${BF16_IMG_CLASS_TEST_APP} - ${BF16_IMG_CLASS_TEST_APP_SRC}) - - # resnet50 bfloat16 - inference_analysis_api_bfloat16_test_run( - test_analyzer_bfloat16_resnet50 ${BF16_IMG_CLASS_TEST_APP} - ${INT8_RESNET50_MODEL_DIR} ${IMAGENET_DATA_PATH}) - - # googlenet bfloat16 - inference_analysis_api_bfloat16_test_run( - test_analyzer_bfloat16_googlenet ${BF16_IMG_CLASS_TEST_APP} - ${INT8_GOOGLENET_MODEL_DIR} ${IMAGENET_DATA_PATH}) - - # mobilenetv1 bfloat16 - inference_analysis_api_bfloat16_test_run( - 
test_analyzer_bfloat16_mobilenetv1 ${BF16_IMG_CLASS_TEST_APP} - ${INT8_MOBILENETV1_MODEL_DIR} ${IMAGENET_DATA_PATH}) - - # mobilenetv2 bfloat16 - inference_analysis_api_bfloat16_test_run( - test_analyzer_bfloat16_mobilenetv2 ${BF16_IMG_CLASS_TEST_APP} - ${INT8_MOBILENETV2_MODEL_DIR} ${IMAGENET_DATA_PATH}) - - # mobilenetv3_large - inference_analysis_test_run( - test_analyzer_bfloat16_mobilenetv3_large - COMMAND - ${BF16_IMG_CLASS_TEST_APP} - ARGS - --infer_model=${INT8_MOBILENETV3_LARGE_MODEL_DIR}/MobileNetV3_large_x1_0_infer - --infer_data=${IMAGENET_DATA_PATH} - --batch_size=1 - --enable_bf16=true - --paddle_num_threads=${CPU_NUM_THREADS_ON_CI} - --iterations=100 - --with_accuracy_layer=false) - - ### Object detection models - set(PASCALVOC_DATA_PATH "${INT8_DATA_DIR}/pascalvoc_val_head_300.bin") - set(INT8_OBJ_DETECT_TEST_APP "test_analyzer_int8_object_detection") - set(INT8_OBJ_DETECT_TEST_APP_SRC "analyzer_int8_object_detection_tester.cc") - - # download dataset if necessary - download_int8_data_without_verify(${INT8_DATA_DIR} - "pascalvoc_val_head_300.tar.gz") - - # build test binary to be used in subsequent tests - inference_analysis_api_test_build(${INT8_OBJ_DETECT_TEST_APP} - ${INT8_OBJ_DETECT_TEST_APP_SRC}) - - # mobilenet-ssd int8 - set(INT8_MOBILENET_SSD_MODEL_DIR "${INT8_DATA_DIR}/mobilenet-ssd") - download_int8_data_without_verify(${INT8_MOBILENET_SSD_MODEL_DIR} - "mobilenet_ssd_int8_model.tar.gz") - inference_analysis_api_object_dection_int8_test_run( - test_analyzer_int8_mobilenet_ssd ${INT8_OBJ_DETECT_TEST_APP} - ${INT8_MOBILENET_SSD_MODEL_DIR} ${PASCALVOC_DATA_PATH}) - - ### Lexcial analysis GRU model - set(GRU_PATH "${INFERENCE_DEMO_INSTALL_DIR}/gru") - download_gru_data_without_verify("${GRU_PATH}" "GRU_eval_data.tar.gz") - download_gru_data_without_verify("${GRU_PATH}" "GRU_eval_model_v2.tar.gz") - set(GRU_DATA_PATH "${GRU_PATH}/GRU_eval_data.bin") - set(GRU_MODEL_PATH "${GRU_PATH}/GRU_eval_model_v2") - set(LEXICAL_TEST_APP "test_analyzer_lexical_analysis") - set(LEXICAL_TEST_APP_SRC "analyzer_lexical_analysis_gru_tester.cc") - - # build test binary to be used in subsequent tests - inference_analysis_api_test_build(${LEXICAL_TEST_APP} ${LEXICAL_TEST_APP_SRC}) - # run lexcial analysis test - inference_analysis_api_lexical_test_run( - test_analyzer_lexical_gru ${LEXICAL_TEST_APP} ${GRU_MODEL_PATH} - ${GRU_DATA_PATH}) - # run bfloat16 lexical analysis test - inference_analysis_api_lexical_bfloat16_test_run( - test_analyzer_lexical_gru_bfloat16 ${LEXICAL_TEST_APP} ${GRU_MODEL_PATH} - ${GRU_DATA_PATH}) - # run post-training quantization lexical analysis test - inference_analysis_api_lexical_int8_test_run( - test_analyzer_lexical_gru_int8 - ${LEXICAL_TEST_APP} - ${GRU_MODEL_PATH} - ${GRU_DATA_PATH} - true # enable_int8_ptq - false # enable_int8_qat - false) # fuse_multi_gru - # run post-training quantization lexical analysis test with multi_gru fuse - inference_analysis_api_lexical_int8_test_run( - test_analyzer_lexical_gru_int8_multi_gru - ${LEXICAL_TEST_APP} - ${GRU_MODEL_PATH} - ${GRU_DATA_PATH} - true # enable_int8_ptq - false # enable_int8_qat - true) # fuse_multi_gru - - # run qat gru test - set(QAT_GRU_MODEL_ARCHIVE "GRU_quant_acc.tar.gz") - set(QAT_GRU_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/quant/GRU_quant2") - download_quant_data(${QAT_GRU_MODEL_DIR} ${QAT_GRU_MODEL_ARCHIVE} - cf207f8076dcfb8b74d8b6bdddf9090c) - - inference_analysis_api_lexical_int8_test_run( - test_analyzer_lexical_gru_qat_int8 - ${LEXICAL_TEST_APP} - "${QAT_GRU_MODEL_DIR}/GRU_quant_acc" - 
${GRU_DATA_PATH} - false # enable_int8_ptq - true # enable_int8_qat - false) # fuse_multi_gru - - ### optimized FP32 vs. Quant INT8 tests - - set(QUANT_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/quant") - set(QUANT_IMG_CLASS_TEST_APP "test_analyzer_quant_image_classification") - set(QUANT_IMG_CLASS_TEST_APP_SRC - "analyzer_quant_image_classification_tester.cc") - - # build test binary to be used in subsequent tests - inference_analysis_api_test_build(${QUANT_IMG_CLASS_TEST_APP} - ${QUANT_IMG_CLASS_TEST_APP_SRC}) - - # MobileNetV1 FP32 vs. Quant INT8 - # The FP32 model should already be downloaded for slim Quant unit tests on Linux - set(QUANT2_MobileNetV1_MODEL_DIR "${QUANT_DATA_DIR}/MobileNetV1_quant2") - set(QUANT2_INT8_MobileNetV1_MODEL_DIR - "${QUANT_DATA_DIR}/MobileNetV1_quant2_int8") - if(NOT LINUX) - download_quant_data_without_verify(${QUANT2_MobileNetV1_MODEL_DIR} - "MobileNet_qat_perf.tar.gz") - endif() - download_quant_data_without_verify(${QUANT2_INT8_MobileNetV1_MODEL_DIR} - "MobileNet_qat_perf_int8.tar.gz") - inference_analysis_api_quant_test_run( - test_analyzer_quant_performance_benchmark - ${QUANT_IMG_CLASS_TEST_APP} - ${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float - ${QUANT2_INT8_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf_int8 - ${IMAGENET_DATA_PATH} - false) - - # Quant2 MobileNetV1 - inference_analysis_api_quant_test_run( - test_analyzer_quant2_mobilenetv1_mkldnn - ${QUANT_IMG_CLASS_TEST_APP} - ${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float - ${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float - ${IMAGENET_DATA_PATH} - true) - - # Quant2 ResNet50 with input/output scales in `fake_quantize_range_abs_max` operators and the `out_threshold` attributes, - # with weight scales in `fake_channel_wise_dequantize_max_abs` operators - set(QUANT2_RESNET50_CHANNELWISE_MODEL_DIR - "${QUANT_DATA_DIR}/ResNet50_quant2_channelwise") - set(QUANT2_RESNET50_CHANNELWISE_MODEL_ARCHIVE - "ResNet50_qat_channelwise.tar.gz") - if(NOT LINUX) - download_quant_data_without_verify( - ${QUANT2_RESNET50_CHANNELWISE_MODEL_DIR} - ${QUANT2_RESNET50_CHANNELWISE_MODEL_ARCHIVE}) - endif() - set(QUANT2_RESNET50_MODEL - ${QUANT2_RESNET50_CHANNELWISE_MODEL_DIR}/ResNet50_qat_channelwise) - inference_analysis_api_quant_test_run( - test_analyzer_quant2_resnet50_channelwise_mkldnn - ${QUANT_IMG_CLASS_TEST_APP} ${QUANT2_RESNET50_MODEL} - ${QUANT2_RESNET50_MODEL} ${IMAGENET_DATA_PATH} true) - - ### Other tests - - # MKLDNN quantizer config - set(MKLDNN_QUANTIZER_CONFIG_TEST_APP "test_mkldnn_quantizer_config") - set(MKLDNN_QUANTIZER_CONFIG_TEST_APP_SRC "mkldnn_quantizer_config_tester.cc") - inference_analysis_api_test_build(${MKLDNN_QUANTIZER_CONFIG_TEST_APP} - ${MKLDNN_QUANTIZER_CONFIG_TEST_APP_SRC}) - inference_analysis_test_run(test_mkldnn_quantizer_config COMMAND - ${MKLDNN_QUANTIZER_CONFIG_TEST_APP}) - - # preprocess data2bin imagenet - download_int8_data_without_verify(${INT8_DATA_DIR} "imagenet_small.tar.gz") - set(IMAGENET_SMALL_DATA_DIR "${INT8_DATA_DIR}/imagenet_small") - set(IMAGENET_SMALL_OUTPUT_FILE "imagenet_small.bin") - preprocess_data2bin_test_run( - preprocess_local_imagenet "full_ILSVRC2012_val_preprocess.py" - ${IMAGENET_SMALL_DATA_DIR} ${IMAGENET_SMALL_OUTPUT_FILE}) - - # preprocess data2bin pascalvoc - download_int8_data_without_verify(${INT8_DATA_DIR} "pascalvoc_small.tar.gz") - set(PASCALVOC_SMALL_DATA_DIR "${INT8_DATA_DIR}/pascalvoc_small") - set(PASCALVOC_SMALL_OUTPUT_FILE "pascalvoc_small.bin") - preprocess_data2bin_test_run( - preprocess_local_pascalvoc 
"full_pascalvoc_test_preprocess.py" - ${PASCALVOC_SMALL_DATA_DIR} ${PASCALVOC_SMALL_OUTPUT_FILE}) - -endif() - -# bert, max_len=20, embedding_dim=128 -set(BERT_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/bert_emb128") -download_model_and_data_without_verify( - ${BERT_INSTALL_DIR} "bert_emb128_model.tar.gz" "bert_data_len20.txt.tar.gz") -inference_analysis_api_test(test_analyzer_bert ${BERT_INSTALL_DIR} - analyzer_bert_tester.cc) - -# multiple models prediction -set(MMP_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/multi_model_prediction") -download_data_without_verify(${MMP_INSTALL_DIR} - PaddleInference/mobilenet_v2_models.tar.gz) -inference_multiple_models_analysis_api_test( - test_analyzer_multi_model_prediction ${MMP_INSTALL_DIR} - analyzer_mmp_tester.cc) - -if(WITH_GPU AND TENSORRT_FOUND) - set(TRT_MODEL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/trt_models") - if(NOT EXISTS ${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models.tar.gz) - inference_download_and_uncompress( - ${TRT_MODEL_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test - "trt_inference_test_models.tar.gz" 3dcccdc38b549b6b1b4089723757bd98) - endif() - set(TEST_SPLIT_CONVERTER_MODEL - "${TRT_MODEL_INSTALL_DIR}/trt_split_op_converter_test") - if(NOT EXISTS ${TEST_SPLIT_CONVERTER_MODEL}/split_converter.tgz) - inference_download_and_uncompress_without_verify( - ${TEST_SPLIT_CONVERTER_MODEL} ${INFERENCE_URL}/tensorrt_test - "split_converter.tgz") - endif() - inference_analysis_test( - trt_mobilenet_test - SRCS - trt_mobilenet_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) - inference_analysis_test( - trt_resnet50_test - SRCS - trt_resnet50_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) - inference_analysis_test( - trt_resnext_test - SRCS - trt_resnext_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) - inference_analysis_test( - trt_fc_prelu_test - SRCS - trt_fc_prelu_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) - inference_analysis_test( - trt_cascade_rcnn_test - SRCS - trt_cascade_rcnn_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) - inference_analysis_test( - trt_split_converter_test - SRCS - trt_split_converter_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TEST_SPLIT_CONVERTER_MODEL}/) - inference_analysis_test( - test_analyzer_capi_exp_gpu - SRCS - analyzer_capi_exp_gpu_tester.cc - EXTRA_DEPS - ${INFERENCE_C_EXTRA_DEPS} - ARGS - --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) - if(WIN32) - target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c_shared) - else() - target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c) - endif() - inference_analysis_test( - test_analyzer_capi_exp_xpu - SRCS - analyzer_capi_exp_xpu_tester.cc - EXTRA_DEPS - ${INFERENCE_C_EXTRA_DEPS} - ARGS - --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) - if(WIN32) - target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c_shared) - else() - target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c) - endif() - - set(TRT_MODEL_QUANT_RESNET_DIR - "${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model") - if(NOT EXISTS ${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model.tgz) - 
inference_download_and_uncompress_without_verify( - ${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test - "small_quant_model.tgz") - endif() - inference_analysis_test( - trt_quant_int8_test - SRCS - trt_quant_int8_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TRT_MODEL_QUANT_RESNET_DIR}) - - set(TRT_MODEL_QUANT_YOLOV3_DIR - "${INFERENCE_DEMO_INSTALL_DIR}/yolov3_r50_quant_aware") - if(NOT EXISTS ${INFERENCE_DEMO_INSTALL_DIR}/yolov3_r50_quant_aware.tgz) - inference_download_and_uncompress_without_verify( - ${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test - "yolov3_r50_quant_aware.tgz") - endif() - inference_analysis_test( - trt_quant_int8_yolov3_r50_test - SRCS - trt_quant_int8_yolov3_r50_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TRT_MODEL_QUANT_YOLOV3_DIR}) - - set(TEST_TRT_DYNAMIC_MODEL2 "${TRT_MODEL_INSTALL_DIR}/complex_model_dynamic") - if(NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL2}/complex_model_dynamic2.tar.gz) - inference_download_and_uncompress_without_verify( - ${TEST_TRT_DYNAMIC_MODEL2} ${INFERENCE_URL}/tensorrt_test - "complex_model_dynamic2.tar.gz") - endif() - - set(TEST_TRT_DYNAMIC_MODEL - "${TRT_MODEL_INSTALL_DIR}/conv_bn_swish_split_gelu") - if(NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL}/conv_bn_swish_split_gelu.tar.gz) - inference_download_and_uncompress( - ${TEST_TRT_DYNAMIC_MODEL} ${INFERENCE_URL}/tensorrt_test - "conv_bn_swish_split_gelu.tar.gz" 2a5e8791e47b221b4f782151d76da9c6) - endif() - inference_analysis_test( - trt_dynamic_shape_test - SRCS - trt_dynamic_shape_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TRT_MODEL_INSTALL_DIR}) - - set(TEST_TRT_ERNIE_MODEL "${TRT_MODEL_INSTALL_DIR}/ernie_test") - if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4.tar.gz) - inference_download_and_uncompress( - ${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test - "ernie_model_4.tar.gz" 5fa371efa75706becbaad79195d2ca68) - endif() - - inference_analysis_test( - test_trt_dynamic_shape_ernie - SRCS - trt_dynamic_shape_ernie_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4) - - set(TEST_TRT_TRANSFORMER_PRUNE_MODEL - "${TRT_MODEL_INSTALL_DIR}/transformer_prune") - if(NOT EXISTS ${TEST_TRT_TRANSFORMER_PRUNE_MODEL}/transformer_prune.tar.gz) - inference_download_and_uncompress( - ${TEST_TRT_TRANSFORMER_PRUNE_MODEL} ${INFERENCE_URL}/tensorrt_test - "transformer_prune.tar.gz" 77b56dc73ff0cf44ddb1ce9ca0b0f471) - endif() - - inference_analysis_test( - test_trt_dynamic_shape_transformer_prune - SRCS - trt_dynamic_shape_transformer_prune_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TEST_TRT_TRANSFORMER_PRUNE_MODEL}/transformer_prune) - - if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4_unserialized.tgz) - inference_download_and_uncompress( - ${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test - "ernie_model_4_unserialized.tgz" 833d73fc6a7f7e1ee4a1fd6419209e55) - endif() - - inference_analysis_test( - test_trt_dynamic_shape_ernie_ser_deser - SRCS - trt_dynamic_shape_ernie_serialize_deserialize_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4_unserialized) - - if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4_fp16_unserialized.tgz) - inference_download_and_uncompress( - ${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test - "ernie_model_4_fp16_unserialized.tgz" c5ff2d0cad79953ffbf2b8b9e2fae6e4) - endif() - - inference_analysis_test( - 
test_trt_dynamic_shape_ernie_fp16_ser_deser - SRCS - trt_dynamic_shape_ernie_fp16_serialize_deserialize_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4_fp16_unserialized) - -endif() - -set(LITE_MODEL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/lite") -download_data_without_verify(${LITE_MODEL_INSTALL_DIR} "mul_model_fp32.tgz") - -inference_analysis_test( - lite_mul_model_test - SRCS - lite_mul_model_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${LITE_MODEL_INSTALL_DIR}) -inference_analysis_test( - lite_resnet50_test - SRCS - lite_resnet50_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${RESNET50_MODEL_DIR}) - -inference_analysis_test( - test_analyzer_capi_exp - SRCS - analyzer_capi_exp_tester.cc - EXTRA_DEPS - ${INFERENCE_C_EXTRA_DEPS} - ARGS - --infer_model=${RESNET50_MODEL_DIR}/model) -if(WIN32) - target_link_libraries(test_analyzer_capi_exp paddle_inference_c_shared) -else() - target_link_libraries(test_analyzer_capi_exp paddle_inference_c) -endif() - -inference_analysis_test( - test_analyzer_capi_exp_pd_config - SRCS - analyzer_capi_exp_pd_config_tester.cc - EXTRA_DEPS - ${INFERENCE_C_EXTRA_DEPS} - ARGS - --infer_model=${MOBILENET_INSTALL_DIR}/model) -if(WIN32) - target_link_libraries(test_analyzer_capi_exp_pd_config - paddle_inference_c_shared) -else() - target_link_libraries(test_analyzer_capi_exp_pd_config paddle_inference_c) -endif() - -inference_analysis_test( - test_analyzer_capi_exp_pd_tensor - SRCS - analyzer_capi_exp_pd_tensor_tester.cc - EXTRA_DEPS - ${INFERENCE_C_EXTRA_DEPS} - ARGS - --infer_model=${MOBILENET_INSTALL_DIR}/model) -if(WIN32) - target_link_libraries(test_analyzer_capi_exp_pd_tensor - paddle_inference_c_shared) -else() - target_link_libraries(test_analyzer_capi_exp_pd_tensor paddle_inference_c) -endif() - -if(NOT APPLE AND NOT WIN32) - inference_analysis_test( - test_analyzer_capi_exp_pd_threads - SRCS - analyzer_capi_exp_pd_threads_tester.cc - EXTRA_DEPS - ${INFERENCE_C_EXTRA_DEPS} - ARGS - --infer_model=${MOBILENET_INSTALL_DIR}/model) - if(WIN32) - target_link_libraries(test_analyzer_capi_exp_pd_threads - paddle_inference_c_shared) - else() - target_link_libraries(test_analyzer_capi_exp_pd_threads paddle_inference_c) - endif() -endif() - -inference_analysis_test( - test_analyzer_zerocopytensor_tensor - SRCS - analyzer_zerocopy_tensor_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${OCR_INSTALL_DIR}/model) - -if(WITH_DISTRIBUTE AND WITH_PSCORE) - inference_analysis_test( - test_analyzer_dist_model - SRCS - analyzer_dist_model_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${OCR_INSTALL_DIR}/model) -endif() - -if(WITH_DISTRIBUTE - AND WITH_PSCORE - AND WITH_XPU - AND WITH_XPU_BKCL) - inference_analysis_test( - test_analyzer_dist_model_xpu - SRCS - analyzer_dist_model_xpu_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${OCR_INSTALL_DIR}/model) -endif() - -inference_analysis_test( - test_analyzer_paddletensor_tensor - SRCS - analyzer_paddle_tensor_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${OCR_INSTALL_DIR}/model - --infer_data=${OCR_INSTALL_DIR}/data.txt - --refer_result=${OCR_INSTALL_DIR}/result.txt) - -if(WITH_MKLDNN) - inference_analysis_test( - test_analyzer_capi_exp_int - SRCS - analyzer_capi_exp_int_tester.cc - EXTRA_DEPS - ${INFERENCE_C_EXTRA_DEPS} - ARGS - --infer_model=${INT8_DATA_DIR}/resnet50/model) - if(WIN32) - 
target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c_shared) - else() - target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c) - endif() -endif() - -inference_analysis_test( - test_analyzer_capi_exp_ner - SRCS - analyzer_capi_exp_ner_tester.cc - EXTRA_DEPS - ${INFERENCE_C_EXTRA_DEPS} - ARGS - --infer_model=${CHINESE_NER_INSTALL_DIR}/model) -if(WIN32) - target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c_shared) -else() - target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c) -endif() - -if(WITH_GPU) - inference_analysis_test( - paddle_infer_api_test - SRCS - paddle_infer_api_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${RESNET50_MODEL_DIR}) - - inference_analysis_test( - paddle_infer_api_copy_tensor_tester - SRCS - paddle_infer_api_copy_tensor_tester.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${RESNET50_MODEL_DIR}) - set_tests_properties(paddle_infer_api_copy_tensor_tester PROPERTIES TIMEOUT - 30) -endif() - -cc_test( - paddle_infer_api_errors_test - SRCS paddle_infer_api_errors_tester.cc - DEPS paddle_inference_api) - -if(WITH_GPU AND TENSORRT_FOUND) - set_tests_properties(trt_resnext_test PROPERTIES TIMEOUT 300) - set_tests_properties(trt_quant_int8_yolov3_r50_test PROPERTIES TIMEOUT 400) - set_tests_properties(trt_resnet50_test PROPERTIES TIMEOUT 300) - set_tests_properties(trt_cascade_rcnn_test PROPERTIES TIMEOUT 300) - set_tests_properties(test_trt_dynamic_shape_ernie_ser_deser PROPERTIES TIMEOUT - 300) - set_tests_properties(test_trt_dynamic_shape_ernie_fp16_ser_deser - PROPERTIES TIMEOUT 300) - set_tests_properties(test_trt_dynamic_shape_ernie PROPERTIES TIMEOUT 300) -endif() - -if(WITH_MKLDNN) - set_tests_properties(test_analyzer_int8_resnet50 PROPERTIES TIMEOUT 120) - set_tests_properties(test_analyzer_int8_mobilenet_ssd PROPERTIES TIMEOUT 120) - set_tests_properties(test_analyzer_quant_performance_benchmark - PROPERTIES TIMEOUT 120) - set_tests_properties(test_analyzer_int8_mobilenetv2 PROPERTIES TIMEOUT 120) - set_tests_properties(test_analyzer_int8_mobilenetv1 PROPERTIES TIMEOUT 120) - set_tests_properties(test_analyzer_int8_mobilenetv3_large PROPERTIES TIMEOUT - 120) - set_tests_properties(test_analyzer_quant2_mobilenetv1_mkldnn - PROPERTIES TIMEOUT 120) - set_tests_properties(test_analyzer_quant2_resnet50_channelwise_mkldnn - PROPERTIES TIMEOUT 120) -endif() - -set_tests_properties(lite_resnet50_test PROPERTIES TIMEOUT 120) -set_tests_properties(test_analyzer_mobilenet_transpose PROPERTIES TIMEOUT 120) -set_tests_properties(test_analyzer_resnet50 PROPERTIES TIMEOUT 120) -set_tests_properties(test_analyzer_ner PROPERTIES TIMEOUT 120) -set_tests_properties(test_analyzer_ernie_int8 PROPERTIES TIMEOUT 120) -set_tests_properties(test_analyzer_googlenet PROPERTIES TIMEOUT 120) -set_tests_properties(test_analyzer_small_dam PROPERTIES TIMEOUT 120) -set_tests_properties(test_analyzer_transformer PROPERTIES TIMEOUT 120) -set_tests_properties(test_analyzer_mobilenet_depthwise_conv PROPERTIES TIMEOUT - 120) -if(WITH_GPU) - set_tests_properties(test_analyzer_bert PROPERTIES TIMEOUT 120) - set_tests_properties(test_analyzer_ernie PROPERTIES TIMEOUT 120) -endif() -if(WITH_GPU AND TENSORRT_FOUND) - set_tests_properties(trt_mobilenet_test PROPERTIES TIMEOUT 120) - if(WITH_MKLDNN) - set_tests_properties(test_analyzer_bfloat16_resnet50 PROPERTIES TIMEOUT 120) - endif() -endif() -if(ON_INFER OR WITH_GPU) - set_tests_properties(test_analyzer_transformer_profile PROPERTIES TIMEOUT 120) 
-endif() - -if(WITH_IPU) - #word2vec sample - set(WORD2VEC_INSTALL_DIR - "${INFERENCE_DEMO_INSTALL_DIR}/word2vec/word2vec.inference.model") - inference_analysis_test( - ipu_word2vec_sample - SRCS - ipu_word2vec_sample.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${WORD2VEC_INSTALL_DIR}) - - # ERNIE - set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie") - inference_analysis_api_test(ipu_ernie_test ${ERNIE_INSTALL_DIR} - ipu_ernie_test.cc ARGS --warmup=true --repeat=10) - inference_analysis_api_test( - ipu_ernie_fp16_test ${ERNIE_INSTALL_DIR} ipu_ernie_fp16_test.cc ARGS - --warmup=true --repeat=10) - - # Resnet50 - set(RESNET50_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/resnet50") - inference_analysis_test( - ipu_resnet50_test - SRCS - ipu_resnet50_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${RESNET50_MODEL_DIR} - --warmup=true - --repeat=10) - inference_analysis_test( - ipu_resnet50_fp16_test - SRCS - ipu_resnet50_fp16_test.cc - EXTRA_DEPS - paddle_inference_shared - ARGS - --infer_model=${RESNET50_MODEL_DIR} - --warmup=true - --repeat=10) - - # Only support Resnet50 and Ernie currently - inference_analysis_api_test( - ipu_multi_model_profile - SRCS - ipu_multi_model_profile.cc - ARGS - --model_name="Resnet50" - --infer_model=${RESNET50_MODEL_DIR} - --warmup=true - --repeat=10) -endif() diff --git a/paddle/scripts/paddle_build.sh b/paddle/scripts/paddle_build.sh index 6714657c3f4..77ed8e531da 100644 --- a/paddle/scripts/paddle_build.sh +++ b/paddle/scripts/paddle_build.sh @@ -3008,7 +3008,7 @@ EOF echo "ipipe_log_param_Demo_Ci_Tests_Total_Time: $[ $demo_ci_endTime_s - $demo_ci_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt infer_ut_startTime_s=`date +%s` - cd ${PADDLE_ROOT}/paddle/fluid/inference/tests/infer_ut + cd ${PADDLE_ROOT}/test/cpp/inference/infer_ut ./run.sh ${PADDLE_ROOT} ${WITH_MKL:-ON} ${WITH_GPU:-OFF} ${INFERENCE_DEMO_INSTALL_DIR} \ ${TENSORRT_ROOT_DIR:-/usr} ${WITH_ONNXRUNTIME:-ON} TEST_EXIT_CODE=$? 
diff --git a/test/cpp/CMakeLists.txt b/test/cpp/CMakeLists.txt index 3b896f38c5c..603b7eb78ae 100644 --- a/test/cpp/CMakeLists.txt +++ b/test/cpp/CMakeLists.txt @@ -4,3 +4,4 @@ add_subdirectory(new_executor) add_subdirectory(prim) add_subdirectory(imperative) add_subdirectory(ir) +add_subdirectory(inference) diff --git a/test/cpp/inference/CMakeLists.txt b/test/cpp/inference/CMakeLists.txt new file mode 100644 index 00000000000..5228292d887 --- /dev/null +++ b/test/cpp/inference/CMakeLists.txt @@ -0,0 +1,6 @@ +if(WITH_TESTING) + include(test.cmake) # some generic cmake function for inference +endif() + +add_subdirectory(analysis) +add_subdirectory(api) diff --git a/test/cpp/inference/analysis/CMakeLists.txt b/test/cpp/inference/analysis/CMakeLists.txt new file mode 100644 index 00000000000..11e346855fe --- /dev/null +++ b/test/cpp/inference/analysis/CMakeLists.txt @@ -0,0 +1,80 @@ +function(inference_analysis_test_build TARGET) + if(WITH_TESTING) + set(options "") + set(oneValueArgs "") + set(multiValueArgs SRCS EXTRA_DEPS) + cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}" + "${multiValueArgs}" ${ARGN}) + inference_base_test_build( + ${TARGET} + SRCS + ${analysis_test_SRCS} + DEPS + ${analysis_test_EXTRA_DEPS} + analysis + pass + ${GLOB_PASS_LIB}) + endif() +endfunction() + +function(inference_analysis_test_run TARGET) + if(WITH_TESTING) + set(options "") + set(oneValueArgs "") + set(multiValueArgs COMMAND ARGS) + cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}" + "${multiValueArgs}" ${ARGN}) + inference_base_test_run(${TARGET} COMMAND ${analysis_test_COMMAND} ARGS + ${analysis_test_ARGS}) + set_tests_properties(${TARGET} PROPERTIES LABELS "RUN_TYPE=INFER") + endif() +endfunction() + +function(inference_analysis_test TARGET) + if(WITH_TESTING) + set(options "") + set(oneValueArgs "") + set(multiValueArgs SRCS ARGS EXTRA_DEPS) + cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}" + "${multiValueArgs}" ${ARGN}) + inference_base_test_build( + ${TARGET} + SRCS + ${analysis_test_SRCS} + DEPS + ${analysis_test_EXTRA_DEPS} + analysis + pass + ${GLOB_PASS_LIB}) + inference_base_test_run(${TARGET} COMMAND ${TARGET} ARGS + ${analysis_test_ARGS}) + set_tests_properties(${TARGET} PROPERTIES LABELS "RUN_TYPE=INFER") + endif() +endfunction() + +if(NOT APPLE AND NOT WIN32) + inference_analysis_test( + test_analyzer + SRCS + analyzer_tester.cc + EXTRA_DEPS + reset_tensor_array + paddle_inference_shared + ARGS + --inference_model_dir=${WORD2VEC_MODEL_DIR}) +elseif(WIN32) + inference_analysis_test( + test_analyzer + SRCS + analyzer_tester.cc + EXTRA_DEPS + reset_tensor_array + paddle_inference_api + ARGS + --inference_model_dir=${WORD2VEC_MODEL_DIR}) + if(WITH_ONNXRUNTIME AND WIN32) + # Copy onnxruntime for some c++ test in Windows, since the test will + # be build only in CI, so suppose the generator in Windows is Ninja. 
+ copy_onnx(test_analyzer) + endif() +endif() diff --git a/paddle/fluid/inference/analysis/analyzer_tester.cc b/test/cpp/inference/analysis/analyzer_tester.cc similarity index 100% rename from paddle/fluid/inference/analysis/analyzer_tester.cc rename to test/cpp/inference/analysis/analyzer_tester.cc diff --git a/test/cpp/inference/api/CMakeLists.txt b/test/cpp/inference/api/CMakeLists.txt new file mode 100644 index 00000000000..01010715c81 --- /dev/null +++ b/test/cpp/inference/api/CMakeLists.txt @@ -0,0 +1,1517 @@ +# In Windows, c_api test link must link both 2 shared to avoid symbols redefinition, +# in Linux, c_api test cant do like this or graph_to_program register more than once. +# Both Windows and Linux can only use paddle_inference_c, but this will increase size +# of build folder by 30G. +cc_test( + test_paddle_inference_api + SRCS api_tester.cc + DEPS paddle_inference_api) + +cc_test( + inference_api_helper_test + SRCS helper_test.cc + DEPS paddle_inference_api) + +if(WITH_ONNXRUNTIME AND WIN32) + # Copy onnxruntime for some c++ test in Windows, since the test will + # be build only in CI, so suppose the generator in Windows is Ninja. + copy_onnx(test_paddle_inference_api) +endif() + +if(WITH_TESTING AND WITH_INFERENCE_API_TEST) + if(WIN32) + set(INFERENCE_C_EXTRA_DEPS paddle_inference_shared + paddle_inference_c_shared) + else() + set(INFERENCE_C_EXTRA_DEPS paddle_inference_shared paddle_inference_c) + endif() + + function(download_data install_dir data_file check_sum) + string(REGEX MATCH "[^/\\]+$" file_name ${data_file}) + if(NOT EXISTS ${install_dir}/${file_name}) + inference_download_and_uncompress(${install_dir} ${INFERENCE_URL} + ${data_file} ${check_sum}) + endif() + endfunction() + + function(download_data_without_verify install_dir data_file) + string(REGEX MATCH "[^/\\]+$" file_name ${data_file}) + if(NOT EXISTS ${install_dir}/${file_name}) + inference_download_and_uncompress_without_verify( + ${install_dir} ${INFERENCE_URL} ${data_file}) + endif() + endfunction() + + function(download_int8_data install_dir data_file check_sum) + if(NOT EXISTS ${install_dir}/${data_file}) + inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/int8 + ${data_file} ${check_sum}) + endif() + endfunction() + + function(download_int8_data_without_verify install_dir data_file) + if(NOT EXISTS ${install_dir}/${data_file}) + inference_download_and_uncompress_without_verify( + ${install_dir} ${INFERENCE_URL}/int8 ${data_file}) + endif() + endfunction() + + function(download_bfloat16_data install_dir data_file check_sum) + if(NOT EXISTS ${install_dir}/${data_file}) + inference_download_and_uncompress( + ${install_dir} ${INFERENCE_URL}/bfloat16 ${data_file} ${check_sum}) + endif() + endfunction() + + function(download_bfloat16_data_without_verify install_dir data_file) + if(NOT EXISTS ${install_dir}/${data_file}) + inference_download_and_uncompress_without_verify( + ${install_dir} ${INFERENCE_URL}/bfloat16 ${data_file}) + endif() + endfunction() + + function(download_GRU_data install_dir data_file check_sum) + if(NOT EXISTS ${install_dir}/${data_file}) + inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/gru + ${data_file} ${check_sum}) + endif() + endfunction() + + function(download_GRU_data_without_verify install_dir data_file) + if(NOT EXISTS ${install_dir}/${data_file}) + inference_download_and_uncompress_without_verify( + ${install_dir} ${INFERENCE_URL}/gru ${data_file}) + endif() + endfunction() + + function(download_quant_data install_dir data_file check_sum) + 
if(NOT EXISTS ${install_dir}/${data_file}) + inference_download_and_uncompress( + ${install_dir} ${INFERENCE_URL}/int8/QAT_models ${data_file} + ${check_sum}) + endif() + endfunction() + + function(download_quant_data_without_verify install_dir data_file) + if(NOT EXISTS ${install_dir}/${data_file}) + inference_download_and_uncompress_without_verify( + ${install_dir} ${INFERENCE_URL}/int8/QAT_models ${data_file}) + endif() + endfunction() + + function(download_model_and_data install_dir model_name model_check_sum + data_name data_check_sum) + download_data(${install_dir} ${model_name} ${model_check_sum}) + download_data(${install_dir} ${data_name} ${data_check_sum}) + endfunction() + + function(download_model_and_data_without_verify install_dir model_name + data_name) + download_data_without_verify(${install_dir} ${model_name}) + download_data_without_verify(${install_dir} ${data_name}) + endfunction() + + function(download_result install_dir result_name check_sum) + download_data(${install_dir} ${result_name} ${check_sum}) + endfunction() + + function(download_result_without_verify install_dir result_name) + download_data_without_verify(${install_dir} ${result_name}) + endfunction() + + function(inference_analysis_api_test target install_dir filename) + inference_analysis_test( + ${target} + SRCS + ${filename} + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${install_dir}/model + --infer_data=${install_dir}/data.txt + --refer_result=${install_dir}/result.txt) + endfunction() + + function(inference_analysis_api_int8_test target install_dir filename) + inference_analysis_test( + ${target} + SRCS + ${filename} + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${install_dir}/model + --infer_data=${install_dir}/data.txt + --refer_result=${install_dir}/result.txt + --accuracy=0.8 + --batch_size=5 + --enable_int8_ptq=true) + endfunction() + + function(inference_multiple_models_analysis_api_test target install_dir + filename) + inference_analysis_test( + ${target} + SRCS + ${filename} + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${install_dir}/mobilenet_v2_models/1 + --infer_model2=${install_dir}/mobilenet_v2_models/xx + --infer_model3=${install_dir}/mobilenet_v2_models/3) + endfunction() + + function(inference_analysis_api_test_build TARGET_NAME filename) + inference_analysis_test_build(${TARGET_NAME} SRCS ${filename} EXTRA_DEPS + paddle_inference_shared) + endfunction() + + function(inference_analysis_api_int8_test_run TARGET_NAME test_binary + model_dir data_path) + inference_analysis_test_run( + ${TARGET_NAME} + COMMAND + ${test_binary} + ARGS + --infer_model=${model_dir}/model + --infer_data=${data_path} + --warmup_batch_size=${WARMUP_BATCH_SIZE} + --batch_size=50 + --enable_int8_ptq=true + --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} + --iterations=2) + endfunction() + + function(inference_analysis_api_int8_test_run_custom_warmup_batch_size + TARGET_NAME test_binary model_dir data_path warmup_batch_size) + set(WARMUP_BATCH_SIZE ${warmup_batch_size}) + inference_analysis_api_int8_test_run(${TARGET_NAME} ${test_binary} + ${model_dir} ${data_path}) + endfunction() + + function(inference_analysis_api_bfloat16_test_run TARGET_NAME test_binary + model_dir data_path) + inference_analysis_test_run( + ${TARGET_NAME} + COMMAND + ${test_binary} + ARGS + --infer_model=${model_dir}/model + --infer_data=${data_path} + --batch_size=50 + --enable_bf16=true + --paddle_num_threads=${CPU_NUM_THREADS_ON_CI} + --iterations=2) + endfunction() + + 
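These helpers are combined in a build-once, run-many pattern throughout the rest of this file: a tester binary is built once with inference_analysis_api_test_build() and then registered as several ctest entries through the *_test_run() wrappers, which all delegate to inference_analysis_test_run(). The sketch below condenses that pattern; the "demo" target names, model directory, archive, and data path are hypothetical placeholders, not part of this patch.

  # Illustrative sketch only -- the "demo" names below are placeholders;
  # the real targets are registered further down in this file.
  set(DEMO_INT8_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/demo_int8")
  set(DEMO_INT8_DATA_PATH "${DEMO_INT8_MODEL_DIR}/data.bin")
  download_int8_data_without_verify(${DEMO_INT8_MODEL_DIR}
                                    "demo_int8_model.tar.gz")

  # Build the tester binary once ...
  set(DEMO_INT8_TEST_APP "test_analyzer_demo_int8")
  inference_analysis_api_test_build(${DEMO_INT8_TEST_APP}
                                    "analyzer_int8_image_classification_tester.cc")

  # ... then register any number of ctest entries that reuse the same binary.
  inference_analysis_api_int8_test_run(
    test_analyzer_demo_int8_default ${DEMO_INT8_TEST_APP}
    ${DEMO_INT8_MODEL_DIR} ${DEMO_INT8_DATA_PATH})

  # The *_custom_warmup_batch_size variant only sets WARMUP_BATCH_SIZE in its
  # own function scope before delegating to the default runner.
  inference_analysis_api_int8_test_run_custom_warmup_batch_size(
    test_analyzer_demo_int8_small_warmup ${DEMO_INT8_TEST_APP}
    ${DEMO_INT8_MODEL_DIR} ${DEMO_INT8_DATA_PATH} 10)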
function(inference_analysis_api_object_dection_int8_test_run TARGET_NAME + test_binary model_dir data_path) + inference_analysis_test_run( + ${TARGET_NAME} + COMMAND + ${test_binary} + ARGS + --infer_model=${model_dir}/model + --infer_data=${data_path} + --warmup_batch_size=10 + --batch_size=300 + --enable_int8_ptq=true + --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} + --iterations=1) + endfunction() + + function(inference_analysis_api_test_with_fake_data_build TARGET_NAME + filename) + inference_analysis_test_build(${TARGET_NAME} SRCS ${filename} EXTRA_DEPS + paddle_inference_shared) + endfunction() + + function(inference_analysis_api_test_with_fake_data_run TARGET_NAME + test_binary model_dir disable_fc) + inference_analysis_test_run( + ${TARGET_NAME} COMMAND ${test_binary} ARGS + --infer_model=${model_dir}/model --disable_mkldnn_fc=${disable_fc}) + endfunction() + + function( + inference_analysis_api_quant_test_run + TARGET_NAME + test_binary + fp32_model_dir + int8_model_dir + data_path + enable_int8_qat) + inference_analysis_test_run( + ${TARGET_NAME} + COMMAND + ${test_binary} + ARGS + --fp32_model=${fp32_model_dir} + --int8_model=${int8_model_dir} + --infer_data=${data_path} + --batch_size=50 + --enable_int8_qat=${enable_int8_qat} + --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} + --with_accuracy_layer=false + --iterations=2) + endfunction() + + function(inference_analysis_api_lexical_test_run TARGET_NAME test_binary + infer_model data_path) + inference_analysis_test_run( + ${TARGET_NAME} + COMMAND + ${test_binary} + ARGS + --infer_model=${infer_model} + --infer_data=${data_path} + --batch_size=50 + --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} + --with_accuracy_layer=true + --use_analysis=true + --iterations=2) + endfunction() + + function(inference_analysis_api_lexical_bfloat16_test_run TARGET_NAME + test_binary infer_model data_path) + inference_analysis_test_run( + ${TARGET_NAME} + COMMAND + ${test_binary} + ARGS + --infer_model=${infer_model} + --infer_data=${data_path} + --batch_size=50 + --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} + --with_accuracy_layer=true + --use_analysis=true + --enable_bf16=true + --iterations=2) + endfunction() + + function( + inference_analysis_api_lexical_int8_test_run + TARGET_NAME + test_binary + infer_model + data_path + enable_int8_ptq + enable_int8_qat + fuse_multi_gru) + inference_analysis_test_run( + ${TARGET_NAME} + COMMAND + ${test_binary} + ARGS + --infer_model=${infer_model} + --infer_data=${data_path} + --batch_size=100 + --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} + --with_accuracy_layer=true + --use_analysis=true + --enable_int8_ptq=${enable_int8_ptq} + --enable_int8_qat=${enable_int8_qat} + --quantized_accuracy=0.015 + --fuse_multi_gru=${fuse_multi_gru} + --iterations=4) + endfunction() + + function(preprocess_data2bin_test_run target py_script_source data_dir + output_file) + py_test(${target} + SRCS ${CMAKE_CURRENT_SOURCE_DIR}/${py_script_source} ARGS + --data_dir=${data_dir} --output_file=${output_file} --local) + endfunction() + + if(NOT APPLE AND WITH_MKLML) + # RNN1 + set(RNN1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/rnn1") + download_model_and_data_without_verify( + ${RNN1_INSTALL_DIR} "rnn1/model.tar.gz" "rnn1/data.txt.tar.gz") + inference_analysis_api_test(test_analyzer_rnn1 ${RNN1_INSTALL_DIR} + analyzer_rnn1_tester.cc) + + # seq_pool1 + set(SEQ_POOL1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/seq_pool") + download_model_and_data_without_verify( + ${SEQ_POOL1_INSTALL_DIR} "seq_pool1_model_.tar.gz" + "seq_pool1_data.txt.tar.gz") + 
inference_analysis_api_test( + test_analyzer_seq_pool1_compare_determine ${SEQ_POOL1_INSTALL_DIR} + analyzer_seq_pool1_compare_determine_tester.cc) + inference_analysis_api_test( + test_analyzer_seq_pool1 ${SEQ_POOL1_INSTALL_DIR} + analyzer_seq_pool1_compare_tester.cc) + inference_analysis_api_test( + test_analyzer_seq_pool1_fuse_compare_zero_copy ${SEQ_POOL1_INSTALL_DIR} + analyzer_seq_pool1_fuse_compare_zero_copy_tester.cc) + inference_analysis_api_test( + test_analyzer_seq_pool1_fuse_statis ${SEQ_POOL1_INSTALL_DIR} + analyzer_seq_pool1_fuse_statis_tester.cc) + inference_analysis_api_test( + test_analyzer_seq_pool1_profile ${SEQ_POOL1_INSTALL_DIR} + analyzer_seq_pool1_profile_tester.cc) + if(NOT WIN32) + set_tests_properties(test_analyzer_seq_pool1_compare_determine + PROPERTIES TIMEOUT 120) + set_tests_properties(test_analyzer_seq_pool1 PROPERTIES TIMEOUT 120) + set_tests_properties(test_analyzer_seq_pool1_fuse_compare_zero_copy + PROPERTIES TIMEOUT 120) + set_tests_properties(test_analyzer_seq_pool1_fuse_statis + PROPERTIES TIMEOUT 120) + set_tests_properties(test_analyzer_seq_pool1_profile PROPERTIES TIMEOUT + 120) + endif() + else() + # TODO: fix this test on MACOS and OPENBLAS, the reason is that + # fusion_seqexpand_concat_fc_op is not supported on MACOS and OPENBLAS + message( + WARNING + "These tests have been disabled in OSX or WITH_MKL=OFF before being fixed: \n test_analyzer_rnn1" + ) + message( + WARNING + "These tests have been disabled in OSX or WITH_MKL=OFF before being fixed: \n test_analyzer_seq_pool1" + ) + endif() + + # RNN2 + set(RNN2_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/rnn2") + download_model_and_data_without_verify( + ${RNN2_INSTALL_DIR} "rnn2_model.tar.gz" "rnn2_data.txt.tar.gz") + inference_analysis_api_test(test_analyzer_rnn2 ${RNN2_INSTALL_DIR} + analyzer_rnn2_tester.cc) + + # TODO(luotao, Superjom) Disable DAM test, temporarily fix + # https://github.com/PaddlePaddle/Paddle/issues/15032#issuecomment-455990914. + # After inference framework refactor, will reopen it.
+ # normal DAM + set(DAM_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/dam") + download_model_and_data_without_verify(${DAM_INSTALL_DIR} "DAM_model.tar.gz" + "DAM_data.txt.tar.gz") + #inference_analysis_api_test(test_analyzer_dam ${DAM_INSTALL_DIR} analyzer_dam_tester.cc EXTRA_DEPS legacy_allocator) + + # small DAM + set(DAM_SMALL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/small_dam") + download_model_and_data_without_verify( + ${DAM_SMALL_INSTALL_DIR} "dam_small_model.tar.gz" + "dam_small_data.txt.tar.gz") + inference_analysis_test( + test_analyzer_small_dam + SRCS + analyzer_dam_tester.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${DAM_SMALL_INSTALL_DIR}/model + --infer_data=${DAM_SMALL_INSTALL_DIR}/data.txt) + + #save model + inference_analysis_api_test(test_analyzer_save_model ${DAM_SMALL_INSTALL_DIR} + analyzer_save_model_tester.cc) + + # chinese_ner + set(CHINESE_NER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/chinese_ner") + download_model_and_data_without_verify( + ${CHINESE_NER_INSTALL_DIR} "chinese_ner_model.tar.gz" + "chinese_ner-data.txt.tar.gz") + inference_analysis_api_test(test_analyzer_ner ${CHINESE_NER_INSTALL_DIR} + analyzer_ner_tester.cc) + + # lac + set(LAC_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/lac") + download_model_and_data( + ${LAC_INSTALL_DIR} "lac_model.tar.gz" 419ca6eb85f57a01bfe173591910aec5 + "lac_data.txt.tar.gz" 9983539cd6b34fbdc411e43422776bfd) + inference_analysis_api_test(test_analyzer_lac ${LAC_INSTALL_DIR} + analyzer_lac_tester.cc) + + # Pyramid DNN + set(PYRAMID_DNN_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/pyramid_dnn") + download_model_and_data_without_verify( + ${PYRAMID_DNN_INSTALL_DIR} "PyramidDNN_model.tar.gz" + "PyramidDNN_data.txt.tar.gz") + inference_analysis_api_test( + test_analyzer_pyramid_dnn ${PYRAMID_DNN_INSTALL_DIR} + analyzer_pyramid_dnn_tester.cc) + + # Ernie + set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie") + download_model_and_data( + ${ERNIE_INSTALL_DIR} "Ernie_model.tar.gz" aa59192dd41ed377f9f168e3a1309fa6 + "Ernie_data.txt.tar.gz" 5396e63548edad7ca561e7e26a9476d1) + download_result(${ERNIE_INSTALL_DIR} "Ernie_result.txt.tar.gz" + 73beea65abda2edb61c1662cd3180c62) + if(WITH_GPU) + inference_analysis_api_test(test_analyzer_ernie ${ERNIE_INSTALL_DIR} + analyzer_ernie_tester.cc) + inference_analysis_api_test(gpu_ernie_half_test ${ERNIE_INSTALL_DIR} + gpu_ernie_half_test.cc) + set_tests_properties(gpu_ernie_half_test PROPERTIES TIMEOUT 60) + endif() + inference_analysis_api_int8_test( + test_analyzer_ernie_int8 ${ERNIE_INSTALL_DIR} analyzer_ernie_int8_tester.cc) + + # Ernie large + set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie_Large") + download_model_and_data( + ${ERNIE_INSTALL_DIR} "Ernie_large_model.tar.gz" + af7715245ed32cc77374625d4c80f7ef "Ernie_large_data.txt.tar.gz" + edb2113eec93783cad56ed76d47ba57f) + download_result(${ERNIE_INSTALL_DIR} "Ernie_large_result.txt.tar.gz" + 1facda98eef1085dc9d435ebf3f23a73) + inference_analysis_test( + test_analyzer_ernie_large + SRCS + analyzer_ernie_tester.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${ERNIE_INSTALL_DIR}/model + --infer_data=${ERNIE_INSTALL_DIR}/data.txt + --refer_result=${ERNIE_INSTALL_DIR}/result.txt + --ernie_large=true) + if(NOT WIN32 + AND NOT APPLE + AND TEST test_analyzer_ernie_large) + set_tests_properties(test_analyzer_ernie_large + PROPERTIES TIMEOUT 150 LABELS "RUN_TYPE=NIGHTLY") + endif() + if(WIN32 AND TEST test_analyzer_ernie_large) + set_tests_properties(test_analyzer_ernie_large PROPERTIES 
TIMEOUT 200) + endif() + + # text_classification + set(TEXT_CLASSIFICATION_INSTALL_DIR + "${INFERENCE_DEMO_INSTALL_DIR}/text_classification") + download_model_and_data( + ${TEXT_CLASSIFICATION_INSTALL_DIR} "text-classification-Senta.tar.gz" + 3f0f440313ca50e26184e65ffd5809ab "text_classification_data.txt.tar.gz" + 36ae620020cc3377f45ed330dd36238f) + inference_analysis_api_test( + test_analyzer_text_classification ${TEXT_CLASSIFICATION_INSTALL_DIR} + analyzer_text_classification_tester.cc) + + # seq_conv1 + set(SEQ_CONV1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/seq_conv1") + download_model_and_data_without_verify( + ${SEQ_CONV1_INSTALL_DIR} "seq_conv1_model.tar.gz" + "seq_conv1_data.txt.tar.gz") + inference_analysis_api_test(test_analyzer_seq_conv1 ${SEQ_CONV1_INSTALL_DIR} + analyzer_seq_conv1_tester.cc) + + # transformer, the dataset only works on batch_size=8 now + set(TRANSFORMER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/transformer") + download_model_and_data_without_verify( + ${TRANSFORMER_INSTALL_DIR} "temp/transformer_model.tar.gz" + "temp/transformer_data.txt.tar.gz") + inference_analysis_test( + test_analyzer_transformer + SRCS + analyzer_transformer_compare_tester.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TRANSFORMER_INSTALL_DIR}/model + --infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt + --batch_size=8 + --cpu_num_threads=${CPU_NUM_THREADS_ON_CI}) + inference_analysis_test( + test_analyzer_transformer_fuse + SRCS + analyzer_transformer_fuse_tester.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TRANSFORMER_INSTALL_DIR}/model + --infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt + --batch_size=8 + --cpu_num_threads=${CPU_NUM_THREADS_ON_CI}) + inference_analysis_test( + test_analyzer_transformer_profile + SRCS + analyzer_transformer_profile_tester.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TRANSFORMER_INSTALL_DIR}/model + --infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt + --batch_size=8 + --cpu_num_threads=${CPU_NUM_THREADS_ON_CI}) + + # VIT-OCR + set(VIT_OCR_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/vit") + if(NOT EXISTS ${VIT_OCR_INSTALL_DIR}/vit_ocr.tgz) + inference_download_and_uncompress_without_verify( + ${VIT_OCR_INSTALL_DIR} ${INFERENCE_URL} "ocr/vit_ocr.tgz") + endif() + inference_analysis_test( + test_analyzer_vit_ocr + SRCS + analyzer_vit_ocr_tester.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${VIT_OCR_INSTALL_DIR}/vit_ocr/model + --infer_data=${VIT_OCR_INSTALL_DIR}/vit_ocr/datavit.txt) + + # ocr + set(OCR_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/ocr") + if(NOT EXISTS ${OCR_INSTALL_DIR}/ocr.tar.gz) + inference_download_and_uncompress_without_verify( + ${OCR_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/" + "inference-vis-demos/ocr.tar.gz") + endif() + inference_analysis_api_test(test_analyzer_ocr ${OCR_INSTALL_DIR} + analyzer_vis_tester.cc) + + # densebox + set(DENSEBOX_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/densebox") + download_data_without_verify(${DENSEBOX_INSTALL_DIR} "densebox.tar.gz") + inference_analysis_test( + test_analyzer_detect_functional_mkldnn + SRCS + analyzer_detect_functional_mkldnn_tester.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${DENSEBOX_INSTALL_DIR}/model + --infer_data=${DENSEBOX_INSTALL_DIR}/detect_input_50.txt + --infer_shape=${DENSEBOX_INSTALL_DIR}/shape_50.txt) + + # mobilenet with transpose op + set(MOBILENET_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet") + if(NOT EXISTS ${MOBILENET_INSTALL_DIR}/mobilenet.tar.gz) + 
inference_download_and_uncompress_without_verify( + ${MOBILENET_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/" + "inference-vis-demos/mobilenet.tar.gz") + endif() + inference_analysis_api_test(test_analyzer_mobilenet_transpose + ${MOBILENET_INSTALL_DIR} analyzer_vis_tester.cc) + + ### Image classification tests with fake data + set(IMG_CLASS_TEST_APP "test_analyzer_image_classification") + set(IMG_CLASS_TEST_APP_SRC "analyzer_image_classification_tester.cc") + + # build test binary to be used in subsequent tests + inference_analysis_api_test_with_fake_data_build(${IMG_CLASS_TEST_APP} + ${IMG_CLASS_TEST_APP_SRC}) + + # googlenet + set(GOOGLENET_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/googlenet") + download_data_without_verify(${GOOGLENET_MODEL_DIR} "googlenet.tar.gz") + inference_analysis_api_test_with_fake_data_run( + test_analyzer_googlenet ${IMG_CLASS_TEST_APP} ${GOOGLENET_MODEL_DIR} false) + + # resnet50 + set(RESNET50_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/resnet50") + download_data_without_verify(${RESNET50_MODEL_DIR} "resnet50_model.tar.gz") + inference_analysis_api_test_with_fake_data_run( + test_analyzer_resnet50 ${IMG_CLASS_TEST_APP} ${RESNET50_MODEL_DIR} true) + if(WIN32) + set_tests_properties(test_analyzer_resnet50 PROPERTIES TIMEOUT 200) + endif() + + # mobilenet with depthwise_conv op + set(MOBILENET_MODEL_DIR + "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet_depthwise_conv") + download_data_without_verify(${MOBILENET_MODEL_DIR} "mobilenet_model.tar.gz") + inference_analysis_api_test_with_fake_data_run( + test_analyzer_mobilenet_depthwise_conv ${IMG_CLASS_TEST_APP} + ${MOBILENET_MODEL_DIR} false) + + if(WITH_MKLDNN) + + ### INT8 tests + + set(INT8_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/int8v2") + + ## Image classification models + + # ImageNet small dataset + # It may be already downloaded for Quant & INT8 unit tests + set(IMAGENET_DATA_ARCHIVE "imagenet_val_100_tail.tar.gz") + set(IMAGENET_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/imagenet") + set(IMAGENET_DATA_PATH "${IMAGENET_DATA_DIR}/data.bin") + download_int8_data_without_verify(${IMAGENET_DATA_DIR} + ${IMAGENET_DATA_ARCHIVE}) + + # build test binary to be used in subsequent tests + set(INT8_IMG_CLASS_TEST_APP "test_analyzer_int8_image_classification") + set(INT8_IMG_CLASS_TEST_APP_SRC + "analyzer_int8_image_classification_tester.cc") + inference_analysis_api_test_build(${INT8_IMG_CLASS_TEST_APP} + ${INT8_IMG_CLASS_TEST_APP_SRC}) + + # resnet50 int8 + set(INT8_RESNET50_MODEL_DIR "${INT8_DATA_DIR}/resnet50") + download_int8_data_without_verify(${INT8_RESNET50_MODEL_DIR} + "resnet50_int8_model.tar.gz") + inference_analysis_api_int8_test_run( + test_analyzer_int8_resnet50 ${INT8_IMG_CLASS_TEST_APP} + ${INT8_RESNET50_MODEL_DIR} ${IMAGENET_DATA_PATH}) + + # mobilenetv1 int8 + set(INT8_MOBILENETV1_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv1") + download_int8_data_without_verify(${INT8_MOBILENETV1_MODEL_DIR} + "mobilenetv1_int8_model.tar.gz") + inference_analysis_api_int8_test_run( + test_analyzer_int8_mobilenetv1 ${INT8_IMG_CLASS_TEST_APP} + ${INT8_MOBILENETV1_MODEL_DIR} ${IMAGENET_DATA_PATH}) + + # mobilenetv2 int8 + set(INT8_MOBILENETV2_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv2") + download_int8_data_without_verify(${INT8_MOBILENETV2_MODEL_DIR} + "mobilenet_v2_int8_model.tar.gz") + inference_analysis_api_int8_test_run( + test_analyzer_int8_mobilenetv2 ${INT8_IMG_CLASS_TEST_APP} + ${INT8_MOBILENETV2_MODEL_DIR} ${IMAGENET_DATA_PATH}) + + # resnet101 int8 + set(INT8_RESNET101_MODEL_DIR "${INT8_DATA_DIR}/resnet101") + 
download_int8_data_without_verify(${INT8_RESNET101_MODEL_DIR} + "Res101_int8_model.tar.gz") + # inference_analysis_api_int8_test_run(test_analyzer_int8_resnet101 ${INT8_IMG_CLASS_TEST_APP} ${INT8_RESNET101_MODEL_DIR} ${IMAGENET_DATA_PATH}) + + # vgg16 int8 + set(INT8_VGG16_MODEL_DIR "${INT8_DATA_DIR}/vgg16") + download_int8_data_without_verify(${INT8_VGG16_MODEL_DIR} + "VGG16_int8_model.tar.gz") + # inference_analysis_api_int8_test_run(test_analyzer_int8_vgg16 ${INT8_IMG_CLASS_TEST_APP} ${INT8_VGG16_MODEL_DIR} ${IMAGENET_DATA_PATH}) + + # vgg19 int8 + set(INT8_VGG19_MODEL_DIR "${INT8_DATA_DIR}/vgg19") + download_int8_data_without_verify(${INT8_VGG19_MODEL_DIR} + "VGG19_int8_model.tar.gz") + # inference_analysis_api_int8_test_run(test_analyzer_int8_vgg19 ${INT8_IMG_CLASS_TEST_APP} ${INT8_VGG19_MODEL_DIR} ${IMAGENET_DATA_PATH}) + + # googlenet int8 + set(INT8_GOOGLENET_MODEL_DIR "${INT8_DATA_DIR}/googlenet") + download_int8_data_without_verify(${INT8_GOOGLENET_MODEL_DIR} + "GoogleNet_int8_model.tar.gz") + inference_analysis_api_int8_test_run_custom_warmup_batch_size( + test_analyzer_int8_googlenet ${INT8_IMG_CLASS_TEST_APP} + ${INT8_GOOGLENET_MODEL_DIR} ${IMAGENET_DATA_PATH} 10) + + # mobilenetv3_large_x1_0 int8 + set(INT8_MOBILENETV3_LARGE_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv3_large") + set(INT8_MOBILENETV3_FILE_NAME "MobileNetV3_large_x1_0_infer.tar") + if(NOT EXISTS + ${INT8_MOBILENETV3_LARGE_MODEL_DIR}/${INT8_MOBILENETV3_FILE_NAME}) + inference_download_and_uncompress_without_verify( + ${INT8_MOBILENETV3_LARGE_MODEL_DIR} + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/" + ${INT8_MOBILENETV3_FILE_NAME}) + endif() + inference_analysis_test_run( + test_analyzer_int8_mobilenetv3_large + COMMAND + ${INT8_IMG_CLASS_TEST_APP} + ARGS + --infer_model=${INT8_MOBILENETV3_LARGE_MODEL_DIR}/MobileNetV3_large_x1_0_infer + --infer_data=${IMAGENET_DATA_PATH} + --warmup_batch_size=50 + --batch_size=1 + --enable_int8_ptq=true + --cpu_num_threads=${CPU_NUM_THREADS_ON_CI} + --iterations=100 + --with_accuracy_layer=false) + + ### BFLOAT16 tests + + # build test binary to be used in subsequent tests + set(BF16_IMG_CLASS_TEST_APP "test_analyzer_bfloat16_image_classification") + set(BF16_IMG_CLASS_TEST_APP_SRC + "analyzer_bfloat16_image_classification_tester.cc") + inference_analysis_api_test_build(${BF16_IMG_CLASS_TEST_APP} + ${BF16_IMG_CLASS_TEST_APP_SRC}) + + # resnet50 bfloat16 + inference_analysis_api_bfloat16_test_run( + test_analyzer_bfloat16_resnet50 ${BF16_IMG_CLASS_TEST_APP} + ${INT8_RESNET50_MODEL_DIR} ${IMAGENET_DATA_PATH}) + + # googlenet bfloat16 + inference_analysis_api_bfloat16_test_run( + test_analyzer_bfloat16_googlenet ${BF16_IMG_CLASS_TEST_APP} + ${INT8_GOOGLENET_MODEL_DIR} ${IMAGENET_DATA_PATH}) + + # mobilenetv1 bfloat16 + inference_analysis_api_bfloat16_test_run( + test_analyzer_bfloat16_mobilenetv1 ${BF16_IMG_CLASS_TEST_APP} + ${INT8_MOBILENETV1_MODEL_DIR} ${IMAGENET_DATA_PATH}) + + # mobilenetv2 bfloat16 + inference_analysis_api_bfloat16_test_run( + test_analyzer_bfloat16_mobilenetv2 ${BF16_IMG_CLASS_TEST_APP} + ${INT8_MOBILENETV2_MODEL_DIR} ${IMAGENET_DATA_PATH}) + + # mobilenetv3_large + inference_analysis_test_run( + test_analyzer_bfloat16_mobilenetv3_large + COMMAND + ${BF16_IMG_CLASS_TEST_APP} + ARGS + --infer_model=${INT8_MOBILENETV3_LARGE_MODEL_DIR}/MobileNetV3_large_x1_0_infer + --infer_data=${IMAGENET_DATA_PATH} + --batch_size=1 + --enable_bf16=true + --paddle_num_threads=${CPU_NUM_THREADS_ON_CI} + --iterations=100 + --with_accuracy_layer=false) + + ### 
Object detection models + set(PASCALVOC_DATA_PATH "${INT8_DATA_DIR}/pascalvoc_val_head_300.bin") + set(INT8_OBJ_DETECT_TEST_APP "test_analyzer_int8_object_detection") + set(INT8_OBJ_DETECT_TEST_APP_SRC "analyzer_int8_object_detection_tester.cc") + + # download dataset if necessary + download_int8_data_without_verify(${INT8_DATA_DIR} + "pascalvoc_val_head_300.tar.gz") + + # build test binary to be used in subsequent tests + inference_analysis_api_test_build(${INT8_OBJ_DETECT_TEST_APP} + ${INT8_OBJ_DETECT_TEST_APP_SRC}) + + # mobilenet-ssd int8 + set(INT8_MOBILENET_SSD_MODEL_DIR "${INT8_DATA_DIR}/mobilenet-ssd") + download_int8_data_without_verify(${INT8_MOBILENET_SSD_MODEL_DIR} + "mobilenet_ssd_int8_model.tar.gz") + inference_analysis_api_object_dection_int8_test_run( + test_analyzer_int8_mobilenet_ssd ${INT8_OBJ_DETECT_TEST_APP} + ${INT8_MOBILENET_SSD_MODEL_DIR} ${PASCALVOC_DATA_PATH}) + + ### Lexcial analysis GRU model + set(GRU_PATH "${INFERENCE_DEMO_INSTALL_DIR}/gru") + download_gru_data_without_verify("${GRU_PATH}" "GRU_eval_data.tar.gz") + download_gru_data_without_verify("${GRU_PATH}" "GRU_eval_model_v2.tar.gz") + set(GRU_DATA_PATH "${GRU_PATH}/GRU_eval_data.bin") + set(GRU_MODEL_PATH "${GRU_PATH}/GRU_eval_model_v2") + set(LEXICAL_TEST_APP "test_analyzer_lexical_analysis") + set(LEXICAL_TEST_APP_SRC "analyzer_lexical_analysis_gru_tester.cc") + + # build test binary to be used in subsequent tests + inference_analysis_api_test_build(${LEXICAL_TEST_APP} + ${LEXICAL_TEST_APP_SRC}) + # run lexcial analysis test + inference_analysis_api_lexical_test_run( + test_analyzer_lexical_gru ${LEXICAL_TEST_APP} ${GRU_MODEL_PATH} + ${GRU_DATA_PATH}) + # run bfloat16 lexical analysis test + inference_analysis_api_lexical_bfloat16_test_run( + test_analyzer_lexical_gru_bfloat16 ${LEXICAL_TEST_APP} ${GRU_MODEL_PATH} + ${GRU_DATA_PATH}) + # run post-training quantization lexical analysis test + inference_analysis_api_lexical_int8_test_run( + test_analyzer_lexical_gru_int8 + ${LEXICAL_TEST_APP} + ${GRU_MODEL_PATH} + ${GRU_DATA_PATH} + true # enable_int8_ptq + false # enable_int8_qat + false) # fuse_multi_gru + # run post-training quantization lexical analysis test with multi_gru fuse + inference_analysis_api_lexical_int8_test_run( + test_analyzer_lexical_gru_int8_multi_gru + ${LEXICAL_TEST_APP} + ${GRU_MODEL_PATH} + ${GRU_DATA_PATH} + true # enable_int8_ptq + false # enable_int8_qat + true) # fuse_multi_gru + + # run qat gru test + set(QAT_GRU_MODEL_ARCHIVE "GRU_quant_acc.tar.gz") + set(QAT_GRU_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/quant/GRU_quant2") + download_quant_data(${QAT_GRU_MODEL_DIR} ${QAT_GRU_MODEL_ARCHIVE} + cf207f8076dcfb8b74d8b6bdddf9090c) + + inference_analysis_api_lexical_int8_test_run( + test_analyzer_lexical_gru_qat_int8 + ${LEXICAL_TEST_APP} + "${QAT_GRU_MODEL_DIR}/GRU_quant_acc" + ${GRU_DATA_PATH} + false # enable_int8_ptq + true # enable_int8_qat + false) # fuse_multi_gru + + ### optimized FP32 vs. Quant INT8 tests + + set(QUANT_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/quant") + set(QUANT_IMG_CLASS_TEST_APP "test_analyzer_quant_image_classification") + set(QUANT_IMG_CLASS_TEST_APP_SRC + "analyzer_quant_image_classification_tester.cc") + + # build test binary to be used in subsequent tests + inference_analysis_api_test_build(${QUANT_IMG_CLASS_TEST_APP} + ${QUANT_IMG_CLASS_TEST_APP_SRC}) + + # MobileNetV1 FP32 vs. 
Quant INT8 + # The FP32 model should already be downloaded for slim Quant unit tests on Linux + set(QUANT2_MobileNetV1_MODEL_DIR "${QUANT_DATA_DIR}/MobileNetV1_quant2") + set(QUANT2_INT8_MobileNetV1_MODEL_DIR + "${QUANT_DATA_DIR}/MobileNetV1_quant2_int8") + if(NOT LINUX) + download_quant_data_without_verify(${QUANT2_MobileNetV1_MODEL_DIR} + "MobileNet_qat_perf.tar.gz") + endif() + download_quant_data_without_verify(${QUANT2_INT8_MobileNetV1_MODEL_DIR} + "MobileNet_qat_perf_int8.tar.gz") + inference_analysis_api_quant_test_run( + test_analyzer_quant_performance_benchmark + ${QUANT_IMG_CLASS_TEST_APP} + ${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float + ${QUANT2_INT8_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf_int8 + ${IMAGENET_DATA_PATH} + false) + + # Quant2 MobileNetV1 + inference_analysis_api_quant_test_run( + test_analyzer_quant2_mobilenetv1_mkldnn + ${QUANT_IMG_CLASS_TEST_APP} + ${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float + ${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float + ${IMAGENET_DATA_PATH} + true) + + # Quant2 ResNet50 with input/output scales in `fake_quantize_range_abs_max` operators and the `out_threshold` attributes, + # with weight scales in `fake_channel_wise_dequantize_max_abs` operators + set(QUANT2_RESNET50_CHANNELWISE_MODEL_DIR + "${QUANT_DATA_DIR}/ResNet50_quant2_channelwise") + set(QUANT2_RESNET50_CHANNELWISE_MODEL_ARCHIVE + "ResNet50_qat_channelwise.tar.gz") + if(NOT LINUX) + download_quant_data_without_verify( + ${QUANT2_RESNET50_CHANNELWISE_MODEL_DIR} + ${QUANT2_RESNET50_CHANNELWISE_MODEL_ARCHIVE}) + endif() + set(QUANT2_RESNET50_MODEL + ${QUANT2_RESNET50_CHANNELWISE_MODEL_DIR}/ResNet50_qat_channelwise) + inference_analysis_api_quant_test_run( + test_analyzer_quant2_resnet50_channelwise_mkldnn + ${QUANT_IMG_CLASS_TEST_APP} ${QUANT2_RESNET50_MODEL} + ${QUANT2_RESNET50_MODEL} ${IMAGENET_DATA_PATH} true) + + ### Other tests + + # MKLDNN quantizer config + set(MKLDNN_QUANTIZER_CONFIG_TEST_APP "test_mkldnn_quantizer_config") + set(MKLDNN_QUANTIZER_CONFIG_TEST_APP_SRC + "mkldnn_quantizer_config_tester.cc") + inference_analysis_api_test_build(${MKLDNN_QUANTIZER_CONFIG_TEST_APP} + ${MKLDNN_QUANTIZER_CONFIG_TEST_APP_SRC}) + inference_analysis_test_run(test_mkldnn_quantizer_config COMMAND + ${MKLDNN_QUANTIZER_CONFIG_TEST_APP}) + + # preprocess data2bin imagenet + download_int8_data_without_verify(${INT8_DATA_DIR} "imagenet_small.tar.gz") + set(IMAGENET_SMALL_DATA_DIR "${INT8_DATA_DIR}/imagenet_small") + set(IMAGENET_SMALL_OUTPUT_FILE "imagenet_small.bin") + preprocess_data2bin_test_run( + preprocess_local_imagenet "full_ILSVRC2012_val_preprocess.py" + ${IMAGENET_SMALL_DATA_DIR} ${IMAGENET_SMALL_OUTPUT_FILE}) + + # preprocess data2bin pascalvoc + download_int8_data_without_verify(${INT8_DATA_DIR} "pascalvoc_small.tar.gz") + set(PASCALVOC_SMALL_DATA_DIR "${INT8_DATA_DIR}/pascalvoc_small") + set(PASCALVOC_SMALL_OUTPUT_FILE "pascalvoc_small.bin") + preprocess_data2bin_test_run( + preprocess_local_pascalvoc "full_pascalvoc_test_preprocess.py" + ${PASCALVOC_SMALL_DATA_DIR} ${PASCALVOC_SMALL_OUTPUT_FILE}) + + endif() + + # bert, max_len=20, embedding_dim=128 + set(BERT_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/bert_emb128") + download_model_and_data_without_verify( + ${BERT_INSTALL_DIR} "bert_emb128_model.tar.gz" "bert_data_len20.txt.tar.gz") + inference_analysis_api_test(test_analyzer_bert ${BERT_INSTALL_DIR} + analyzer_bert_tester.cc) + + # multiple models prediction + set(MMP_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/multi_model_prediction") + 
download_data_without_verify(${MMP_INSTALL_DIR} + PaddleInference/mobilenet_v2_models.tar.gz) + inference_multiple_models_analysis_api_test( + test_analyzer_multi_model_prediction ${MMP_INSTALL_DIR} + analyzer_mmp_tester.cc) + + if(WITH_GPU AND TENSORRT_FOUND) + set(TRT_MODEL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/trt_models") + if(NOT EXISTS ${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models.tar.gz) + inference_download_and_uncompress( + ${TRT_MODEL_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test + "trt_inference_test_models.tar.gz" 3dcccdc38b549b6b1b4089723757bd98) + endif() + set(TEST_SPLIT_CONVERTER_MODEL + "${TRT_MODEL_INSTALL_DIR}/trt_split_op_converter_test") + if(NOT EXISTS ${TEST_SPLIT_CONVERTER_MODEL}/split_converter.tgz) + inference_download_and_uncompress_without_verify( + ${TEST_SPLIT_CONVERTER_MODEL} ${INFERENCE_URL}/tensorrt_test + "split_converter.tgz") + endif() + inference_analysis_test( + trt_mobilenet_test + SRCS + trt_mobilenet_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) + inference_analysis_test( + trt_resnet50_test + SRCS + trt_resnet50_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) + inference_analysis_test( + trt_resnext_test + SRCS + trt_resnext_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) + inference_analysis_test( + trt_fc_prelu_test + SRCS + trt_fc_prelu_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) + inference_analysis_test( + trt_cascade_rcnn_test + SRCS + trt_cascade_rcnn_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) + inference_analysis_test( + trt_split_converter_test + SRCS + trt_split_converter_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TEST_SPLIT_CONVERTER_MODEL}/) + inference_analysis_test( + test_analyzer_capi_exp_gpu + SRCS + analyzer_capi_exp_gpu_tester.cc + EXTRA_DEPS + ${INFERENCE_C_EXTRA_DEPS} + ARGS + --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) + if(WIN32) + target_link_libraries(test_analyzer_capi_exp_gpu + paddle_inference_c_shared) + else() + target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c) + endif() + inference_analysis_test( + test_analyzer_capi_exp_xpu + SRCS + analyzer_capi_exp_xpu_tester.cc + EXTRA_DEPS + ${INFERENCE_C_EXTRA_DEPS} + ARGS + --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models) + if(WIN32) + target_link_libraries(test_analyzer_capi_exp_xpu + paddle_inference_c_shared) + else() + target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c) + endif() + + set(TRT_MODEL_QUANT_RESNET_DIR + "${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model") + if(NOT EXISTS ${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model.tgz) + inference_download_and_uncompress_without_verify( + ${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test + "small_quant_model.tgz") + endif() + inference_analysis_test( + trt_quant_int8_test + SRCS + trt_quant_int8_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TRT_MODEL_QUANT_RESNET_DIR}) + + set(TRT_MODEL_QUANT_YOLOV3_DIR + "${INFERENCE_DEMO_INSTALL_DIR}/yolov3_r50_quant_aware") + if(NOT EXISTS ${INFERENCE_DEMO_INSTALL_DIR}/yolov3_r50_quant_aware.tgz) + inference_download_and_uncompress_without_verify( + 
${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test + "yolov3_r50_quant_aware.tgz") + endif() + inference_analysis_test( + trt_quant_int8_yolov3_r50_test + SRCS + trt_quant_int8_yolov3_r50_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TRT_MODEL_QUANT_YOLOV3_DIR}) + + set(TEST_TRT_DYNAMIC_MODEL2 + "${TRT_MODEL_INSTALL_DIR}/complex_model_dynamic") + if(NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL2}/complex_model_dynamic2.tar.gz) + inference_download_and_uncompress_without_verify( + ${TEST_TRT_DYNAMIC_MODEL2} ${INFERENCE_URL}/tensorrt_test + "complex_model_dynamic2.tar.gz") + endif() + + set(TEST_TRT_DYNAMIC_MODEL + "${TRT_MODEL_INSTALL_DIR}/conv_bn_swish_split_gelu") + if(NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL}/conv_bn_swish_split_gelu.tar.gz) + inference_download_and_uncompress( + ${TEST_TRT_DYNAMIC_MODEL} ${INFERENCE_URL}/tensorrt_test + "conv_bn_swish_split_gelu.tar.gz" 2a5e8791e47b221b4f782151d76da9c6) + endif() + inference_analysis_test( + trt_dynamic_shape_test + SRCS + trt_dynamic_shape_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TRT_MODEL_INSTALL_DIR}) + + set(TEST_TRT_ERNIE_MODEL "${TRT_MODEL_INSTALL_DIR}/ernie_test") + if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4.tar.gz) + inference_download_and_uncompress( + ${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test + "ernie_model_4.tar.gz" 5fa371efa75706becbaad79195d2ca68) + endif() + + inference_analysis_test( + test_trt_dynamic_shape_ernie + SRCS + trt_dynamic_shape_ernie_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4) + + set(TEST_TRT_TRANSFORMER_PRUNE_MODEL + "${TRT_MODEL_INSTALL_DIR}/transformer_prune") + if(NOT EXISTS ${TEST_TRT_TRANSFORMER_PRUNE_MODEL}/transformer_prune.tar.gz) + inference_download_and_uncompress( + ${TEST_TRT_TRANSFORMER_PRUNE_MODEL} ${INFERENCE_URL}/tensorrt_test + "transformer_prune.tar.gz" 77b56dc73ff0cf44ddb1ce9ca0b0f471) + endif() + + inference_analysis_test( + test_trt_dynamic_shape_transformer_prune + SRCS + trt_dynamic_shape_transformer_prune_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TEST_TRT_TRANSFORMER_PRUNE_MODEL}/transformer_prune) + + if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4_unserialized.tgz) + inference_download_and_uncompress( + ${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test + "ernie_model_4_unserialized.tgz" 833d73fc6a7f7e1ee4a1fd6419209e55) + endif() + + inference_analysis_test( + test_trt_dynamic_shape_ernie_ser_deser + SRCS + trt_dynamic_shape_ernie_serialize_deserialize_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4_unserialized) + + if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4_fp16_unserialized.tgz) + inference_download_and_uncompress( + ${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test + "ernie_model_4_fp16_unserialized.tgz" c5ff2d0cad79953ffbf2b8b9e2fae6e4) + endif() + + inference_analysis_test( + test_trt_dynamic_shape_ernie_fp16_ser_deser + SRCS + trt_dynamic_shape_ernie_fp16_serialize_deserialize_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4_fp16_unserialized) + + endif() + + set(LITE_MODEL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/lite") + download_data_without_verify(${LITE_MODEL_INSTALL_DIR} "mul_model_fp32.tgz") + + inference_analysis_test( + lite_mul_model_test + SRCS + lite_mul_model_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${LITE_MODEL_INSTALL_DIR}) 
+ inference_analysis_test( + lite_resnet50_test + SRCS + lite_resnet50_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${RESNET50_MODEL_DIR}) + + inference_analysis_test( + test_analyzer_capi_exp + SRCS + analyzer_capi_exp_tester.cc + EXTRA_DEPS + ${INFERENCE_C_EXTRA_DEPS} + ARGS + --infer_model=${RESNET50_MODEL_DIR}/model) + if(WIN32) + target_link_libraries(test_analyzer_capi_exp paddle_inference_c_shared) + else() + target_link_libraries(test_analyzer_capi_exp paddle_inference_c) + endif() + + inference_analysis_test( + test_analyzer_capi_exp_pd_config + SRCS + analyzer_capi_exp_pd_config_tester.cc + EXTRA_DEPS + ${INFERENCE_C_EXTRA_DEPS} + ARGS + --infer_model=${MOBILENET_INSTALL_DIR}/model) + if(WIN32) + target_link_libraries(test_analyzer_capi_exp_pd_config + paddle_inference_c_shared) + else() + target_link_libraries(test_analyzer_capi_exp_pd_config paddle_inference_c) + endif() + + inference_analysis_test( + test_analyzer_capi_exp_pd_tensor + SRCS + analyzer_capi_exp_pd_tensor_tester.cc + EXTRA_DEPS + ${INFERENCE_C_EXTRA_DEPS} + ARGS + --infer_model=${MOBILENET_INSTALL_DIR}/model) + if(WIN32) + target_link_libraries(test_analyzer_capi_exp_pd_tensor + paddle_inference_c_shared) + else() + target_link_libraries(test_analyzer_capi_exp_pd_tensor paddle_inference_c) + endif() + + if(NOT APPLE AND NOT WIN32) + inference_analysis_test( + test_analyzer_capi_exp_pd_threads + SRCS + analyzer_capi_exp_pd_threads_tester.cc + EXTRA_DEPS + ${INFERENCE_C_EXTRA_DEPS} + ARGS + --infer_model=${MOBILENET_INSTALL_DIR}/model) + if(WIN32) + target_link_libraries(test_analyzer_capi_exp_pd_threads + paddle_inference_c_shared) + else() + target_link_libraries(test_analyzer_capi_exp_pd_threads + paddle_inference_c) + endif() + endif() + + inference_analysis_test( + test_analyzer_zerocopytensor_tensor + SRCS + analyzer_zerocopy_tensor_tester.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${OCR_INSTALL_DIR}/model) + + if(WITH_DISTRIBUTE AND WITH_PSCORE) + inference_analysis_test( + test_analyzer_dist_model + SRCS + analyzer_dist_model_tester.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${OCR_INSTALL_DIR}/model) + endif() + + if(WITH_DISTRIBUTE + AND WITH_PSCORE + AND WITH_XPU + AND WITH_XPU_BKCL) + inference_analysis_test( + test_analyzer_dist_model_xpu + SRCS + analyzer_dist_model_xpu_tester.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${OCR_INSTALL_DIR}/model) + endif() + + inference_analysis_test( + test_analyzer_paddletensor_tensor + SRCS + analyzer_paddle_tensor_tester.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${OCR_INSTALL_DIR}/model + --infer_data=${OCR_INSTALL_DIR}/data.txt + --refer_result=${OCR_INSTALL_DIR}/result.txt) + + if(WITH_MKLDNN) + inference_analysis_test( + test_analyzer_capi_exp_int + SRCS + analyzer_capi_exp_int_tester.cc + EXTRA_DEPS + ${INFERENCE_C_EXTRA_DEPS} + ARGS + --infer_model=${INT8_DATA_DIR}/resnet50/model) + if(WIN32) + target_link_libraries(test_analyzer_capi_exp_int + paddle_inference_c_shared) + else() + target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c) + endif() + endif() + + inference_analysis_test( + test_analyzer_capi_exp_ner + SRCS + analyzer_capi_exp_ner_tester.cc + EXTRA_DEPS + ${INFERENCE_C_EXTRA_DEPS} + ARGS + --infer_model=${CHINESE_NER_INSTALL_DIR}/model) + if(WIN32) + target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c_shared) + else() + target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c) + endif() + + 
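Every C-API tester above repeats the same platform-specific link step for the reason given in the comment at the top of this file: on Windows the test links both shared libraries to avoid symbol redefinition, while on Linux doing so would register graph_to_program more than once. A minimal sketch of how that repeated block could be factored out; the helper name link_paddle_inference_c is hypothetical and not part of this patch:

  # Hypothetical helper (not in this patch) factoring out the repeated
  # if(WIN32)/else() link step applied to the C-API testers above.
  function(link_paddle_inference_c TARGET)
    if(WIN32)
      # Windows builds link the shared C library (together with the shared
      # deps in INFERENCE_C_EXTRA_DEPS) to avoid symbol redefinition.
      target_link_libraries(${TARGET} paddle_inference_c_shared)
    else()
      # On Linux linking both shared libraries would register
      # graph_to_program more than once, so paddle_inference_c is used.
      target_link_libraries(${TARGET} paddle_inference_c)
    endif()
  endfunction()

  # Equivalent to one of the blocks above, e.g.:
  # link_paddle_inference_c(test_analyzer_capi_exp_ner)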
+  if(WITH_GPU)
+    inference_analysis_test(
+      paddle_infer_api_test
+      SRCS
+      paddle_infer_api_test.cc
+      EXTRA_DEPS
+      paddle_inference_shared
+      ARGS
+      --infer_model=${RESNET50_MODEL_DIR})
+
+    inference_analysis_test(
+      paddle_infer_api_copy_tensor_tester
+      SRCS
+      paddle_infer_api_copy_tensor_tester.cc
+      EXTRA_DEPS
+      paddle_inference_shared
+      ARGS
+      --infer_model=${RESNET50_MODEL_DIR})
+    set_tests_properties(paddle_infer_api_copy_tensor_tester PROPERTIES TIMEOUT
+                         30)
+  endif()
+
+  cc_test(
+    paddle_infer_api_errors_test
+    SRCS paddle_infer_api_errors_tester.cc
+    DEPS paddle_inference_api)
+
+  if(WITH_GPU AND TENSORRT_FOUND)
+    set_tests_properties(trt_resnext_test PROPERTIES TIMEOUT 300)
+    set_tests_properties(trt_quant_int8_yolov3_r50_test PROPERTIES TIMEOUT 400)
+    set_tests_properties(trt_resnet50_test PROPERTIES TIMEOUT 300)
+    set_tests_properties(trt_cascade_rcnn_test PROPERTIES TIMEOUT 300)
+    set_tests_properties(test_trt_dynamic_shape_ernie_ser_deser
+                         PROPERTIES TIMEOUT 300)
+    set_tests_properties(test_trt_dynamic_shape_ernie_fp16_ser_deser
+                         PROPERTIES TIMEOUT 300)
+    set_tests_properties(test_trt_dynamic_shape_ernie PROPERTIES TIMEOUT 300)
+  endif()
+
+  if(WITH_MKLDNN)
+    set_tests_properties(test_analyzer_int8_resnet50 PROPERTIES TIMEOUT 120)
+    set_tests_properties(test_analyzer_int8_mobilenet_ssd PROPERTIES TIMEOUT
+                         120)
+    set_tests_properties(test_analyzer_quant_performance_benchmark
+                         PROPERTIES TIMEOUT 120)
+    set_tests_properties(test_analyzer_int8_mobilenetv2 PROPERTIES TIMEOUT 120)
+    set_tests_properties(test_analyzer_int8_mobilenetv1 PROPERTIES TIMEOUT 120)
+    set_tests_properties(test_analyzer_int8_mobilenetv3_large PROPERTIES TIMEOUT
+                         120)
+    set_tests_properties(test_analyzer_quant2_mobilenetv1_mkldnn
+                         PROPERTIES TIMEOUT 120)
+    set_tests_properties(test_analyzer_quant2_resnet50_channelwise_mkldnn
+                         PROPERTIES TIMEOUT 120)
+  endif()
+
+  set_tests_properties(lite_resnet50_test PROPERTIES TIMEOUT 120)
+  set_tests_properties(test_analyzer_mobilenet_transpose PROPERTIES TIMEOUT 120)
+  set_tests_properties(test_analyzer_resnet50 PROPERTIES TIMEOUT 120)
+  set_tests_properties(test_analyzer_ner PROPERTIES TIMEOUT 120)
+  set_tests_properties(test_analyzer_ernie_int8 PROPERTIES TIMEOUT 120)
+  set_tests_properties(test_analyzer_googlenet PROPERTIES TIMEOUT 120)
+  set_tests_properties(test_analyzer_small_dam PROPERTIES TIMEOUT 120)
+  set_tests_properties(test_analyzer_transformer PROPERTIES TIMEOUT 120)
+  set_tests_properties(test_analyzer_mobilenet_depthwise_conv PROPERTIES TIMEOUT
+                       120)
+  if(WITH_GPU)
+    set_tests_properties(test_analyzer_bert PROPERTIES TIMEOUT 120)
+    set_tests_properties(test_analyzer_ernie PROPERTIES TIMEOUT 120)
+  endif()
+  if(WITH_GPU AND TENSORRT_FOUND)
+    set_tests_properties(trt_mobilenet_test PROPERTIES TIMEOUT 120)
+    if(WITH_MKLDNN)
+      set_tests_properties(test_analyzer_bfloat16_resnet50 PROPERTIES TIMEOUT
+                           120)
+    endif()
+  endif()
+  if(ON_INFER OR WITH_GPU)
+    set_tests_properties(test_analyzer_transformer_profile PROPERTIES TIMEOUT
+                         120)
+  endif()
+
+  if(WITH_IPU)
+    #word2vec sample
+    set(WORD2VEC_INSTALL_DIR
+        "${INFERENCE_DEMO_INSTALL_DIR}/word2vec/word2vec.inference.model")
+    inference_analysis_test(
+      ipu_word2vec_sample
+      SRCS
+      ipu_word2vec_sample.cc
+      EXTRA_DEPS
+      paddle_inference_shared
+      ARGS
+      --infer_model=${WORD2VEC_INSTALL_DIR})
+
+    # ERNIE
+    set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie")
+    inference_analysis_api_test(
+      ipu_ernie_test ${ERNIE_INSTALL_DIR} ipu_ernie_test.cc ARGS --warmup=true
+      --repeat=10)
+
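The timeout block above relies on each set_tests_properties() call sitting inside the same if(WITH_...) guard that created the target. An equivalent, more defensive pattern gates on the test itself with if(TEST ...); the helper below is only a sketch with an invented name, not something this patch adds:

function(set_inference_test_timeout TARGET SECONDS)
  # Skip silently when the target was not built under the current flags.
  if(TEST ${TARGET})
    set_tests_properties(${TARGET} PROPERTIES TIMEOUT ${SECONDS})
  endif()
endfunction()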
inference_analysis_api_test( + ipu_ernie_fp16_test ${ERNIE_INSTALL_DIR} ipu_ernie_fp16_test.cc ARGS + --warmup=true --repeat=10) + + # Resnet50 + set(RESNET50_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/resnet50") + inference_analysis_test( + ipu_resnet50_test + SRCS + ipu_resnet50_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${RESNET50_MODEL_DIR} + --warmup=true + --repeat=10) + inference_analysis_test( + ipu_resnet50_fp16_test + SRCS + ipu_resnet50_fp16_test.cc + EXTRA_DEPS + paddle_inference_shared + ARGS + --infer_model=${RESNET50_MODEL_DIR} + --warmup=true + --repeat=10) + + # Only support Resnet50 and Ernie currently + inference_analysis_api_test( + ipu_multi_model_profile + SRCS + ipu_multi_model_profile.cc + ARGS + --model_name="Resnet50" + --infer_model=${RESNET50_MODEL_DIR} + --warmup=true + --repeat=10) + endif() + + set(inference_deps ${analysis_deps} paddle_inference_api analysis + naive_executor ${GLOB_PASS_LIB}) + + if(WITH_TESTING) + if(NOT APPLE AND NOT WIN32) + inference_base_test( + test_api_impl + SRCS + api_impl_tester.cc + DEPS + paddle_inference_shared + ARGS + --word2vec_dirname=${WORD2VEC_MODEL_DIR} + --book_dirname=${IMG_CLS_RESNET_INSTALL_DIR}) + elseif(WIN32) + inference_base_test( + test_api_impl + SRCS + api_impl_tester.cc + DEPS + ${inference_deps} + ARGS + --word2vec_dirname=${WORD2VEC_MODEL_DIR} + --book_dirname=${IMG_CLS_RESNET_INSTALL_DIR}) + endif() + endif() + + if(NOT APPLE AND NOT WIN32) + cc_test_old( + test_analysis_predictor + SRCS + analysis_predictor_tester.cc + DEPS + paddle_inference_shared + ARGS + --dirname=${WORD2VEC_MODEL_DIR}) + elseif(WIN32) + cc_test_old( + test_analysis_predictor + SRCS + analysis_predictor_tester.cc + DEPS + analysis_predictor + benchmark + ${inference_deps} + ARGS + --dirname=${WORD2VEC_MODEL_DIR}) + endif() + + if(WITH_TESTING AND WITH_MKLDNN) + if(NOT APPLE AND NOT WIN32) + cc_test( + test_mkldnn_quantizer + SRCS mkldnn_quantizer_tester.cc + DEPS paddle_inference_shared ARGS --dirname=${WORD2VEC_MODEL_DIR}) + elseif(WIN32) + cc_test( + test_mkldnn_quantizer + SRCS mkldnn_quantizer_tester.cc + DEPS analysis_predictor benchmark ${inference_deps} ARGS + --dirname=${WORD2VEC_MODEL_DIR}) + endif() + endif() + + if(WITH_TESTING AND TEST test_api_impl) + if(NOT APPLE) + set_tests_properties(test_api_impl PROPERTIES TIMEOUT 120) + endif() + endif() +endif() diff --git a/paddle/fluid/inference/api/analysis_predictor_tester.cc b/test/cpp/inference/api/analysis_predictor_tester.cc similarity index 99% rename from paddle/fluid/inference/api/analysis_predictor_tester.cc rename to test/cpp/inference/api/analysis_predictor_tester.cc index 3b462cc941e..5692d7607bb 100644 --- a/paddle/fluid/inference/api/analysis_predictor_tester.cc +++ b/test/cpp/inference/api/analysis_predictor_tester.cc @@ -27,9 +27,9 @@ #include "paddle/fluid/inference/api/helper.h" #include "paddle/fluid/inference/api/paddle_api.h" #include "paddle/fluid/inference/api/paddle_inference_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "paddle/fluid/inference/utils/io_utils.h" #include "paddle/phi/backends/cpu/cpu_info.h" +#include "test/cpp/inference/api/tester_helper.h" DEFINE_string(dirname, "", "dirname to tests."); diff --git a/paddle/fluid/inference/tests/api/analyzer_bert_tester.cc b/test/cpp/inference/api/analyzer_bert_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_bert_tester.cc rename to test/cpp/inference/api/analyzer_bert_tester.cc index e7462786c40..ea4db73f6f5 
100644 --- a/paddle/fluid/inference/tests/api/analyzer_bert_tester.cc +++ b/test/cpp/inference/api/analyzer_bert_tester.cc @@ -13,7 +13,7 @@ // limitations under the License. #include "paddle/fluid/framework/transfer_scope_cache.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_bfloat16_image_classification_tester.cc b/test/cpp/inference/api/analyzer_bfloat16_image_classification_tester.cc similarity index 97% rename from paddle/fluid/inference/tests/api/analyzer_bfloat16_image_classification_tester.cc rename to test/cpp/inference/api/analyzer_bfloat16_image_classification_tester.cc index c92d2ebf278..f6d4d8e8f76 100644 --- a/paddle/fluid/inference/tests/api/analyzer_bfloat16_image_classification_tester.cc +++ b/test/cpp/inference/api/analyzer_bfloat16_image_classification_tester.cc @@ -13,8 +13,8 @@ limitations under the License. */ #include #include "paddle/fluid/inference/api/paddle_analysis_config.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "paddle/phi/backends/cpu/cpu_info.h" +#include "test/cpp/inference/api/tester_helper.h" DEFINE_bool(enable_mkldnn, true, "Enable MKLDNN"); diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_exp_gpu_tester.cc b/test/cpp/inference/api/analyzer_capi_exp_gpu_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_capi_exp_gpu_tester.cc rename to test/cpp/inference/api/analyzer_capi_exp_gpu_tester.cc index 7faf06567dd..eae65e45858 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_exp_gpu_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_exp_gpu_tester.cc @@ -24,7 +24,7 @@ limitations under the License. */ #endif #include "paddle/fluid/inference/capi_exp/pd_inference_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_exp_int_tester.cc b/test/cpp/inference/api/analyzer_capi_exp_int_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_capi_exp_int_tester.cc rename to test/cpp/inference/api/analyzer_capi_exp_int_tester.cc index b83e5d68cca..98d3d2fd099 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_exp_int_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_exp_int_tester.cc @@ -20,7 +20,7 @@ limitations under the License. 
*/ #include #include "paddle/fluid/inference/capi_exp/pd_inference_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_exp_ner_tester.cc b/test/cpp/inference/api/analyzer_capi_exp_ner_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_capi_exp_ner_tester.cc rename to test/cpp/inference/api/analyzer_capi_exp_ner_tester.cc index 2477752e870..5ed9399426d 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_exp_ner_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_exp_ner_tester.cc @@ -20,7 +20,7 @@ #include #include "paddle/fluid/inference/capi_exp/pd_inference_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_exp_pd_config_tester.cc b/test/cpp/inference/api/analyzer_capi_exp_pd_config_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_capi_exp_pd_config_tester.cc rename to test/cpp/inference/api/analyzer_capi_exp_pd_config_tester.cc index 6bce6b0d9df..f8bc06b8dda 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_exp_pd_config_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_exp_pd_config_tester.cc @@ -20,7 +20,7 @@ limitations under the License. */ #include #include "paddle/fluid/inference/capi_exp/pd_inference_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_exp_pd_tensor_tester.cc b/test/cpp/inference/api/analyzer_capi_exp_pd_tensor_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_capi_exp_pd_tensor_tester.cc rename to test/cpp/inference/api/analyzer_capi_exp_pd_tensor_tester.cc index 759d00d8caf..c53996f5595 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_exp_pd_tensor_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_exp_pd_tensor_tester.cc @@ -23,7 +23,7 @@ limitations under the License. */ #include #include "paddle/fluid/inference/capi_exp/pd_inference_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_exp_pd_threads_tester.cc b/test/cpp/inference/api/analyzer_capi_exp_pd_threads_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_capi_exp_pd_threads_tester.cc rename to test/cpp/inference/api/analyzer_capi_exp_pd_threads_tester.cc index 6b34028e2d8..5f43927e933 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_exp_pd_threads_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_exp_pd_threads_tester.cc @@ -23,7 +23,7 @@ limitations under the License. 
*/ #include #include "paddle/fluid/inference/capi_exp/pd_inference_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_exp_tester.cc b/test/cpp/inference/api/analyzer_capi_exp_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_capi_exp_tester.cc rename to test/cpp/inference/api/analyzer_capi_exp_tester.cc index 4b2852be861..27afcda3619 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_exp_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_exp_tester.cc @@ -21,7 +21,7 @@ limitations under the License. */ #include "paddle/fluid/inference/capi_exp/pd_config.h" #include "paddle/fluid/inference/capi_exp/pd_inference_api.h" #include "paddle/fluid/inference/capi_exp/pd_utils.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_exp_xpu_tester.cc b/test/cpp/inference/api/analyzer_capi_exp_xpu_tester.cc similarity index 97% rename from paddle/fluid/inference/tests/api/analyzer_capi_exp_xpu_tester.cc rename to test/cpp/inference/api/analyzer_capi_exp_xpu_tester.cc index 347f0e6e253..6584c2ed9fa 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_exp_xpu_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_exp_xpu_tester.cc @@ -20,7 +20,7 @@ limitations under the License. */ #include #include "paddle/fluid/inference/capi_exp/pd_inference_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_gpu_tester.cc b/test/cpp/inference/api/analyzer_capi_gpu_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_capi_gpu_tester.cc rename to test/cpp/inference/api/analyzer_capi_gpu_tester.cc index 255d87c799e..6a58ecc8a00 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_gpu_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_gpu_tester.cc @@ -20,7 +20,7 @@ limitations under the License. */ #include #include "paddle/fluid/inference/capi/paddle_c_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_int_tester.cc b/test/cpp/inference/api/analyzer_capi_int_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_capi_int_tester.cc rename to test/cpp/inference/api/analyzer_capi_int_tester.cc index cf8582ee778..0a0a7b4460f 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_int_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_int_tester.cc @@ -20,7 +20,7 @@ limitations under the License. 
*/ #include #include "paddle/fluid/inference/capi/paddle_c_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_ner_tester.cc b/test/cpp/inference/api/analyzer_capi_ner_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_capi_ner_tester.cc rename to test/cpp/inference/api/analyzer_capi_ner_tester.cc index 05e808e9fe3..feeb583446c 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_ner_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_ner_tester.cc @@ -20,7 +20,7 @@ #include #include "paddle/fluid/inference/capi/paddle_c_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc b/test/cpp/inference/api/analyzer_capi_pd_tensor_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc rename to test/cpp/inference/api/analyzer_capi_pd_tensor_tester.cc index c707fa825bb..9d291b24021 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_pd_tensor_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_pd_tensor_tester.cc @@ -24,7 +24,7 @@ limitations under the License. */ #include "paddle/fluid/inference/capi/c_api_internal.h" #include "paddle/fluid/inference/capi/paddle_c_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_tester.cc b/test/cpp/inference/api/analyzer_capi_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_capi_tester.cc rename to test/cpp/inference/api/analyzer_capi_tester.cc index 4ff3e27f420..afa8b59e5f3 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_tester.cc @@ -20,7 +20,7 @@ limitations under the License. */ #include #include "paddle/fluid/inference/capi/paddle_c_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_capi_xpu_tester.cc b/test/cpp/inference/api/analyzer_capi_xpu_tester.cc similarity index 97% rename from paddle/fluid/inference/tests/api/analyzer_capi_xpu_tester.cc rename to test/cpp/inference/api/analyzer_capi_xpu_tester.cc index e6a6a8c1037..af2990e728a 100644 --- a/paddle/fluid/inference/tests/api/analyzer_capi_xpu_tester.cc +++ b/test/cpp/inference/api/analyzer_capi_xpu_tester.cc @@ -20,7 +20,7 @@ limitations under the License. 
*/ #include #include "paddle/fluid/inference/capi/paddle_c_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_dam_tester.cc b/test/cpp/inference/api/analyzer_dam_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_dam_tester.cc rename to test/cpp/inference/api/analyzer_dam_tester.cc index 36a2dfcb715..707743b2655 100644 --- a/paddle/fluid/inference/tests/api/analyzer_dam_tester.cc +++ b/test/cpp/inference/api/analyzer_dam_tester.cc @@ -15,7 +15,7 @@ #include #include "paddle/fluid/inference/analysis/helper.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" const int FLAGS_max_turn_num = 1; diff --git a/paddle/fluid/inference/tests/api/analyzer_detect_functional_mkldnn_tester.cc b/test/cpp/inference/api/analyzer_detect_functional_mkldnn_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_detect_functional_mkldnn_tester.cc rename to test/cpp/inference/api/analyzer_detect_functional_mkldnn_tester.cc index 277f68d09bd..c8e9546b3dd 100644 --- a/paddle/fluid/inference/tests/api/analyzer_detect_functional_mkldnn_tester.cc +++ b/test/cpp/inference/api/analyzer_detect_functional_mkldnn_tester.cc @@ -17,8 +17,8 @@ limitations under the License. */ #include #include -#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "paddle/phi/common/place.h" +#include "test/cpp/inference/api/tester_helper.h" DEFINE_string(infer_shape, "", "data shape file"); DEFINE_int32(sample, 20, "number of sample"); diff --git a/paddle/fluid/inference/tests/api/analyzer_detect_tester.cc b/test/cpp/inference/api/analyzer_detect_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_detect_tester.cc rename to test/cpp/inference/api/analyzer_detect_tester.cc index d90800e7f48..ba0e61dcd60 100644 --- a/paddle/fluid/inference/tests/api/analyzer_detect_tester.cc +++ b/test/cpp/inference/api/analyzer_detect_tester.cc @@ -17,7 +17,7 @@ limitations under the License. 
*/ #include #include -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" DEFINE_string(infer_shape, "", "data shape file"); DEFINE_int32(sample, 20, "number of sample"); diff --git a/paddle/fluid/inference/tests/api/analyzer_dist_model_tester.cc b/test/cpp/inference/api/analyzer_dist_model_tester.cc similarity index 97% rename from paddle/fluid/inference/tests/api/analyzer_dist_model_tester.cc rename to test/cpp/inference/api/analyzer_dist_model_tester.cc index eda679bccde..989b8122e16 100644 --- a/paddle/fluid/inference/tests/api/analyzer_dist_model_tester.cc +++ b/test/cpp/inference/api/analyzer_dist_model_tester.cc @@ -17,8 +17,8 @@ #include "paddle/fluid/framework/op_desc.h" #include "paddle/fluid/framework/program_desc.h" #include "paddle/fluid/framework/scope.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "paddle/fluid/inference/utils/singleton.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_dist_model_xpu_tester.cc b/test/cpp/inference/api/analyzer_dist_model_xpu_tester.cc similarity index 97% rename from paddle/fluid/inference/tests/api/analyzer_dist_model_xpu_tester.cc rename to test/cpp/inference/api/analyzer_dist_model_xpu_tester.cc index cb7688221e2..ff5ca34bfb2 100644 --- a/paddle/fluid/inference/tests/api/analyzer_dist_model_xpu_tester.cc +++ b/test/cpp/inference/api/analyzer_dist_model_xpu_tester.cc @@ -17,8 +17,8 @@ #include "paddle/fluid/framework/op_desc.h" #include "paddle/fluid/framework/program_desc.h" #include "paddle/fluid/framework/scope.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "paddle/fluid/inference/utils/singleton.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_ernie_int8_tester.cc b/test/cpp/inference/api/analyzer_ernie_int8_tester.cc similarity index 97% rename from paddle/fluid/inference/tests/api/analyzer_ernie_int8_tester.cc rename to test/cpp/inference/api/analyzer_ernie_int8_tester.cc index 87f4883b7f0..a93bbb1ab7e 100644 --- a/paddle/fluid/inference/tests/api/analyzer_ernie_int8_tester.cc +++ b/test/cpp/inference/api/analyzer_ernie_int8_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "paddle/fluid/inference/tests/api/analyzer_ernie_tester.h" +#include "test/cpp/inference/api/analyzer_ernie_tester.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_ernie_tester.cc b/test/cpp/inference/api/analyzer_ernie_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_ernie_tester.cc rename to test/cpp/inference/api/analyzer_ernie_tester.cc index 8159633f514..2c12b716650 100644 --- a/paddle/fluid/inference/tests/api/analyzer_ernie_tester.cc +++ b/test/cpp/inference/api/analyzer_ernie_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#include "paddle/fluid/inference/tests/api/analyzer_ernie_tester.h" +#include "test/cpp/inference/api/analyzer_ernie_tester.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_ernie_tester.h b/test/cpp/inference/api/analyzer_ernie_tester.h similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_ernie_tester.h rename to test/cpp/inference/api/analyzer_ernie_tester.h index 2fec8d9b12c..48e0cc5df31 100644 --- a/paddle/fluid/inference/tests/api/analyzer_ernie_tester.h +++ b/test/cpp/inference/api/analyzer_ernie_tester.h @@ -14,7 +14,7 @@ #pragma once -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_image_classification_tester.cc b/test/cpp/inference/api/analyzer_image_classification_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_image_classification_tester.cc rename to test/cpp/inference/api/analyzer_image_classification_tester.cc index e25c78bd287..f1479c56edb 100644 --- a/paddle/fluid/inference/tests/api/analyzer_image_classification_tester.cc +++ b/test/cpp/inference/api/analyzer_image_classification_tester.cc @@ -15,7 +15,7 @@ limitations under the License. */ #include #include -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" DEFINE_bool(disable_mkldnn_fc, false, "Disable usage of MKL-DNN's FC op"); diff --git a/paddle/fluid/inference/tests/api/analyzer_int8_image_classification_tester.cc b/test/cpp/inference/api/analyzer_int8_image_classification_tester.cc similarity index 97% rename from paddle/fluid/inference/tests/api/analyzer_int8_image_classification_tester.cc rename to test/cpp/inference/api/analyzer_int8_image_classification_tester.cc index cb28c8b891c..57552ccb82e 100644 --- a/paddle/fluid/inference/tests/api/analyzer_int8_image_classification_tester.cc +++ b/test/cpp/inference/api/analyzer_int8_image_classification_tester.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "paddle/fluid/inference/api/paddle_analysis_config.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" DEFINE_bool(enable_mkldnn, true, "Enable MKLDNN"); diff --git a/paddle/fluid/inference/tests/api/analyzer_int8_object_detection_tester.cc b/test/cpp/inference/api/analyzer_int8_object_detection_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_int8_object_detection_tester.cc rename to test/cpp/inference/api/analyzer_int8_object_detection_tester.cc index 9eecd90890b..a3b13dd691c 100644 --- a/paddle/fluid/inference/tests/api/analyzer_int8_object_detection_tester.cc +++ b/test/cpp/inference/api/analyzer_int8_object_detection_tester.cc @@ -16,7 +16,7 @@ limitations under the License. 
*/ #include #include "paddle/fluid/inference/api/paddle_analysis_config.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" DEFINE_bool(enable_mkldnn, true, "Enable MKLDNN"); diff --git a/paddle/fluid/inference/tests/api/analyzer_lac_tester.cc b/test/cpp/inference/api/analyzer_lac_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_lac_tester.cc rename to test/cpp/inference/api/analyzer_lac_tester.cc index 1bbd4d14173..c4ef9389bd4 100644 --- a/paddle/fluid/inference/tests/api/analyzer_lac_tester.cc +++ b/test/cpp/inference/api/analyzer_lac_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_lexical_analysis_gru_tester.cc b/test/cpp/inference/api/analyzer_lexical_analysis_gru_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_lexical_analysis_gru_tester.cc rename to test/cpp/inference/api/analyzer_lexical_analysis_gru_tester.cc index 77decf3e1f9..829f617884f 100644 --- a/paddle/fluid/inference/tests/api/analyzer_lexical_analysis_gru_tester.cc +++ b/test/cpp/inference/api/analyzer_lexical_analysis_gru_tester.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "paddle/fluid/inference/api/paddle_analysis_config.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" // setting iterations to 0 means processing the whole dataset namespace paddle { diff --git a/paddle/fluid/inference/tests/api/analyzer_mmp_tester.cc b/test/cpp/inference/api/analyzer_mmp_tester.cc similarity index 96% rename from paddle/fluid/inference/tests/api/analyzer_mmp_tester.cc rename to test/cpp/inference/api/analyzer_mmp_tester.cc index 9701305b8bb..59e4953d48c 100644 --- a/paddle/fluid/inference/tests/api/analyzer_mmp_tester.cc +++ b/test/cpp/inference/api/analyzer_mmp_tester.cc @@ -15,7 +15,7 @@ #include #include "paddle/fluid/framework/transfer_scope_cache.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" // Here add missing commands DEFINE_string(infer_model2, "", "model path"); @@ -35,7 +35,9 @@ void SetConfig(AnalysisConfig* config, const std::string& infer_model) { } std::unique_ptr InitializePredictor( - const std::string& infer_model, std::vector& data, bool use_mkldnn) { + const std::string& infer_model, + const std::vector& data, + bool use_mkldnn) { AnalysisConfig cfg; SetConfig(&cfg, infer_model); if (use_mkldnn) { diff --git a/paddle/fluid/inference/tests/api/analyzer_ner_tester.cc b/test/cpp/inference/api/analyzer_ner_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_ner_tester.cc rename to test/cpp/inference/api/analyzer_ner_tester.cc index 191b883a5d5..cdd375fc154 100644 --- a/paddle/fluid/inference/tests/api/analyzer_ner_tester.cc +++ b/test/cpp/inference/api/analyzer_ner_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_paddle_tensor_tester.cc b/test/cpp/inference/api/analyzer_paddle_tensor_tester.cc similarity index 97% rename from paddle/fluid/inference/tests/api/analyzer_paddle_tensor_tester.cc rename to test/cpp/inference/api/analyzer_paddle_tensor_tester.cc index 2c02b87ba2b..ed2c8d18587 100644 --- a/paddle/fluid/inference/tests/api/analyzer_paddle_tensor_tester.cc +++ b/test/cpp/inference/api/analyzer_paddle_tensor_tester.cc @@ -16,8 +16,8 @@ #include "paddle/fluid/framework/op_desc.h" #include "paddle/fluid/framework/program_desc.h" #include "paddle/fluid/framework/scope.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "paddle/fluid/inference/utils/singleton.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_pyramid_dnn_tester.cc b/test/cpp/inference/api/analyzer_pyramid_dnn_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_pyramid_dnn_tester.cc rename to test/cpp/inference/api/analyzer_pyramid_dnn_tester.cc index 578d3c57273..e7c606c0f73 100644 --- a/paddle/fluid/inference/tests/api/analyzer_pyramid_dnn_tester.cc +++ b/test/cpp/inference/api/analyzer_pyramid_dnn_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_quant_image_classification_tester.cc b/test/cpp/inference/api/analyzer_quant_image_classification_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_quant_image_classification_tester.cc rename to test/cpp/inference/api/analyzer_quant_image_classification_tester.cc index 5ef328d7ff2..69b627275cd 100644 --- a/paddle/fluid/inference/tests/api/analyzer_quant_image_classification_tester.cc +++ b/test/cpp/inference/api/analyzer_quant_image_classification_tester.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "paddle/fluid/inference/api/paddle_analysis_config.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" DEFINE_bool(enable_mkldnn, true, "Enable MKLDNN"); diff --git a/paddle/fluid/inference/tests/api/analyzer_rnn1_tester.cc b/test/cpp/inference/api/analyzer_rnn1_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_rnn1_tester.cc rename to test/cpp/inference/api/analyzer_rnn1_tester.cc index 2317d02331f..7f0f11f5515 100644 --- a/paddle/fluid/inference/tests/api/analyzer_rnn1_tester.cc +++ b/test/cpp/inference/api/analyzer_rnn1_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" DEFINE_bool(with_precision_check, true, "turn on test"); diff --git a/paddle/fluid/inference/tests/api/analyzer_rnn2_tester.cc b/test/cpp/inference/api/analyzer_rnn2_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_rnn2_tester.cc rename to test/cpp/inference/api/analyzer_rnn2_tester.cc index 9a80f004e24..dac6877b24a 100644 --- a/paddle/fluid/inference/tests/api/analyzer_rnn2_tester.cc +++ b/test/cpp/inference/api/analyzer_rnn2_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_save_model_tester.cc b/test/cpp/inference/api/analyzer_save_model_tester.cc similarity index 97% rename from paddle/fluid/inference/tests/api/analyzer_save_model_tester.cc rename to test/cpp/inference/api/analyzer_save_model_tester.cc index e0310d3bf1a..dd6157b5429 100644 --- a/paddle/fluid/inference/tests/api/analyzer_save_model_tester.cc +++ b/test/cpp/inference/api/analyzer_save_model_tester.cc @@ -13,7 +13,7 @@ // limitations under the License. #include "paddle/fluid/inference/analysis/helper.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_seq_conv1_tester.cc b/test/cpp/inference/api/analyzer_seq_conv1_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_seq_conv1_tester.cc rename to test/cpp/inference/api/analyzer_seq_conv1_tester.cc index f5a583343ac..17bd7d3d0a4 100644 --- a/paddle/fluid/inference/tests/api/analyzer_seq_conv1_tester.cc +++ b/test/cpp/inference/api/analyzer_seq_conv1_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_compare_determine_tester.cc b/test/cpp/inference/api/analyzer_seq_pool1_compare_determine_tester.cc similarity index 89% rename from paddle/fluid/inference/tests/api/analyzer_seq_pool1_compare_determine_tester.cc rename to test/cpp/inference/api/analyzer_seq_pool1_compare_determine_tester.cc index 1ef5e81e18a..80ffde9786a 100644 --- a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_compare_determine_tester.cc +++ b/test/cpp/inference/api/analyzer_seq_pool1_compare_determine_tester.cc @@ -16,8 +16,8 @@ limitations under the License. 
*/ #include #include -#include "paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_compare_tester.cc b/test/cpp/inference/api/analyzer_seq_pool1_compare_tester.cc similarity index 89% rename from paddle/fluid/inference/tests/api/analyzer_seq_pool1_compare_tester.cc rename to test/cpp/inference/api/analyzer_seq_pool1_compare_tester.cc index 5a78d36276c..e0eaa2293c4 100644 --- a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_compare_tester.cc +++ b/test/cpp/inference/api/analyzer_seq_pool1_compare_tester.cc @@ -16,8 +16,8 @@ limitations under the License. */ #include #include -#include "paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_fuse_compare_zero_copy_tester.cc b/test/cpp/inference/api/analyzer_seq_pool1_fuse_compare_zero_copy_tester.cc similarity index 91% rename from paddle/fluid/inference/tests/api/analyzer_seq_pool1_fuse_compare_zero_copy_tester.cc rename to test/cpp/inference/api/analyzer_seq_pool1_fuse_compare_zero_copy_tester.cc index acf7f94f3fe..d7c588abff6 100644 --- a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_fuse_compare_zero_copy_tester.cc +++ b/test/cpp/inference/api/analyzer_seq_pool1_fuse_compare_zero_copy_tester.cc @@ -16,8 +16,8 @@ limitations under the License. */ #include #include -#include "paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_fuse_statis_tester.cc b/test/cpp/inference/api/analyzer_seq_pool1_fuse_statis_tester.cc similarity index 91% rename from paddle/fluid/inference/tests/api/analyzer_seq_pool1_fuse_statis_tester.cc rename to test/cpp/inference/api/analyzer_seq_pool1_fuse_statis_tester.cc index e72d8484eed..3dce562d711 100644 --- a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_fuse_statis_tester.cc +++ b/test/cpp/inference/api/analyzer_seq_pool1_fuse_statis_tester.cc @@ -16,8 +16,8 @@ limitations under the License. 
*/ #include #include -#include "paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_profile_tester.cc b/test/cpp/inference/api/analyzer_seq_pool1_profile_tester.cc similarity index 90% rename from paddle/fluid/inference/tests/api/analyzer_seq_pool1_profile_tester.cc rename to test/cpp/inference/api/analyzer_seq_pool1_profile_tester.cc index ba49ad8aa45..e85e92e0ced 100644 --- a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_profile_tester.cc +++ b/test/cpp/inference/api/analyzer_seq_pool1_profile_tester.cc @@ -16,8 +16,8 @@ limitations under the License. */ #include #include -#include "paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h b/test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h rename to test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h index 8386ac7445a..3c0ad4b5f82 100644 --- a/paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h +++ b/test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h @@ -20,7 +20,7 @@ limitations under the License. */ #include #include -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_text_classification_tester.cc b/test/cpp/inference/api/analyzer_text_classification_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_text_classification_tester.cc rename to test/cpp/inference/api/analyzer_text_classification_tester.cc index 4e587fa1178..85529ca34ab 100644 --- a/paddle/fluid/inference/tests/api/analyzer_text_classification_tester.cc +++ b/test/cpp/inference/api/analyzer_text_classification_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_transformer_compare_tester.cc b/test/cpp/inference/api/analyzer_transformer_compare_tester.cc similarity index 94% rename from paddle/fluid/inference/tests/api/analyzer_transformer_compare_tester.cc rename to test/cpp/inference/api/analyzer_transformer_compare_tester.cc index 1d511309177..4ab7092f832 100644 --- a/paddle/fluid/inference/tests/api/analyzer_transformer_compare_tester.cc +++ b/test/cpp/inference/api/analyzer_transformer_compare_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#include "paddle/fluid/inference/tests/api/analyzer_transformer_tester_helper.h" +#include "test/cpp/inference/api/analyzer_transformer_tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_transformer_fuse_tester.cc b/test/cpp/inference/api/analyzer_transformer_fuse_tester.cc similarity index 93% rename from paddle/fluid/inference/tests/api/analyzer_transformer_fuse_tester.cc rename to test/cpp/inference/api/analyzer_transformer_fuse_tester.cc index 4e5484c9ea0..14933c8c20e 100644 --- a/paddle/fluid/inference/tests/api/analyzer_transformer_fuse_tester.cc +++ b/test/cpp/inference/api/analyzer_transformer_fuse_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "paddle/fluid/inference/tests/api/analyzer_transformer_tester_helper.h" +#include "test/cpp/inference/api/analyzer_transformer_tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_transformer_profile_tester.cc b/test/cpp/inference/api/analyzer_transformer_profile_tester.cc similarity index 94% rename from paddle/fluid/inference/tests/api/analyzer_transformer_profile_tester.cc rename to test/cpp/inference/api/analyzer_transformer_profile_tester.cc index 9cbba30f9d0..d31b71a77a5 100644 --- a/paddle/fluid/inference/tests/api/analyzer_transformer_profile_tester.cc +++ b/test/cpp/inference/api/analyzer_transformer_profile_tester.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "paddle/fluid/inference/tests/api/analyzer_transformer_tester_helper.h" +#include "test/cpp/inference/api/analyzer_transformer_tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_transformer_tester_helper.h b/test/cpp/inference/api/analyzer_transformer_tester_helper.h similarity index 99% rename from paddle/fluid/inference/tests/api/analyzer_transformer_tester_helper.h rename to test/cpp/inference/api/analyzer_transformer_tester_helper.h index 569d62f637f..06b0b04886e 100644 --- a/paddle/fluid/inference/tests/api/analyzer_transformer_tester_helper.h +++ b/test/cpp/inference/api/analyzer_transformer_tester_helper.h @@ -16,7 +16,7 @@ #include #include -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_vis_tester.cc b/test/cpp/inference/api/analyzer_vis_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_vis_tester.cc rename to test/cpp/inference/api/analyzer_vis_tester.cc index 0581eb614a4..4db6e818293 100644 --- a/paddle/fluid/inference/tests/api/analyzer_vis_tester.cc +++ b/test/cpp/inference/api/analyzer_vis_tester.cc @@ -17,7 +17,7 @@ limitations under the License. 
*/ #include #include -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_vit_ocr_tester.cc b/test/cpp/inference/api/analyzer_vit_ocr_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/analyzer_vit_ocr_tester.cc rename to test/cpp/inference/api/analyzer_vit_ocr_tester.cc index 8180d951050..3582fc22c9c 100644 --- a/paddle/fluid/inference/tests/api/analyzer_vit_ocr_tester.cc +++ b/test/cpp/inference/api/analyzer_vit_ocr_tester.cc @@ -15,7 +15,7 @@ limitations under the License. */ #include #include -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/analyzer_zerocopy_tensor_tester.cc b/test/cpp/inference/api/analyzer_zerocopy_tensor_tester.cc similarity index 97% rename from paddle/fluid/inference/tests/api/analyzer_zerocopy_tensor_tester.cc rename to test/cpp/inference/api/analyzer_zerocopy_tensor_tester.cc index 76c00ee1c28..753c259d2a7 100644 --- a/paddle/fluid/inference/tests/api/analyzer_zerocopy_tensor_tester.cc +++ b/test/cpp/inference/api/analyzer_zerocopy_tensor_tester.cc @@ -16,8 +16,8 @@ #include "paddle/fluid/framework/op_desc.h" #include "paddle/fluid/framework/program_desc.h" #include "paddle/fluid/framework/scope.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "paddle/fluid/inference/utils/singleton.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/api/api_impl_tester.cc b/test/cpp/inference/api/api_impl_tester.cc similarity index 99% rename from paddle/fluid/inference/api/api_impl_tester.cc rename to test/cpp/inference/api/api_impl_tester.cc index 1416dacb833..3270e216586 100644 --- a/paddle/fluid/inference/api/api_impl_tester.cc +++ b/test/cpp/inference/api/api_impl_tester.cc @@ -20,7 +20,7 @@ limitations under the License. 
*/ #include "gflags/gflags.h" #include "paddle/fluid/framework/convert_utils.h" #include "paddle/fluid/inference/api/api_impl.h" -#include "paddle/fluid/inference/tests/test_helper.h" +#include "test/cpp/inference/test_helper.h" #ifdef __clang__ #define ACC_DIFF 4e-3 diff --git a/paddle/fluid/inference/api/api_tester.cc b/test/cpp/inference/api/api_tester.cc similarity index 100% rename from paddle/fluid/inference/api/api_tester.cc rename to test/cpp/inference/api/api_tester.cc diff --git a/paddle/fluid/inference/tests/api/config_printer.h b/test/cpp/inference/api/config_printer.h similarity index 100% rename from paddle/fluid/inference/tests/api/config_printer.h rename to test/cpp/inference/api/config_printer.h diff --git a/paddle/fluid/inference/tests/api/full_ILSVRC2012_val_preprocess.py b/test/cpp/inference/api/full_ILSVRC2012_val_preprocess.py similarity index 100% rename from paddle/fluid/inference/tests/api/full_ILSVRC2012_val_preprocess.py rename to test/cpp/inference/api/full_ILSVRC2012_val_preprocess.py diff --git a/paddle/fluid/inference/tests/api/full_pascalvoc_test_preprocess.py b/test/cpp/inference/api/full_pascalvoc_test_preprocess.py similarity index 100% rename from paddle/fluid/inference/tests/api/full_pascalvoc_test_preprocess.py rename to test/cpp/inference/api/full_pascalvoc_test_preprocess.py diff --git a/paddle/fluid/inference/tests/api/gpu_ernie_half_test.cc b/test/cpp/inference/api/gpu_ernie_half_test.cc similarity index 99% rename from paddle/fluid/inference/tests/api/gpu_ernie_half_test.cc rename to test/cpp/inference/api/gpu_ernie_half_test.cc index 6b83e89a444..a897f6f7003 100644 --- a/paddle/fluid/inference/tests/api/gpu_ernie_half_test.cc +++ b/test/cpp/inference/api/gpu_ernie_half_test.cc @@ -13,7 +13,7 @@ // limitations under the License. #include "paddle/fluid/inference/api/paddle_inference_api.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/api/helper_test.cc b/test/cpp/inference/api/helper_test.cc similarity index 100% rename from paddle/fluid/inference/api/helper_test.cc rename to test/cpp/inference/api/helper_test.cc diff --git a/paddle/fluid/inference/tests/api/int8_mkldnn_quantization.md b/test/cpp/inference/api/int8_mkldnn_quantization.md similarity index 100% rename from paddle/fluid/inference/tests/api/int8_mkldnn_quantization.md rename to test/cpp/inference/api/int8_mkldnn_quantization.md diff --git a/paddle/fluid/inference/tests/api/ipu_ernie_fp16_test.cc b/test/cpp/inference/api/ipu_ernie_fp16_test.cc similarity index 98% rename from paddle/fluid/inference/tests/api/ipu_ernie_fp16_test.cc rename to test/cpp/inference/api/ipu_ernie_fp16_test.cc index fa775bd9a9c..92ed63167ca 100644 --- a/paddle/fluid/inference/tests/api/ipu_ernie_fp16_test.cc +++ b/test/cpp/inference/api/ipu_ernie_fp16_test.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/ipu_ernie_test.cc b/test/cpp/inference/api/ipu_ernie_test.cc similarity index 98% rename from paddle/fluid/inference/tests/api/ipu_ernie_test.cc rename to test/cpp/inference/api/ipu_ernie_test.cc index cbf35b5fca4..9687e760ac5 100644 --- a/paddle/fluid/inference/tests/api/ipu_ernie_test.cc +++ b/test/cpp/inference/api/ipu_ernie_test.cc @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/ipu_multi_model_profile.cc b/test/cpp/inference/api/ipu_multi_model_profile.cc similarity index 98% rename from paddle/fluid/inference/tests/api/ipu_multi_model_profile.cc rename to test/cpp/inference/api/ipu_multi_model_profile.cc index 9561f50f6a6..3c5b1af1594 100644 --- a/paddle/fluid/inference/tests/api/ipu_multi_model_profile.cc +++ b/test/cpp/inference/api/ipu_multi_model_profile.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/ipu_resnet50_fp16_test.cc b/test/cpp/inference/api/ipu_resnet50_fp16_test.cc similarity index 98% rename from paddle/fluid/inference/tests/api/ipu_resnet50_fp16_test.cc rename to test/cpp/inference/api/ipu_resnet50_fp16_test.cc index 7624e23ab15..99f0d58926d 100644 --- a/paddle/fluid/inference/tests/api/ipu_resnet50_fp16_test.cc +++ b/test/cpp/inference/api/ipu_resnet50_fp16_test.cc @@ -18,7 +18,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/ipu_resnet50_test.cc b/test/cpp/inference/api/ipu_resnet50_test.cc similarity index 99% rename from paddle/fluid/inference/tests/api/ipu_resnet50_test.cc rename to test/cpp/inference/api/ipu_resnet50_test.cc index ab7d8bd368e..5a414bf9415 100644 --- a/paddle/fluid/inference/tests/api/ipu_resnet50_test.cc +++ b/test/cpp/inference/api/ipu_resnet50_test.cc @@ -15,7 +15,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/ipu_word2vec_sample.cc b/test/cpp/inference/api/ipu_word2vec_sample.cc similarity index 100% rename from paddle/fluid/inference/tests/api/ipu_word2vec_sample.cc rename to test/cpp/inference/api/ipu_word2vec_sample.cc diff --git a/paddle/fluid/inference/tests/api/lite_mul_model_test.cc b/test/cpp/inference/api/lite_mul_model_test.cc similarity index 98% rename from paddle/fluid/inference/tests/api/lite_mul_model_test.cc rename to test/cpp/inference/api/lite_mul_model_test.cc index 8bb5b4c6df7..e600a3bab91 100644 --- a/paddle/fluid/inference/tests/api/lite_mul_model_test.cc +++ b/test/cpp/inference/api/lite_mul_model_test.cc @@ -20,7 +20,7 @@ limitations under the License. 
*/ #include // NOLINT #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/lite_resnet50_test.cc b/test/cpp/inference/api/lite_resnet50_test.cc similarity index 98% rename from paddle/fluid/inference/tests/api/lite_resnet50_test.cc rename to test/cpp/inference/api/lite_resnet50_test.cc index 75df54631da..e35e2838823 100644 --- a/paddle/fluid/inference/tests/api/lite_resnet50_test.cc +++ b/test/cpp/inference/api/lite_resnet50_test.cc @@ -18,7 +18,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/mkldnn_quantizer_config_tester.cc b/test/cpp/inference/api/mkldnn_quantizer_config_tester.cc similarity index 98% rename from paddle/fluid/inference/tests/api/mkldnn_quantizer_config_tester.cc rename to test/cpp/inference/api/mkldnn_quantizer_config_tester.cc index e92d58f8ea8..6e372263ca8 100644 --- a/paddle/fluid/inference/tests/api/mkldnn_quantizer_config_tester.cc +++ b/test/cpp/inference/api/mkldnn_quantizer_config_tester.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "paddle/fluid/inference/api/paddle_mkldnn_quantizer_config.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/api/mkldnn_quantizer_tester.cc b/test/cpp/inference/api/mkldnn_quantizer_tester.cc similarity index 100% rename from paddle/fluid/inference/api/mkldnn_quantizer_tester.cc rename to test/cpp/inference/api/mkldnn_quantizer_tester.cc diff --git a/paddle/fluid/inference/tests/api/paddle_infer_api_copy_tensor_tester.cc b/test/cpp/inference/api/paddle_infer_api_copy_tensor_tester.cc similarity index 99% rename from paddle/fluid/inference/tests/api/paddle_infer_api_copy_tensor_tester.cc rename to test/cpp/inference/api/paddle_infer_api_copy_tensor_tester.cc index 74ce8260564..4674b77091a 100644 --- a/paddle/fluid/inference/tests/api/paddle_infer_api_copy_tensor_tester.cc +++ b/test/cpp/inference/api/paddle_infer_api_copy_tensor_tester.cc @@ -22,8 +22,8 @@ limitations under the License. */ #include "gflags/gflags.h" #include "glog/logging.h" #include "paddle/fluid/inference/api/paddle_infer_contrib.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" #include "paddle/fluid/platform/float16.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle_infer { diff --git a/paddle/fluid/inference/tests/api/paddle_infer_api_errors_tester.cc b/test/cpp/inference/api/paddle_infer_api_errors_tester.cc similarity index 100% rename from paddle/fluid/inference/tests/api/paddle_infer_api_errors_tester.cc rename to test/cpp/inference/api/paddle_infer_api_errors_tester.cc diff --git a/paddle/fluid/inference/tests/api/paddle_infer_api_test.cc b/test/cpp/inference/api/paddle_infer_api_test.cc similarity index 98% rename from paddle/fluid/inference/tests/api/paddle_infer_api_test.cc rename to test/cpp/inference/api/paddle_infer_api_test.cc index 9029cefc9a4..c59ac40e5e5 100644 --- a/paddle/fluid/inference/tests/api/paddle_infer_api_test.cc +++ b/test/cpp/inference/api/paddle_infer_api_test.cc @@ -16,7 +16,7 @@ limitations under the License. 
*/ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle_infer { diff --git a/paddle/fluid/inference/tests/api/test_detection_dataset_preprocess.py b/test/cpp/inference/api/test_detection_dataset_preprocess.py similarity index 100% rename from paddle/fluid/inference/tests/api/test_detection_dataset_preprocess.py rename to test/cpp/inference/api/test_detection_dataset_preprocess.py diff --git a/paddle/fluid/inference/tests/api/tester_helper.h b/test/cpp/inference/api/tester_helper.h similarity index 99% rename from paddle/fluid/inference/tests/api/tester_helper.h rename to test/cpp/inference/api/tester_helper.h index 39751eb0d8c..fe016abdaee 100644 --- a/paddle/fluid/inference/tests/api/tester_helper.h +++ b/test/cpp/inference/api/tester_helper.h @@ -35,10 +35,10 @@ #include "paddle/fluid/inference/api/helper.h" #include "paddle/fluid/inference/api/paddle_inference_api.h" #include "paddle/fluid/inference/api/paddle_inference_pass.h" -#include "paddle/fluid/inference/tests/api/config_printer.h" -#include "paddle/fluid/inference/tests/test_helper.h" #include "paddle/fluid/inference/utils/benchmark.h" #include "paddle/fluid/platform/profiler/event_tracing.h" +#include "test/cpp/inference/api/config_printer.h" +#include "test/cpp/inference/test_helper.h" DEFINE_string(model_name, "", "model name"); DEFINE_string(infer_model, "", "model path"); diff --git a/paddle/fluid/inference/tests/api/trt_cascade_rcnn_test.cc b/test/cpp/inference/api/trt_cascade_rcnn_test.cc similarity index 96% rename from paddle/fluid/inference/tests/api/trt_cascade_rcnn_test.cc rename to test/cpp/inference/api/trt_cascade_rcnn_test.cc index 9cb215f8675..86759c33e47 100644 --- a/paddle/fluid/inference/tests/api/trt_cascade_rcnn_test.cc +++ b/test/cpp/inference/api/trt_cascade_rcnn_test.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_fp16_serialize_deserialize_test.cc b/test/cpp/inference/api/trt_dynamic_shape_ernie_fp16_serialize_deserialize_test.cc similarity index 91% rename from paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_fp16_serialize_deserialize_test.cc rename to test/cpp/inference/api/trt_dynamic_shape_ernie_fp16_serialize_deserialize_test.cc index b0c4c13dbbc..806950ca8d6 100644 --- a/paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_fp16_serialize_deserialize_test.cc +++ b/test/cpp/inference/api/trt_dynamic_shape_ernie_fp16_serialize_deserialize_test.cc @@ -24,7 +24,7 @@ limitations under the License. 
*/ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h" +#include "test/cpp/inference/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.cc b/test/cpp/inference/api/trt_dynamic_shape_ernie_serialize_deserialize_test.cc similarity index 92% rename from paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.cc rename to test/cpp/inference/api/trt_dynamic_shape_ernie_serialize_deserialize_test.cc index 56da226e273..bcf82d66f78 100644 --- a/paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.cc +++ b/test/cpp/inference/api/trt_dynamic_shape_ernie_serialize_deserialize_test.cc @@ -24,7 +24,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h" +#include "test/cpp/inference/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h b/test/cpp/inference/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h similarity index 98% rename from paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h rename to test/cpp/inference/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h index 049d2be3f9e..09b20d23e97 100644 --- a/paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h +++ b/test/cpp/inference/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h @@ -26,7 +26,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_test.cc b/test/cpp/inference/api/trt_dynamic_shape_ernie_test.cc similarity index 99% rename from paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_test.cc rename to test/cpp/inference/api/trt_dynamic_shape_ernie_test.cc index ae40c249d53..8abf7224a13 100644 --- a/paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_test.cc +++ b/test/cpp/inference/api/trt_dynamic_shape_ernie_test.cc @@ -17,7 +17,7 @@ limitations under the License. */ #include "gflags/gflags.h" #include "paddle/fluid/inference/tensorrt/helper.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_dynamic_shape_test.cc b/test/cpp/inference/api/trt_dynamic_shape_test.cc similarity index 99% rename from paddle/fluid/inference/tests/api/trt_dynamic_shape_test.cc rename to test/cpp/inference/api/trt_dynamic_shape_test.cc index be548701819..505c0cdf083 100644 --- a/paddle/fluid/inference/tests/api/trt_dynamic_shape_test.cc +++ b/test/cpp/inference/api/trt_dynamic_shape_test.cc @@ -16,7 +16,7 @@ limitations under the License. 
*/ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_dynamic_shape_transformer_prune_test.cc b/test/cpp/inference/api/trt_dynamic_shape_transformer_prune_test.cc similarity index 98% rename from paddle/fluid/inference/tests/api/trt_dynamic_shape_transformer_prune_test.cc rename to test/cpp/inference/api/trt_dynamic_shape_transformer_prune_test.cc index 937303b595e..2e71da39b5f 100644 --- a/paddle/fluid/inference/tests/api/trt_dynamic_shape_transformer_prune_test.cc +++ b/test/cpp/inference/api/trt_dynamic_shape_transformer_prune_test.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_fc_prelu_test.cc b/test/cpp/inference/api/trt_fc_prelu_test.cc similarity index 97% rename from paddle/fluid/inference/tests/api/trt_fc_prelu_test.cc rename to test/cpp/inference/api/trt_fc_prelu_test.cc index 70c1eb8bab2..a1ef3340759 100644 --- a/paddle/fluid/inference/tests/api/trt_fc_prelu_test.cc +++ b/test/cpp/inference/api/trt_fc_prelu_test.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_instance_norm_converter_test.cc b/test/cpp/inference/api/trt_instance_norm_converter_test.cc similarity index 96% rename from paddle/fluid/inference/tests/api/trt_instance_norm_converter_test.cc rename to test/cpp/inference/api/trt_instance_norm_converter_test.cc index 27d4967c418..b58ddb2d919 100644 --- a/paddle/fluid/inference/tests/api/trt_instance_norm_converter_test.cc +++ b/test/cpp/inference/api/trt_instance_norm_converter_test.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_mobilenet_test.cc b/test/cpp/inference/api/trt_mobilenet_test.cc similarity index 98% rename from paddle/fluid/inference/tests/api/trt_mobilenet_test.cc rename to test/cpp/inference/api/trt_mobilenet_test.cc index 45c14f4fc8b..5c0519c067f 100644 --- a/paddle/fluid/inference/tests/api/trt_mobilenet_test.cc +++ b/test/cpp/inference/api/trt_mobilenet_test.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_quant_int8_test.cc b/test/cpp/inference/api/trt_quant_int8_test.cc similarity index 97% rename from paddle/fluid/inference/tests/api/trt_quant_int8_test.cc rename to test/cpp/inference/api/trt_quant_int8_test.cc index ada7c6b60af..f40b2197fb2 100644 --- a/paddle/fluid/inference/tests/api/trt_quant_int8_test.cc +++ b/test/cpp/inference/api/trt_quant_int8_test.cc @@ -18,7 +18,7 @@ limitations under the License. 
*/ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_quant_int8_yolov3_r50_test.cc b/test/cpp/inference/api/trt_quant_int8_yolov3_r50_test.cc similarity index 97% rename from paddle/fluid/inference/tests/api/trt_quant_int8_yolov3_r50_test.cc rename to test/cpp/inference/api/trt_quant_int8_yolov3_r50_test.cc index cc3df63e076..ce058a1275c 100644 --- a/paddle/fluid/inference/tests/api/trt_quant_int8_yolov3_r50_test.cc +++ b/test/cpp/inference/api/trt_quant_int8_yolov3_r50_test.cc @@ -15,7 +15,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_resnet50_test.cc b/test/cpp/inference/api/trt_resnet50_test.cc similarity index 93% rename from paddle/fluid/inference/tests/api/trt_resnet50_test.cc rename to test/cpp/inference/api/trt_resnet50_test.cc index cdc6586f127..8dde6a0f5dd 100644 --- a/paddle/fluid/inference/tests/api/trt_resnet50_test.cc +++ b/test/cpp/inference/api/trt_resnet50_test.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_resnext_test.cc b/test/cpp/inference/api/trt_resnext_test.cc similarity index 94% rename from paddle/fluid/inference/tests/api/trt_resnext_test.cc rename to test/cpp/inference/api/trt_resnext_test.cc index 8d4e331fa97..a80058468d5 100644 --- a/paddle/fluid/inference/tests/api/trt_resnext_test.cc +++ b/test/cpp/inference/api/trt_resnext_test.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_split_converter_test.cc b/test/cpp/inference/api/trt_split_converter_test.cc similarity index 96% rename from paddle/fluid/inference/tests/api/trt_split_converter_test.cc rename to test/cpp/inference/api/trt_split_converter_test.cc index 19c6103bf7a..ca41ac5681e 100644 --- a/paddle/fluid/inference/tests/api/trt_split_converter_test.cc +++ b/test/cpp/inference/api/trt_split_converter_test.cc @@ -16,7 +16,7 @@ limitations under the License. */ #include #include "gflags/gflags.h" -#include "paddle/fluid/inference/tests/api/trt_test_helper.h" +#include "test/cpp/inference/api/trt_test_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/api/trt_test_helper.h b/test/cpp/inference/api/trt_test_helper.h similarity index 99% rename from paddle/fluid/inference/tests/api/trt_test_helper.h rename to test/cpp/inference/api/trt_test_helper.h index 96dfe7e8767..db446e64054 100644 --- a/paddle/fluid/inference/tests/api/trt_test_helper.h +++ b/test/cpp/inference/api/trt_test_helper.h @@ -20,7 +20,7 @@ limitations under the License. 
*/ #include "gflags/gflags.h" #include "glog/logging.h" #include "gtest/gtest.h" -#include "paddle/fluid/inference/tests/api/tester_helper.h" +#include "test/cpp/inference/api/tester_helper.h" namespace paddle { namespace inference { diff --git a/paddle/fluid/inference/tests/infer_ut/CMakeLists.txt b/test/cpp/inference/infer_ut/CMakeLists.txt similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/CMakeLists.txt rename to test/cpp/inference/infer_ut/CMakeLists.txt diff --git a/paddle/fluid/inference/tests/infer_ut/README.md b/test/cpp/inference/infer_ut/README.md similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/README.md rename to test/cpp/inference/infer_ut/README.md diff --git a/paddle/fluid/inference/tests/infer_ut/external-cmake/gtest-cpp.cmake b/test/cpp/inference/infer_ut/external-cmake/gtest-cpp.cmake similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/external-cmake/gtest-cpp.cmake rename to test/cpp/inference/infer_ut/external-cmake/gtest-cpp.cmake diff --git a/paddle/fluid/inference/tests/infer_ut/run.sh b/test/cpp/inference/infer_ut/run.sh similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/run.sh rename to test/cpp/inference/infer_ut/run.sh diff --git a/paddle/fluid/inference/tests/infer_ut/test_LeViT.cc b/test/cpp/inference/infer_ut/test_LeViT.cc similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/test_LeViT.cc rename to test/cpp/inference/infer_ut/test_LeViT.cc diff --git a/paddle/fluid/inference/tests/infer_ut/test_det_mv3_db.cc b/test/cpp/inference/infer_ut/test_det_mv3_db.cc similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/test_det_mv3_db.cc rename to test/cpp/inference/infer_ut/test_det_mv3_db.cc diff --git a/paddle/fluid/inference/tests/infer_ut/test_ernie_text_cls.cc b/test/cpp/inference/infer_ut/test_ernie_text_cls.cc similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/test_ernie_text_cls.cc rename to test/cpp/inference/infer_ut/test_ernie_text_cls.cc diff --git a/paddle/fluid/inference/tests/infer_ut/test_ernie_xnli_int8.cc b/test/cpp/inference/infer_ut/test_ernie_xnli_int8.cc similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/test_ernie_xnli_int8.cc rename to test/cpp/inference/infer_ut/test_ernie_xnli_int8.cc diff --git a/paddle/fluid/inference/tests/infer_ut/test_helper.h b/test/cpp/inference/infer_ut/test_helper.h similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/test_helper.h rename to test/cpp/inference/infer_ut/test_helper.h diff --git a/paddle/fluid/inference/tests/infer_ut/test_mobilnetv1.cc b/test/cpp/inference/infer_ut/test_mobilnetv1.cc similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/test_mobilnetv1.cc rename to test/cpp/inference/infer_ut/test_mobilnetv1.cc diff --git a/paddle/fluid/inference/tests/infer_ut/test_ppyolo_mbv3.cc b/test/cpp/inference/infer_ut/test_ppyolo_mbv3.cc similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/test_ppyolo_mbv3.cc rename to test/cpp/inference/infer_ut/test_ppyolo_mbv3.cc diff --git a/paddle/fluid/inference/tests/infer_ut/test_ppyolov2_r50vd.cc b/test/cpp/inference/infer_ut/test_ppyolov2_r50vd.cc similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/test_ppyolov2_r50vd.cc rename to test/cpp/inference/infer_ut/test_ppyolov2_r50vd.cc diff --git a/paddle/fluid/inference/tests/infer_ut/test_resnet50.cc b/test/cpp/inference/infer_ut/test_resnet50.cc similarity index 100% rename from 
paddle/fluid/inference/tests/infer_ut/test_resnet50.cc rename to test/cpp/inference/infer_ut/test_resnet50.cc diff --git a/paddle/fluid/inference/tests/infer_ut/test_resnet50_quant.cc b/test/cpp/inference/infer_ut/test_resnet50_quant.cc similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/test_resnet50_quant.cc rename to test/cpp/inference/infer_ut/test_resnet50_quant.cc diff --git a/paddle/fluid/inference/tests/infer_ut/test_suite.h b/test/cpp/inference/infer_ut/test_suite.h similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/test_suite.h rename to test/cpp/inference/infer_ut/test_suite.h diff --git a/paddle/fluid/inference/tests/infer_ut/test_yolov3.cc b/test/cpp/inference/infer_ut/test_yolov3.cc similarity index 100% rename from paddle/fluid/inference/tests/infer_ut/test_yolov3.cc rename to test/cpp/inference/infer_ut/test_yolov3.cc diff --git a/paddle/fluid/inference/tests/test.cmake b/test/cpp/inference/test.cmake similarity index 100% rename from paddle/fluid/inference/tests/test.cmake rename to test/cpp/inference/test.cmake diff --git a/paddle/fluid/inference/tests/test_helper.h b/test/cpp/inference/test_helper.h similarity index 100% rename from paddle/fluid/inference/tests/test_helper.h rename to test/cpp/inference/test_helper.h -- GitLab