Unverified commit 008debe7, authored by T tianshuo78520a, committed by GitHub

Mv inference ut (#52987)

* mv inference/api infer_ut

* mv test

* merge develop fix error

* fix

* fix build error

* fix build error

* fix bug

* fix tester_helper.h

* fix analyzer_transformer_profile_tester.cc

* fix

* fix mac

* fix mac

* fix error

* fix

* fix
Parent 4efca9fb
...@@ -13,10 +13,6 @@
# limitations under the License.
#
if(WITH_TESTING)
include(tests/test.cmake) # some generic cmake function for inference
endif()
cc_library(
paddle_inference_io
SRCS io.cc
...@@ -91,10 +87,6 @@ endif()
# C inference API
add_subdirectory(capi_exp)
if(WITH_TESTING AND WITH_INFERENCE_API_TEST)
add_subdirectory(tests/api)
endif()
set(SHARED_INFERENCE_SRCS
io.cc
${CMAKE_CURRENT_SOURCE_DIR}/../framework/data_feed.cc
...
...@@ -36,84 +36,3 @@ cc_library(
analysis
SRCS analyzer.cc
DEPS ${analysis_deps} analysis_helper analysis_pass ${INFER_IR_PASSES})
function(inference_analysis_test_build TARGET)
if(WITH_TESTING)
set(options "")
set(oneValueArgs "")
set(multiValueArgs SRCS EXTRA_DEPS)
cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}"
"${multiValueArgs}" ${ARGN})
inference_base_test_build(
${TARGET}
SRCS
${analysis_test_SRCS}
DEPS
${analysis_test_EXTRA_DEPS}
analysis
pass
${GLOB_PASS_LIB})
endif()
endfunction()
function(inference_analysis_test_run TARGET)
if(WITH_TESTING)
set(options "")
set(oneValueArgs "")
set(multiValueArgs COMMAND ARGS)
cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}"
"${multiValueArgs}" ${ARGN})
inference_base_test_run(${TARGET} COMMAND ${analysis_test_COMMAND} ARGS
${analysis_test_ARGS})
set_tests_properties(${TARGET} PROPERTIES LABELS "RUN_TYPE=INFER")
endif()
endfunction()
function(inference_analysis_test TARGET)
if(WITH_TESTING)
set(options "")
set(oneValueArgs "")
set(multiValueArgs SRCS ARGS EXTRA_DEPS)
cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}"
"${multiValueArgs}" ${ARGN})
inference_base_test_build(
${TARGET}
SRCS
${analysis_test_SRCS}
DEPS
${analysis_test_EXTRA_DEPS}
analysis
pass
${GLOB_PASS_LIB})
inference_base_test_run(${TARGET} COMMAND ${TARGET} ARGS
${analysis_test_ARGS})
set_tests_properties(${TARGET} PROPERTIES LABELS "RUN_TYPE=INFER")
endif()
endfunction()
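# A minimal usage sketch of the split build/run helpers above (target name and
# model path are hypothetical): build one tester binary once, then register
# several ctest cases that reuse it with different flags.
#
# inference_analysis_test_build(example_tester SRCS example_tester.cc
#                               EXTRA_DEPS paddle_inference_shared)
# inference_analysis_test_run(example_case_fp32 COMMAND example_tester ARGS
#                             --infer_model=${EXAMPLE_MODEL_DIR}/model)
# inference_analysis_test_run(example_case_bs2 COMMAND example_tester ARGS
#                             --infer_model=${EXAMPLE_MODEL_DIR}/model
#                             --batch_size=2)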
if(NOT APPLE AND NOT WIN32)
inference_analysis_test(
test_analyzer
SRCS
analyzer_tester.cc
EXTRA_DEPS
reset_tensor_array
paddle_inference_shared
ARGS
--inference_model_dir=${WORD2VEC_MODEL_DIR})
elseif(WIN32)
inference_analysis_test(
test_analyzer
SRCS
analyzer_tester.cc
EXTRA_DEPS
reset_tensor_array
paddle_inference_api
ARGS
--inference_model_dir=${WORD2VEC_MODEL_DIR})
if(WITH_ONNXRUNTIME AND WIN32)
# Copy onnxruntime for some C++ tests on Windows; since the tests are built
# only in CI, assume the Windows generator is Ninja.
copy_onnx(test_analyzer)
endif()
endif()
...@@ -97,85 +97,8 @@ else()
infer_io_utils model_utils)
endif()
cc_test(
test_paddle_inference_api
SRCS api_tester.cc
DEPS paddle_inference_api)
cc_test(
inference_api_helper_test
SRCS helper_test.cc
DEPS paddle_inference_api)
if(WITH_ONNXRUNTIME AND WIN32)
# Copy onnxruntime for some C++ tests on Windows; since the tests are built
# only in CI, assume the Windows generator is Ninja.
copy_onnx(test_paddle_inference_api)
endif()
if(WITH_TESTING)
if(NOT APPLE AND NOT WIN32)
inference_base_test(
test_api_impl
SRCS
api_impl_tester.cc
DEPS
paddle_inference_shared
ARGS
--word2vec_dirname=${WORD2VEC_MODEL_DIR}
--book_dirname=${IMG_CLS_RESNET_INSTALL_DIR})
elseif(WIN32)
inference_base_test(
test_api_impl
SRCS
api_impl_tester.cc
DEPS
${inference_deps}
ARGS
--word2vec_dirname=${WORD2VEC_MODEL_DIR}
--book_dirname=${IMG_CLS_RESNET_INSTALL_DIR})
endif()
endif()
if(NOT APPLE AND NOT WIN32)
cc_test_old(
test_analysis_predictor
SRCS
analysis_predictor_tester.cc
DEPS
paddle_inference_shared
ARGS
--dirname=${WORD2VEC_MODEL_DIR})
elseif(WIN32)
cc_test_old(
test_analysis_predictor
SRCS
analysis_predictor_tester.cc
DEPS
analysis_predictor
benchmark
${inference_deps}
ARGS
--dirname=${WORD2VEC_MODEL_DIR})
endif()
if(WITH_TESTING AND WITH_MKLDNN)
if(NOT APPLE AND NOT WIN32)
cc_test(
test_mkldnn_quantizer
SRCS mkldnn_quantizer_tester.cc
DEPS paddle_inference_shared ARGS --dirname=${WORD2VEC_MODEL_DIR})
elseif(WIN32)
cc_test(
test_mkldnn_quantizer
SRCS mkldnn_quantizer_tester.cc
DEPS analysis_predictor benchmark ${inference_deps} ARGS
--dirname=${WORD2VEC_MODEL_DIR})
endif()
endif()
if(WITH_TESTING AND TEST test_api_impl)
if(NOT APPLE)
set_tests_properties(test_api_impl PROPERTIES TIMEOUT 120)
endif()
endif()
...@@ -25,9 +25,9 @@
#include "paddle/fluid/inference/api/onnxruntime_predictor.h"
#include "paddle/fluid/inference/api/paddle_api.h"
#include "paddle/fluid/inference/api/paddle_inference_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h"
#include "paddle/fluid/inference/utils/io_utils.h"
#include "paddle/phi/backends/cpu/cpu_info.h"
#include "test/cpp/inference/api/tester_helper.h"
DEFINE_string(dirname, "", "dirname to tests.");
...
# On Windows, the C API tests must link both shared libraries to avoid symbol
# redefinition; on Linux they cannot, or graph_to_program would be registered
# more than once. Both platforms could link only paddle_inference_c, but that
# would increase the size of the build folder by about 30 GB.
if(WIN32)
set(INFERENCE_C_EXTRA_DEPS paddle_inference_shared paddle_inference_c_shared)
else()
set(INFERENCE_C_EXTRA_DEPS paddle_inference_shared paddle_inference_c)
endif()
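# A hedged sketch (hypothetical target name and model path) of how
# ${INFERENCE_C_EXTRA_DEPS} is consumed; the real C API tests below follow
# this pattern:
#
# inference_analysis_test(example_capi_test SRCS example_capi_tester.cc
#                         EXTRA_DEPS ${INFERENCE_C_EXTRA_DEPS} ARGS
#                         --infer_model=${EXAMPLE_MODEL_DIR}/model)
# if(WIN32)
#   target_link_libraries(example_capi_test paddle_inference_c_shared)
# else()
#   target_link_libraries(example_capi_test paddle_inference_c)
# endif()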
function(download_data install_dir data_file check_sum)
string(REGEX MATCH "[^/\\]+$" file_name ${data_file})
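  # "[^/\\]+$" keeps the trailing path component, i.e. the archive file name,
  # which is then used for the local existence check below.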
if(NOT EXISTS ${install_dir}/${file_name})
inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}
${data_file} ${check_sum})
endif()
endfunction()
function(download_data_without_verify install_dir data_file)
string(REGEX MATCH "[^/\\]+$" file_name ${data_file})
if(NOT EXISTS ${install_dir}/${file_name})
inference_download_and_uncompress_without_verify(
${install_dir} ${INFERENCE_URL} ${data_file})
endif()
endfunction()
function(download_int8_data install_dir data_file check_sum)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/int8
${data_file} ${check_sum})
endif()
endfunction()
function(download_int8_data_without_verify install_dir data_file)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress_without_verify(
${install_dir} ${INFERENCE_URL}/int8 ${data_file})
endif()
endfunction()
function(download_bfloat16_data install_dir data_file check_sum)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/bfloat16
${data_file} ${check_sum})
endif()
endfunction()
function(download_bfloat16_data_without_verify install_dir data_file)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress_without_verify(
${install_dir} ${INFERENCE_URL}/bfloat16 ${data_file})
endif()
endfunction()
function(download_GRU_data install_dir data_file check_sum)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/gru
${data_file} ${check_sum})
endif()
endfunction()
function(download_GRU_data_without_verify install_dir data_file)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress_without_verify(
${install_dir} ${INFERENCE_URL}/gru ${data_file})
endif()
endfunction()
function(download_quant_data install_dir data_file check_sum)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress(
${install_dir} ${INFERENCE_URL}/int8/QAT_models ${data_file} ${check_sum})
endif()
endfunction()
function(download_quant_data_without_verify install_dir data_file)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress_without_verify(
${install_dir} ${INFERENCE_URL}/int8/QAT_models ${data_file})
endif()
endfunction()
function(download_model_and_data install_dir model_name model_check_sum
data_name data_check_sum)
download_data(${install_dir} ${model_name} ${model_check_sum})
download_data(${install_dir} ${data_name} ${data_check_sum})
endfunction()
function(download_model_and_data_without_verify install_dir model_name
data_name)
download_data_without_verify(${install_dir} ${model_name})
download_data_without_verify(${install_dir} ${data_name})
endfunction()
function(download_result install_dir result_name check_sum)
download_data(${install_dir} ${result_name} ${check_sum})
endfunction()
function(download_result_without_verify install_dir result_name)
download_data_without_verify(${install_dir} ${result_name})
endfunction()
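# The helpers above fetch archives from ${INFERENCE_URL} (or a subdirectory of
# it) and unpack them under the given install dir; the *_without_verify
# variants skip the MD5 check. A hedged usage sketch with hypothetical archive
# names and placeholder checksums:
#
# set(EXAMPLE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/example")
# download_model_and_data(${EXAMPLE_INSTALL_DIR} "example_model.tar.gz"
#                         <model_md5> "example_data.txt.tar.gz" <data_md5>)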
function(inference_analysis_api_test target install_dir filename)
inference_analysis_test(
${target}
SRCS
${filename}
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${install_dir}/model
--infer_data=${install_dir}/data.txt
--refer_result=${install_dir}/result.txt)
endfunction()
function(inference_analysis_api_int8_test target install_dir filename)
inference_analysis_test(
${target}
SRCS
${filename}
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${install_dir}/model
--infer_data=${install_dir}/data.txt
--refer_result=${install_dir}/result.txt
--accuracy=0.8
--batch_size=5
--enable_int8_ptq=true)
endfunction()
function(inference_multiple_models_analysis_api_test target install_dir
filename)
inference_analysis_test(
${target}
SRCS
${filename}
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${install_dir}/mobilenet_v2_models/1
--infer_model2=${install_dir}/mobilenet_v2_models/xx
--infer_model3=${install_dir}/mobilenet_v2_models/3)
endfunction()
function(inference_analysis_api_test_build TARGET_NAME filename)
inference_analysis_test_build(${TARGET_NAME} SRCS ${filename} EXTRA_DEPS
paddle_inference_shared)
endfunction()
function(inference_analysis_api_int8_test_run TARGET_NAME test_binary model_dir
data_path)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${model_dir}/model
--infer_data=${data_path}
--warmup_batch_size=${WARMUP_BATCH_SIZE}
--batch_size=50
--enable_int8_ptq=true
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--iterations=2)
endfunction()
function(inference_analysis_api_int8_test_run_custom_warmup_batch_size
TARGET_NAME test_binary model_dir data_path warmup_batch_size)
set(WARMUP_BATCH_SIZE ${warmup_batch_size})
inference_analysis_api_int8_test_run(${TARGET_NAME} ${test_binary}
${model_dir} ${data_path})
endfunction()
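# Note: the set(WARMUP_BATCH_SIZE ...) above relies on CMake scoping rules:
# a variable set inside a function is visible to the functions it calls, so
# the nested inference_analysis_api_int8_test_run resolves
# ${WARMUP_BATCH_SIZE} to the custom value. A hypothetical call with a
# warm-up batch size of 10:
#
# inference_analysis_api_int8_test_run_custom_warmup_batch_size(
#   test_analyzer_int8_example ${INT8_IMG_CLASS_TEST_APP}
#   ${INT8_EXAMPLE_MODEL_DIR} ${IMAGENET_DATA_PATH} 10)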
function(inference_analysis_api_bfloat16_test_run TARGET_NAME test_binary
model_dir data_path)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${model_dir}/model
--infer_data=${data_path}
--batch_size=50
--enable_bf16=true
--paddle_num_threads=${CPU_NUM_THREADS_ON_CI}
--iterations=2)
endfunction()
function(inference_analysis_api_object_dection_int8_test_run TARGET_NAME
test_binary model_dir data_path)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${model_dir}/model
--infer_data=${data_path}
--warmup_batch_size=10
--batch_size=300
--enable_int8_ptq=true
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--iterations=1)
endfunction()
function(inference_analysis_api_test_with_fake_data_build TARGET_NAME filename)
inference_analysis_test_build(${TARGET_NAME} SRCS ${filename} EXTRA_DEPS
paddle_inference_shared)
endfunction()
function(inference_analysis_api_test_with_fake_data_run TARGET_NAME test_binary
model_dir disable_fc)
inference_analysis_test_run(
${TARGET_NAME} COMMAND ${test_binary} ARGS --infer_model=${model_dir}/model
--disable_mkldnn_fc=${disable_fc})
endfunction()
function(
inference_analysis_api_quant_test_run
TARGET_NAME
test_binary
fp32_model_dir
int8_model_dir
data_path
enable_int8_qat)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--fp32_model=${fp32_model_dir}
--int8_model=${int8_model_dir}
--infer_data=${data_path}
--batch_size=50
--enable_int8_qat=${enable_int8_qat}
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--with_accuracy_layer=false
--iterations=2)
endfunction()
function(inference_analysis_api_lexical_test_run TARGET_NAME test_binary
infer_model data_path)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${infer_model}
--infer_data=${data_path}
--batch_size=50
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--with_accuracy_layer=true
--use_analysis=true
--iterations=2)
endfunction()
function(inference_analysis_api_lexical_bfloat16_test_run TARGET_NAME
test_binary infer_model data_path)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${infer_model}
--infer_data=${data_path}
--batch_size=50
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--with_accuracy_layer=true
--use_analysis=true
--enable_bf16=true
--iterations=2)
endfunction()
function(
inference_analysis_api_lexical_int8_test_run
TARGET_NAME
test_binary
infer_model
data_path
enable_int8_ptq
enable_int8_qat
fuse_multi_gru)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${infer_model}
--infer_data=${data_path}
--batch_size=100
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--with_accuracy_layer=true
--use_analysis=true
--enable_int8_ptq=${enable_int8_ptq}
--enable_int8_qat=${enable_int8_qat}
--quantized_accuracy=0.015
--fuse_multi_gru=${fuse_multi_gru}
--iterations=4)
endfunction()
function(preprocess_data2bin_test_run target py_script_source data_dir
output_file)
py_test(${target}
SRCS ${CMAKE_CURRENT_SOURCE_DIR}/${py_script_source} ARGS
--data_dir=${data_dir} --output_file=${output_file} --local)
endfunction()
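# A hedged sketch (hypothetical script and paths) of registering a
# preprocessing test that converts a raw dataset directory into the single
# .bin file the C++ testers read:
#
# preprocess_data2bin_test_run(preprocess_local_example
#                              "full_example_preprocess.py"
#                              ${EXAMPLE_DATA_DIR} example.bin)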
if(NOT APPLE AND WITH_MKLML)
# RNN1
set(RNN1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/rnn1")
download_model_and_data_without_verify(
${RNN1_INSTALL_DIR} "rnn1/model.tar.gz" "rnn1/data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_rnn1 ${RNN1_INSTALL_DIR}
analyzer_rnn1_tester.cc)
# seq_pool1
set(SEQ_POOL1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/seq_pool")
download_model_and_data_without_verify(
${SEQ_POOL1_INSTALL_DIR} "seq_pool1_model_.tar.gz"
"seq_pool1_data.txt.tar.gz")
inference_analysis_api_test(
test_analyzer_seq_pool1_compare_determine ${SEQ_POOL1_INSTALL_DIR}
analyzer_seq_pool1_compare_determine_tester.cc)
inference_analysis_api_test(test_analyzer_seq_pool1 ${SEQ_POOL1_INSTALL_DIR}
analyzer_seq_pool1_compare_tester.cc)
inference_analysis_api_test(
test_analyzer_seq_pool1_fuse_compare_zero_copy ${SEQ_POOL1_INSTALL_DIR}
analyzer_seq_pool1_fuse_compare_zero_copy_tester.cc)
inference_analysis_api_test(
test_analyzer_seq_pool1_fuse_statis ${SEQ_POOL1_INSTALL_DIR}
analyzer_seq_pool1_fuse_statis_tester.cc)
inference_analysis_api_test(
test_analyzer_seq_pool1_profile ${SEQ_POOL1_INSTALL_DIR}
analyzer_seq_pool1_profile_tester.cc)
if(NOT WIN32)
set_tests_properties(test_analyzer_seq_pool1_compare_determine
PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_seq_pool1 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_seq_pool1_fuse_compare_zero_copy
PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_seq_pool1_fuse_statis PROPERTIES TIMEOUT
120)
set_tests_properties(test_analyzer_seq_pool1_profile PROPERTIES TIMEOUT 120)
endif()
else()
# TODO: fix these tests on macOS and with OpenBLAS; fusion_seqexpand_concat_fc_op
# is not supported in those configurations.
message(
WARNING
"These tests have been disabled on OSX or with WITH_MKL=OFF until they are fixed: \n test_analyzer_rnn1"
)
message(
WARNING
"These tests have been disabled on OSX or with WITH_MKL=OFF until they are fixed: \n test_analyzer_seq_pool1"
)
endif()
# RNN2
set(RNN2_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/rnn2")
download_model_and_data_without_verify(${RNN2_INSTALL_DIR} "rnn2_model.tar.gz"
"rnn2_data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_rnn2 ${RNN2_INSTALL_DIR}
analyzer_rnn2_tester.cc)
# TODO(luotao, Superjom): the DAM test is disabled as a temporary fix for
# https://github.com/PaddlePaddle/Paddle/issues/15032#issuecomment-455990914;
# it will be re-enabled after the inference framework refactor.
# normal DAM
set(DAM_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/dam")
download_model_and_data_without_verify(${DAM_INSTALL_DIR} "DAM_model.tar.gz"
"DAM_data.txt.tar.gz")
#inference_analysis_api_test(test_analyzer_dam ${DAM_INSTALL_DIR} analyzer_dam_tester.cc EXTRA_DEPS legacy_allocator)
# small DAM
set(DAM_SMALL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/small_dam")
download_model_and_data_without_verify(
${DAM_SMALL_INSTALL_DIR} "dam_small_model.tar.gz" "dam_small_data.txt.tar.gz")
inference_analysis_test(
test_analyzer_small_dam
SRCS
analyzer_dam_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${DAM_SMALL_INSTALL_DIR}/model
--infer_data=${DAM_SMALL_INSTALL_DIR}/data.txt)
# save model
inference_analysis_api_test(test_analyzer_save_model ${DAM_SMALL_INSTALL_DIR}
analyzer_save_model_tester.cc)
# chinese_ner
set(CHINESE_NER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/chinese_ner")
download_model_and_data_without_verify(
${CHINESE_NER_INSTALL_DIR} "chinese_ner_model.tar.gz"
"chinese_ner-data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_ner ${CHINESE_NER_INSTALL_DIR}
analyzer_ner_tester.cc)
# lac
set(LAC_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/lac")
download_model_and_data(
${LAC_INSTALL_DIR} "lac_model.tar.gz" 419ca6eb85f57a01bfe173591910aec5
"lac_data.txt.tar.gz" 9983539cd6b34fbdc411e43422776bfd)
inference_analysis_api_test(test_analyzer_lac ${LAC_INSTALL_DIR}
analyzer_lac_tester.cc)
# Pyramid DNN
set(PYRAMID_DNN_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/pyramid_dnn")
download_model_and_data_without_verify(
${PYRAMID_DNN_INSTALL_DIR} "PyramidDNN_model.tar.gz"
"PyramidDNN_data.txt.tar.gz")
inference_analysis_api_test(
test_analyzer_pyramid_dnn ${PYRAMID_DNN_INSTALL_DIR}
analyzer_pyramid_dnn_tester.cc)
# Ernie
set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie")
download_model_and_data(
${ERNIE_INSTALL_DIR} "Ernie_model.tar.gz" aa59192dd41ed377f9f168e3a1309fa6
"Ernie_data.txt.tar.gz" 5396e63548edad7ca561e7e26a9476d1)
download_result(${ERNIE_INSTALL_DIR} "Ernie_result.txt.tar.gz"
73beea65abda2edb61c1662cd3180c62)
if(WITH_GPU)
inference_analysis_api_test(test_analyzer_ernie ${ERNIE_INSTALL_DIR}
analyzer_ernie_tester.cc)
inference_analysis_api_test(gpu_ernie_half_test ${ERNIE_INSTALL_DIR}
gpu_ernie_half_test.cc)
set_tests_properties(gpu_ernie_half_test PROPERTIES TIMEOUT 60)
endif()
inference_analysis_api_int8_test(test_analyzer_ernie_int8 ${ERNIE_INSTALL_DIR}
analyzer_ernie_int8_tester.cc)
# Ernie large
set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie_Large")
download_model_and_data(
${ERNIE_INSTALL_DIR} "Ernie_large_model.tar.gz"
af7715245ed32cc77374625d4c80f7ef "Ernie_large_data.txt.tar.gz"
edb2113eec93783cad56ed76d47ba57f)
download_result(${ERNIE_INSTALL_DIR} "Ernie_large_result.txt.tar.gz"
1facda98eef1085dc9d435ebf3f23a73)
inference_analysis_test(
test_analyzer_ernie_large
SRCS
analyzer_ernie_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${ERNIE_INSTALL_DIR}/model
--infer_data=${ERNIE_INSTALL_DIR}/data.txt
--refer_result=${ERNIE_INSTALL_DIR}/result.txt
--ernie_large=true)
if(NOT WIN32
AND NOT APPLE
AND TEST test_analyzer_ernie_large)
set_tests_properties(test_analyzer_ernie_large PROPERTIES TIMEOUT 150 LABELS
"RUN_TYPE=NIGHTLY")
endif()
if(WIN32 AND TEST test_analyzer_ernie_large)
set_tests_properties(test_analyzer_ernie_large PROPERTIES TIMEOUT 200)
endif()
# text_classification
set(TEXT_CLASSIFICATION_INSTALL_DIR
"${INFERENCE_DEMO_INSTALL_DIR}/text_classification")
download_model_and_data(
${TEXT_CLASSIFICATION_INSTALL_DIR} "text-classification-Senta.tar.gz"
3f0f440313ca50e26184e65ffd5809ab "text_classification_data.txt.tar.gz"
36ae620020cc3377f45ed330dd36238f)
inference_analysis_api_test(
test_analyzer_text_classification ${TEXT_CLASSIFICATION_INSTALL_DIR}
analyzer_text_classification_tester.cc)
# seq_conv1
set(SEQ_CONV1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/seq_conv1")
download_model_and_data_without_verify(
${SEQ_CONV1_INSTALL_DIR} "seq_conv1_model.tar.gz" "seq_conv1_data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_seq_conv1 ${SEQ_CONV1_INSTALL_DIR}
analyzer_seq_conv1_tester.cc)
# transformer; the dataset currently only works with batch_size=8
set(TRANSFORMER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/transformer")
download_model_and_data_without_verify(
${TRANSFORMER_INSTALL_DIR} "temp/transformer_model.tar.gz"
"temp/transformer_data.txt.tar.gz")
inference_analysis_test(
test_analyzer_transformer
SRCS
analyzer_transformer_compare_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRANSFORMER_INSTALL_DIR}/model
--infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt
--batch_size=8
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI})
inference_analysis_test(
test_analyzer_transformer_fuse
SRCS
analyzer_transformer_fuse_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRANSFORMER_INSTALL_DIR}/model
--infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt
--batch_size=8
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI})
inference_analysis_test(
test_analyzer_transformer_profile
SRCS
analyzer_transformer_profile_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRANSFORMER_INSTALL_DIR}/model
--infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt
--batch_size=8
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI})
# VIT-OCR
set(VIT_OCR_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/vit")
if(NOT EXISTS ${VIT_OCR_INSTALL_DIR}/vit_ocr.tgz)
inference_download_and_uncompress_without_verify(
${VIT_OCR_INSTALL_DIR} ${INFERENCE_URL} "ocr/vit_ocr.tgz")
endif()
inference_analysis_test(
test_analyzer_vit_ocr
SRCS
analyzer_vit_ocr_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${VIT_OCR_INSTALL_DIR}/vit_ocr/model
--infer_data=${VIT_OCR_INSTALL_DIR}/vit_ocr/datavit.txt)
# ocr
set(OCR_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/ocr")
if(NOT EXISTS ${OCR_INSTALL_DIR}/ocr.tar.gz)
inference_download_and_uncompress_without_verify(
${OCR_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/"
"inference-vis-demos/ocr.tar.gz")
endif()
inference_analysis_api_test(test_analyzer_ocr ${OCR_INSTALL_DIR}
analyzer_vis_tester.cc)
# densebox
set(DENSEBOX_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/densebox")
download_data_without_verify(${DENSEBOX_INSTALL_DIR} "densebox.tar.gz")
inference_analysis_test(
test_analyzer_detect_functional_mkldnn
SRCS
analyzer_detect_functional_mkldnn_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${DENSEBOX_INSTALL_DIR}/model
--infer_data=${DENSEBOX_INSTALL_DIR}/detect_input_50.txt
--infer_shape=${DENSEBOX_INSTALL_DIR}/shape_50.txt)
# mobilenet with transpose op
set(MOBILENET_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet")
if(NOT EXISTS ${MOBILENET_INSTALL_DIR}/mobilenet.tar.gz)
inference_download_and_uncompress_without_verify(
${MOBILENET_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/"
"inference-vis-demos/mobilenet.tar.gz")
endif()
inference_analysis_api_test(test_analyzer_mobilenet_transpose
${MOBILENET_INSTALL_DIR} analyzer_vis_tester.cc)
### Image classification tests with fake data
set(IMG_CLASS_TEST_APP "test_analyzer_image_classification")
set(IMG_CLASS_TEST_APP_SRC "analyzer_image_classification_tester.cc")
# build test binary to be used in subsequent tests
inference_analysis_api_test_with_fake_data_build(${IMG_CLASS_TEST_APP}
${IMG_CLASS_TEST_APP_SRC})
# googlenet
set(GOOGLENET_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/googlenet")
download_data_without_verify(${GOOGLENET_MODEL_DIR} "googlenet.tar.gz")
inference_analysis_api_test_with_fake_data_run(
test_analyzer_googlenet ${IMG_CLASS_TEST_APP} ${GOOGLENET_MODEL_DIR} false)
# resnet50
set(RESNET50_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/resnet50")
download_data_without_verify(${RESNET50_MODEL_DIR} "resnet50_model.tar.gz")
inference_analysis_api_test_with_fake_data_run(
test_analyzer_resnet50 ${IMG_CLASS_TEST_APP} ${RESNET50_MODEL_DIR} true)
if(WIN32)
set_tests_properties(test_analyzer_resnet50 PROPERTIES TIMEOUT 200)
endif()
# mobilenet with depthwise_conv op
set(MOBILENET_MODEL_DIR
"${INFERENCE_DEMO_INSTALL_DIR}/mobilenet_depthwise_conv")
download_data_without_verify(${MOBILENET_MODEL_DIR} "mobilenet_model.tar.gz")
inference_analysis_api_test_with_fake_data_run(
test_analyzer_mobilenet_depthwise_conv ${IMG_CLASS_TEST_APP}
${MOBILENET_MODEL_DIR} false)
if(WITH_MKLDNN)
### INT8 tests
set(INT8_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/int8v2")
## Image classification models
# ImageNet small dataset
# It may already have been downloaded for the Quant & INT8 unit tests
set(IMAGENET_DATA_ARCHIVE "imagenet_val_100_tail.tar.gz")
set(IMAGENET_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/imagenet")
set(IMAGENET_DATA_PATH "${IMAGENET_DATA_DIR}/data.bin")
download_int8_data_without_verify(${IMAGENET_DATA_DIR}
${IMAGENET_DATA_ARCHIVE})
# build test binary to be used in subsequent tests
set(INT8_IMG_CLASS_TEST_APP "test_analyzer_int8_image_classification")
set(INT8_IMG_CLASS_TEST_APP_SRC
"analyzer_int8_image_classification_tester.cc")
inference_analysis_api_test_build(${INT8_IMG_CLASS_TEST_APP}
${INT8_IMG_CLASS_TEST_APP_SRC})
# resnet50 int8
set(INT8_RESNET50_MODEL_DIR "${INT8_DATA_DIR}/resnet50")
download_int8_data_without_verify(${INT8_RESNET50_MODEL_DIR}
"resnet50_int8_model.tar.gz")
inference_analysis_api_int8_test_run(
test_analyzer_int8_resnet50 ${INT8_IMG_CLASS_TEST_APP}
${INT8_RESNET50_MODEL_DIR} ${IMAGENET_DATA_PATH})
# mobilenetv1 int8
set(INT8_MOBILENETV1_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv1")
download_int8_data_without_verify(${INT8_MOBILENETV1_MODEL_DIR}
"mobilenetv1_int8_model.tar.gz")
inference_analysis_api_int8_test_run(
test_analyzer_int8_mobilenetv1 ${INT8_IMG_CLASS_TEST_APP}
${INT8_MOBILENETV1_MODEL_DIR} ${IMAGENET_DATA_PATH})
# mobilenetv2 int8
set(INT8_MOBILENETV2_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv2")
download_int8_data_without_verify(${INT8_MOBILENETV2_MODEL_DIR}
"mobilenet_v2_int8_model.tar.gz")
inference_analysis_api_int8_test_run(
test_analyzer_int8_mobilenetv2 ${INT8_IMG_CLASS_TEST_APP}
${INT8_MOBILENETV2_MODEL_DIR} ${IMAGENET_DATA_PATH})
# resnet101 int8
set(INT8_RESNET101_MODEL_DIR "${INT8_DATA_DIR}/resnet101")
download_int8_data_without_verify(${INT8_RESNET101_MODEL_DIR}
"Res101_int8_model.tar.gz")
# inference_analysis_api_int8_test_run(test_analyzer_int8_resnet101 ${INT8_IMG_CLASS_TEST_APP} ${INT8_RESNET101_MODEL_DIR} ${IMAGENET_DATA_PATH})
# vgg16 int8
set(INT8_VGG16_MODEL_DIR "${INT8_DATA_DIR}/vgg16")
download_int8_data_without_verify(${INT8_VGG16_MODEL_DIR}
"VGG16_int8_model.tar.gz")
# inference_analysis_api_int8_test_run(test_analyzer_int8_vgg16 ${INT8_IMG_CLASS_TEST_APP} ${INT8_VGG16_MODEL_DIR} ${IMAGENET_DATA_PATH})
# vgg19 int8
set(INT8_VGG19_MODEL_DIR "${INT8_DATA_DIR}/vgg19")
download_int8_data_without_verify(${INT8_VGG19_MODEL_DIR}
"VGG19_int8_model.tar.gz")
# inference_analysis_api_int8_test_run(test_analyzer_int8_vgg19 ${INT8_IMG_CLASS_TEST_APP} ${INT8_VGG19_MODEL_DIR} ${IMAGENET_DATA_PATH})
# googlenet int8
set(INT8_GOOGLENET_MODEL_DIR "${INT8_DATA_DIR}/googlenet")
download_int8_data_without_verify(${INT8_GOOGLENET_MODEL_DIR}
"GoogleNet_int8_model.tar.gz")
inference_analysis_api_int8_test_run_custom_warmup_batch_size(
test_analyzer_int8_googlenet ${INT8_IMG_CLASS_TEST_APP}
${INT8_GOOGLENET_MODEL_DIR} ${IMAGENET_DATA_PATH} 10)
# mobilenetv3_large_x1_0 int8
set(INT8_MOBILENETV3_LARGE_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv3_large")
set(INT8_MOBILENETV3_FILE_NAME "MobileNetV3_large_x1_0_infer.tar")
if(NOT EXISTS
${INT8_MOBILENETV3_LARGE_MODEL_DIR}/${INT8_MOBILENETV3_FILE_NAME})
inference_download_and_uncompress_without_verify(
${INT8_MOBILENETV3_LARGE_MODEL_DIR}
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/"
${INT8_MOBILENETV3_FILE_NAME})
endif()
inference_analysis_test_run(
test_analyzer_int8_mobilenetv3_large
COMMAND
${INT8_IMG_CLASS_TEST_APP}
ARGS
--infer_model=${INT8_MOBILENETV3_LARGE_MODEL_DIR}/MobileNetV3_large_x1_0_infer
--infer_data=${IMAGENET_DATA_PATH}
--warmup_batch_size=50
--batch_size=1
--enable_int8_ptq=true
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--iterations=100
--with_accuracy_layer=false)
### BFLOAT16 tests
# build test binary to be used in subsequent tests
set(BF16_IMG_CLASS_TEST_APP "test_analyzer_bfloat16_image_classification")
set(BF16_IMG_CLASS_TEST_APP_SRC
"analyzer_bfloat16_image_classification_tester.cc")
inference_analysis_api_test_build(${BF16_IMG_CLASS_TEST_APP}
${BF16_IMG_CLASS_TEST_APP_SRC})
# resnet50 bfloat16
inference_analysis_api_bfloat16_test_run(
test_analyzer_bfloat16_resnet50 ${BF16_IMG_CLASS_TEST_APP}
${INT8_RESNET50_MODEL_DIR} ${IMAGENET_DATA_PATH})
# googlenet bfloat16
inference_analysis_api_bfloat16_test_run(
test_analyzer_bfloat16_googlenet ${BF16_IMG_CLASS_TEST_APP}
${INT8_GOOGLENET_MODEL_DIR} ${IMAGENET_DATA_PATH})
# mobilenetv1 bfloat16
inference_analysis_api_bfloat16_test_run(
test_analyzer_bfloat16_mobilenetv1 ${BF16_IMG_CLASS_TEST_APP}
${INT8_MOBILENETV1_MODEL_DIR} ${IMAGENET_DATA_PATH})
# mobilenetv2 bfloat16
inference_analysis_api_bfloat16_test_run(
test_analyzer_bfloat16_mobilenetv2 ${BF16_IMG_CLASS_TEST_APP}
${INT8_MOBILENETV2_MODEL_DIR} ${IMAGENET_DATA_PATH})
# mobilenetv3_large
inference_analysis_test_run(
test_analyzer_bfloat16_mobilenetv3_large
COMMAND
${BF16_IMG_CLASS_TEST_APP}
ARGS
--infer_model=${INT8_MOBILENETV3_LARGE_MODEL_DIR}/MobileNetV3_large_x1_0_infer
--infer_data=${IMAGENET_DATA_PATH}
--batch_size=1
--enable_bf16=true
--paddle_num_threads=${CPU_NUM_THREADS_ON_CI}
--iterations=100
--with_accuracy_layer=false)
### Object detection models
set(PASCALVOC_DATA_PATH "${INT8_DATA_DIR}/pascalvoc_val_head_300.bin")
set(INT8_OBJ_DETECT_TEST_APP "test_analyzer_int8_object_detection")
set(INT8_OBJ_DETECT_TEST_APP_SRC "analyzer_int8_object_detection_tester.cc")
# download dataset if necessary
download_int8_data_without_verify(${INT8_DATA_DIR}
"pascalvoc_val_head_300.tar.gz")
# build test binary to be used in subsequent tests
inference_analysis_api_test_build(${INT8_OBJ_DETECT_TEST_APP}
${INT8_OBJ_DETECT_TEST_APP_SRC})
# mobilenet-ssd int8
set(INT8_MOBILENET_SSD_MODEL_DIR "${INT8_DATA_DIR}/mobilenet-ssd")
download_int8_data_without_verify(${INT8_MOBILENET_SSD_MODEL_DIR}
"mobilenet_ssd_int8_model.tar.gz")
inference_analysis_api_object_dection_int8_test_run(
test_analyzer_int8_mobilenet_ssd ${INT8_OBJ_DETECT_TEST_APP}
${INT8_MOBILENET_SSD_MODEL_DIR} ${PASCALVOC_DATA_PATH})
### Lexical analysis GRU model
set(GRU_PATH "${INFERENCE_DEMO_INSTALL_DIR}/gru")
download_gru_data_without_verify("${GRU_PATH}" "GRU_eval_data.tar.gz")
download_gru_data_without_verify("${GRU_PATH}" "GRU_eval_model_v2.tar.gz")
set(GRU_DATA_PATH "${GRU_PATH}/GRU_eval_data.bin")
set(GRU_MODEL_PATH "${GRU_PATH}/GRU_eval_model_v2")
set(LEXICAL_TEST_APP "test_analyzer_lexical_analysis")
set(LEXICAL_TEST_APP_SRC "analyzer_lexical_analysis_gru_tester.cc")
# build test binary to be used in subsequent tests
inference_analysis_api_test_build(${LEXICAL_TEST_APP} ${LEXICAL_TEST_APP_SRC})
# run lexical analysis test
inference_analysis_api_lexical_test_run(
test_analyzer_lexical_gru ${LEXICAL_TEST_APP} ${GRU_MODEL_PATH}
${GRU_DATA_PATH})
# run bfloat16 lexical analysis test
inference_analysis_api_lexical_bfloat16_test_run(
test_analyzer_lexical_gru_bfloat16 ${LEXICAL_TEST_APP} ${GRU_MODEL_PATH}
${GRU_DATA_PATH})
# run post-training quantization lexical analysis test
inference_analysis_api_lexical_int8_test_run(
test_analyzer_lexical_gru_int8
${LEXICAL_TEST_APP}
${GRU_MODEL_PATH}
${GRU_DATA_PATH}
true # enable_int8_ptq
false # enable_int8_qat
false) # fuse_multi_gru
# run post-training quantization lexical analysis test with multi_gru fuse
inference_analysis_api_lexical_int8_test_run(
test_analyzer_lexical_gru_int8_multi_gru
${LEXICAL_TEST_APP}
${GRU_MODEL_PATH}
${GRU_DATA_PATH}
true # enable_int8_ptq
false # enable_int8_qat
true) # fuse_multi_gru
# run qat gru test
set(QAT_GRU_MODEL_ARCHIVE "GRU_quant_acc.tar.gz")
set(QAT_GRU_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/quant/GRU_quant2")
download_quant_data(${QAT_GRU_MODEL_DIR} ${QAT_GRU_MODEL_ARCHIVE}
cf207f8076dcfb8b74d8b6bdddf9090c)
inference_analysis_api_lexical_int8_test_run(
test_analyzer_lexical_gru_qat_int8
${LEXICAL_TEST_APP}
"${QAT_GRU_MODEL_DIR}/GRU_quant_acc"
${GRU_DATA_PATH}
false # enable_int8_ptq
true # enable_int8_qat
false) # fuse_multi_gru
### optimized FP32 vs. Quant INT8 tests
set(QUANT_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/quant")
set(QUANT_IMG_CLASS_TEST_APP "test_analyzer_quant_image_classification")
set(QUANT_IMG_CLASS_TEST_APP_SRC
"analyzer_quant_image_classification_tester.cc")
# build test binary to be used in subsequent tests
inference_analysis_api_test_build(${QUANT_IMG_CLASS_TEST_APP}
${QUANT_IMG_CLASS_TEST_APP_SRC})
# MobileNetV1 FP32 vs. Quant INT8
# The FP32 model should already be downloaded for slim Quant unit tests on Linux
set(QUANT2_MobileNetV1_MODEL_DIR "${QUANT_DATA_DIR}/MobileNetV1_quant2")
set(QUANT2_INT8_MobileNetV1_MODEL_DIR
"${QUANT_DATA_DIR}/MobileNetV1_quant2_int8")
if(NOT LINUX)
download_quant_data_without_verify(${QUANT2_MobileNetV1_MODEL_DIR}
"MobileNet_qat_perf.tar.gz")
endif()
download_quant_data_without_verify(${QUANT2_INT8_MobileNetV1_MODEL_DIR}
"MobileNet_qat_perf_int8.tar.gz")
inference_analysis_api_quant_test_run(
test_analyzer_quant_performance_benchmark
${QUANT_IMG_CLASS_TEST_APP}
${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float
${QUANT2_INT8_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf_int8
${IMAGENET_DATA_PATH}
false)
# Quant2 MobileNetV1
inference_analysis_api_quant_test_run(
test_analyzer_quant2_mobilenetv1_mkldnn
${QUANT_IMG_CLASS_TEST_APP}
${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float
${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float
${IMAGENET_DATA_PATH}
true)
# Quant2 ResNet50 with input/output scales in `fake_quantize_range_abs_max` operators and the `out_threshold` attributes,
# with weight scales in `fake_channel_wise_dequantize_max_abs` operators
set(QUANT2_RESNET50_CHANNELWISE_MODEL_DIR
"${QUANT_DATA_DIR}/ResNet50_quant2_channelwise")
set(QUANT2_RESNET50_CHANNELWISE_MODEL_ARCHIVE
"ResNet50_qat_channelwise.tar.gz")
if(NOT LINUX)
download_quant_data_without_verify(
${QUANT2_RESNET50_CHANNELWISE_MODEL_DIR}
${QUANT2_RESNET50_CHANNELWISE_MODEL_ARCHIVE})
endif()
set(QUANT2_RESNET50_MODEL
${QUANT2_RESNET50_CHANNELWISE_MODEL_DIR}/ResNet50_qat_channelwise)
inference_analysis_api_quant_test_run(
test_analyzer_quant2_resnet50_channelwise_mkldnn
${QUANT_IMG_CLASS_TEST_APP} ${QUANT2_RESNET50_MODEL}
${QUANT2_RESNET50_MODEL} ${IMAGENET_DATA_PATH} true)
### Other tests
# MKLDNN quantizer config
set(MKLDNN_QUANTIZER_CONFIG_TEST_APP "test_mkldnn_quantizer_config")
set(MKLDNN_QUANTIZER_CONFIG_TEST_APP_SRC "mkldnn_quantizer_config_tester.cc")
inference_analysis_api_test_build(${MKLDNN_QUANTIZER_CONFIG_TEST_APP}
${MKLDNN_QUANTIZER_CONFIG_TEST_APP_SRC})
inference_analysis_test_run(test_mkldnn_quantizer_config COMMAND
${MKLDNN_QUANTIZER_CONFIG_TEST_APP})
# preprocess data2bin imagenet
download_int8_data_without_verify(${INT8_DATA_DIR} "imagenet_small.tar.gz")
set(IMAGENET_SMALL_DATA_DIR "${INT8_DATA_DIR}/imagenet_small")
set(IMAGENET_SMALL_OUTPUT_FILE "imagenet_small.bin")
preprocess_data2bin_test_run(
preprocess_local_imagenet "full_ILSVRC2012_val_preprocess.py"
${IMAGENET_SMALL_DATA_DIR} ${IMAGENET_SMALL_OUTPUT_FILE})
# preprocess data2bin pascalvoc
download_int8_data_without_verify(${INT8_DATA_DIR} "pascalvoc_small.tar.gz")
set(PASCALVOC_SMALL_DATA_DIR "${INT8_DATA_DIR}/pascalvoc_small")
set(PASCALVOC_SMALL_OUTPUT_FILE "pascalvoc_small.bin")
preprocess_data2bin_test_run(
preprocess_local_pascalvoc "full_pascalvoc_test_preprocess.py"
${PASCALVOC_SMALL_DATA_DIR} ${PASCALVOC_SMALL_OUTPUT_FILE})
endif()
# bert, max_len=20, embedding_dim=128
set(BERT_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/bert_emb128")
download_model_and_data_without_verify(
${BERT_INSTALL_DIR} "bert_emb128_model.tar.gz" "bert_data_len20.txt.tar.gz")
inference_analysis_api_test(test_analyzer_bert ${BERT_INSTALL_DIR}
analyzer_bert_tester.cc)
# multiple models prediction
set(MMP_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/multi_model_prediction")
download_data_without_verify(${MMP_INSTALL_DIR}
PaddleInference/mobilenet_v2_models.tar.gz)
inference_multiple_models_analysis_api_test(
test_analyzer_multi_model_prediction ${MMP_INSTALL_DIR}
analyzer_mmp_tester.cc)
if(WITH_GPU AND TENSORRT_FOUND)
set(TRT_MODEL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/trt_models")
if(NOT EXISTS ${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models.tar.gz)
inference_download_and_uncompress(
${TRT_MODEL_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test
"trt_inference_test_models.tar.gz" 3dcccdc38b549b6b1b4089723757bd98)
endif()
set(TEST_SPLIT_CONVERTER_MODEL
"${TRT_MODEL_INSTALL_DIR}/trt_split_op_converter_test")
if(NOT EXISTS ${TEST_SPLIT_CONVERTER_MODEL}/split_converter.tgz)
inference_download_and_uncompress_without_verify(
${TEST_SPLIT_CONVERTER_MODEL} ${INFERENCE_URL}/tensorrt_test
"split_converter.tgz")
endif()
inference_analysis_test(
trt_mobilenet_test
SRCS
trt_mobilenet_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
inference_analysis_test(
trt_resnet50_test
SRCS
trt_resnet50_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
inference_analysis_test(
trt_resnext_test
SRCS
trt_resnext_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
inference_analysis_test(
trt_fc_prelu_test
SRCS
trt_fc_prelu_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
inference_analysis_test(
trt_cascade_rcnn_test
SRCS
trt_cascade_rcnn_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
inference_analysis_test(
trt_split_converter_test
SRCS
trt_split_converter_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TEST_SPLIT_CONVERTER_MODEL}/)
inference_analysis_test(
test_analyzer_capi_exp_gpu
SRCS
analyzer_capi_exp_gpu_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c)
endif()
inference_analysis_test(
test_analyzer_capi_exp_xpu
SRCS
analyzer_capi_exp_xpu_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c)
endif()
set(TRT_MODEL_QUANT_RESNET_DIR
"${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model")
if(NOT EXISTS ${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model.tgz)
inference_download_and_uncompress_without_verify(
${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test
"small_quant_model.tgz")
endif()
inference_analysis_test(
trt_quant_int8_test
SRCS
trt_quant_int8_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_QUANT_RESNET_DIR})
set(TRT_MODEL_QUANT_YOLOV3_DIR
"${INFERENCE_DEMO_INSTALL_DIR}/yolov3_r50_quant_aware")
if(NOT EXISTS ${INFERENCE_DEMO_INSTALL_DIR}/yolov3_r50_quant_aware.tgz)
inference_download_and_uncompress_without_verify(
${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test
"yolov3_r50_quant_aware.tgz")
endif()
inference_analysis_test(
trt_quant_int8_yolov3_r50_test
SRCS
trt_quant_int8_yolov3_r50_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_QUANT_YOLOV3_DIR})
set(TEST_TRT_DYNAMIC_MODEL2 "${TRT_MODEL_INSTALL_DIR}/complex_model_dynamic")
if(NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL2}/complex_model_dynamic2.tar.gz)
inference_download_and_uncompress_without_verify(
${TEST_TRT_DYNAMIC_MODEL2} ${INFERENCE_URL}/tensorrt_test
"complex_model_dynamic2.tar.gz")
endif()
set(TEST_TRT_DYNAMIC_MODEL
"${TRT_MODEL_INSTALL_DIR}/conv_bn_swish_split_gelu")
if(NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL}/conv_bn_swish_split_gelu.tar.gz)
inference_download_and_uncompress(
${TEST_TRT_DYNAMIC_MODEL} ${INFERENCE_URL}/tensorrt_test
"conv_bn_swish_split_gelu.tar.gz" 2a5e8791e47b221b4f782151d76da9c6)
endif()
inference_analysis_test(
trt_dynamic_shape_test
SRCS
trt_dynamic_shape_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR})
set(TEST_TRT_ERNIE_MODEL "${TRT_MODEL_INSTALL_DIR}/ernie_test")
if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4.tar.gz)
inference_download_and_uncompress(
${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test
"ernie_model_4.tar.gz" 5fa371efa75706becbaad79195d2ca68)
endif()
inference_analysis_test(
test_trt_dynamic_shape_ernie
SRCS
trt_dynamic_shape_ernie_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4)
set(TEST_TRT_TRANSFORMER_PRUNE_MODEL
"${TRT_MODEL_INSTALL_DIR}/transformer_prune")
if(NOT EXISTS ${TEST_TRT_TRANSFORMER_PRUNE_MODEL}/transformer_prune.tar.gz)
inference_download_and_uncompress(
${TEST_TRT_TRANSFORMER_PRUNE_MODEL} ${INFERENCE_URL}/tensorrt_test
"transformer_prune.tar.gz" 77b56dc73ff0cf44ddb1ce9ca0b0f471)
endif()
inference_analysis_test(
test_trt_dynamic_shape_transformer_prune
SRCS
trt_dynamic_shape_transformer_prune_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TEST_TRT_TRANSFORMER_PRUNE_MODEL}/transformer_prune)
if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4_unserialized.tgz)
inference_download_and_uncompress(
${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test
"ernie_model_4_unserialized.tgz" 833d73fc6a7f7e1ee4a1fd6419209e55)
endif()
inference_analysis_test(
test_trt_dynamic_shape_ernie_ser_deser
SRCS
trt_dynamic_shape_ernie_serialize_deserialize_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4_unserialized)
if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4_fp16_unserialized.tgz)
inference_download_and_uncompress(
${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test
"ernie_model_4_fp16_unserialized.tgz" c5ff2d0cad79953ffbf2b8b9e2fae6e4)
endif()
inference_analysis_test(
test_trt_dynamic_shape_ernie_fp16_ser_deser
SRCS
trt_dynamic_shape_ernie_fp16_serialize_deserialize_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4_fp16_unserialized)
endif()
set(LITE_MODEL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/lite")
download_data_without_verify(${LITE_MODEL_INSTALL_DIR} "mul_model_fp32.tgz")
inference_analysis_test(
lite_mul_model_test
SRCS
lite_mul_model_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${LITE_MODEL_INSTALL_DIR})
inference_analysis_test(
lite_resnet50_test
SRCS
lite_resnet50_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${RESNET50_MODEL_DIR})
inference_analysis_test(
test_analyzer_capi_exp
SRCS
analyzer_capi_exp_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${RESNET50_MODEL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp paddle_inference_c)
endif()
inference_analysis_test(
test_analyzer_capi_exp_pd_config
SRCS
analyzer_capi_exp_pd_config_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${MOBILENET_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_pd_config
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_pd_config paddle_inference_c)
endif()
inference_analysis_test(
test_analyzer_capi_exp_pd_tensor
SRCS
analyzer_capi_exp_pd_tensor_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${MOBILENET_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_pd_tensor
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_pd_tensor paddle_inference_c)
endif()
if(NOT APPLE AND NOT WIN32)
inference_analysis_test(
test_analyzer_capi_exp_pd_threads
SRCS
analyzer_capi_exp_pd_threads_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${MOBILENET_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_pd_threads
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_pd_threads paddle_inference_c)
endif()
endif()
inference_analysis_test(
test_analyzer_zerocopytensor_tensor
SRCS
analyzer_zerocopy_tensor_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${OCR_INSTALL_DIR}/model)
if(WITH_DISTRIBUTE AND WITH_PSCORE)
inference_analysis_test(
test_analyzer_dist_model
SRCS
analyzer_dist_model_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${OCR_INSTALL_DIR}/model)
endif()
if(WITH_DISTRIBUTE
AND WITH_PSCORE
AND WITH_XPU
AND WITH_XPU_BKCL)
inference_analysis_test(
test_analyzer_dist_model_xpu
SRCS
analyzer_dist_model_xpu_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${OCR_INSTALL_DIR}/model)
endif()
inference_analysis_test(
test_analyzer_paddletensor_tensor
SRCS
analyzer_paddle_tensor_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${OCR_INSTALL_DIR}/model
--infer_data=${OCR_INSTALL_DIR}/data.txt
--refer_result=${OCR_INSTALL_DIR}/result.txt)
if(WITH_MKLDNN)
inference_analysis_test(
test_analyzer_capi_exp_int
SRCS
analyzer_capi_exp_int_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${INT8_DATA_DIR}/resnet50/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c)
endif()
endif()
inference_analysis_test(
test_analyzer_capi_exp_ner
SRCS
analyzer_capi_exp_ner_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${CHINESE_NER_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c)
endif()
if(WITH_GPU)
inference_analysis_test(
paddle_infer_api_test
SRCS
paddle_infer_api_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${RESNET50_MODEL_DIR})
inference_analysis_test(
paddle_infer_api_copy_tensor_tester
SRCS
paddle_infer_api_copy_tensor_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${RESNET50_MODEL_DIR})
set_tests_properties(paddle_infer_api_copy_tensor_tester PROPERTIES TIMEOUT
30)
endif()
cc_test(
paddle_infer_api_errors_test
SRCS paddle_infer_api_errors_tester.cc
DEPS paddle_inference_api)
if(WITH_GPU AND TENSORRT_FOUND)
set_tests_properties(trt_resnext_test PROPERTIES TIMEOUT 300)
set_tests_properties(trt_quant_int8_yolov3_r50_test PROPERTIES TIMEOUT 400)
set_tests_properties(trt_resnet50_test PROPERTIES TIMEOUT 300)
set_tests_properties(trt_cascade_rcnn_test PROPERTIES TIMEOUT 300)
set_tests_properties(test_trt_dynamic_shape_ernie_ser_deser PROPERTIES TIMEOUT
300)
set_tests_properties(test_trt_dynamic_shape_ernie_fp16_ser_deser
PROPERTIES TIMEOUT 300)
set_tests_properties(test_trt_dynamic_shape_ernie PROPERTIES TIMEOUT 300)
endif()
if(WITH_MKLDNN)
set_tests_properties(test_analyzer_int8_resnet50 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_int8_mobilenet_ssd PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_quant_performance_benchmark
PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_int8_mobilenetv2 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_int8_mobilenetv1 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_int8_mobilenetv3_large PROPERTIES TIMEOUT
120)
set_tests_properties(test_analyzer_quant2_mobilenetv1_mkldnn
PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_quant2_resnet50_channelwise_mkldnn
PROPERTIES TIMEOUT 120)
endif()
set_tests_properties(lite_resnet50_test PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_mobilenet_transpose PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_resnet50 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_ner PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_ernie_int8 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_googlenet PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_small_dam PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_transformer PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_mobilenet_depthwise_conv PROPERTIES TIMEOUT
120)
if(WITH_GPU)
set_tests_properties(test_analyzer_bert PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_ernie PROPERTIES TIMEOUT 120)
endif()
if(WITH_GPU AND TENSORRT_FOUND)
set_tests_properties(trt_mobilenet_test PROPERTIES TIMEOUT 120)
if(WITH_MKLDNN)
set_tests_properties(test_analyzer_bfloat16_resnet50 PROPERTIES TIMEOUT 120)
endif()
endif()
if(ON_INFER OR WITH_GPU)
set_tests_properties(test_analyzer_transformer_profile PROPERTIES TIMEOUT 120)
endif()
if(WITH_IPU)
# word2vec sample
set(WORD2VEC_INSTALL_DIR
"${INFERENCE_DEMO_INSTALL_DIR}/word2vec/word2vec.inference.model")
inference_analysis_test(
ipu_word2vec_sample
SRCS
ipu_word2vec_sample.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${WORD2VEC_INSTALL_DIR})
# ERNIE
set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie")
inference_analysis_api_test(ipu_ernie_test ${ERNIE_INSTALL_DIR}
ipu_ernie_test.cc ARGS --warmup=true --repeat=10)
inference_analysis_api_test(
ipu_ernie_fp16_test ${ERNIE_INSTALL_DIR} ipu_ernie_fp16_test.cc ARGS
--warmup=true --repeat=10)
# Resnet50
set(RESNET50_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/resnet50")
inference_analysis_test(
ipu_resnet50_test
SRCS
ipu_resnet50_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${RESNET50_MODEL_DIR}
--warmup=true
--repeat=10)
inference_analysis_test(
ipu_resnet50_fp16_test
SRCS
ipu_resnet50_fp16_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${RESNET50_MODEL_DIR}
--warmup=true
--repeat=10)
# Only Resnet50 and Ernie are supported currently
inference_analysis_api_test(
ipu_multi_model_profile
SRCS
ipu_multi_model_profile.cc
ARGS
--model_name="Resnet50"
--infer_model=${RESNET50_MODEL_DIR}
--warmup=true
--repeat=10)
endif()
...@@ -3008,7 +3008,7 @@ EOF
echo "ipipe_log_param_Demo_Ci_Tests_Total_Time: $[ $demo_ci_endTime_s - $demo_ci_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
infer_ut_startTime_s=`date +%s`
cd ${PADDLE_ROOT}/paddle/fluid/inference/tests/infer_ut
cd ${PADDLE_ROOT}/test/cpp/inference/infer_ut
./run.sh ${PADDLE_ROOT} ${WITH_MKL:-ON} ${WITH_GPU:-OFF} ${INFERENCE_DEMO_INSTALL_DIR} \
${TENSORRT_ROOT_DIR:-/usr} ${WITH_ONNXRUNTIME:-ON}
TEST_EXIT_CODE=$?
...
...@@ -4,3 +4,4 @@ add_subdirectory(new_executor)
add_subdirectory(prim)
add_subdirectory(imperative)
add_subdirectory(ir)
add_subdirectory(inference)
if(WITH_TESTING)
include(test.cmake) # some generic cmake function for inference
endif()
add_subdirectory(analysis)
add_subdirectory(api)
function(inference_analysis_test_build TARGET)
if(WITH_TESTING)
set(options "")
set(oneValueArgs "")
set(multiValueArgs SRCS EXTRA_DEPS)
cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}"
"${multiValueArgs}" ${ARGN})
inference_base_test_build(
${TARGET}
SRCS
${analysis_test_SRCS}
DEPS
${analysis_test_EXTRA_DEPS}
analysis
pass
${GLOB_PASS_LIB})
endif()
endfunction()
function(inference_analysis_test_run TARGET)
if(WITH_TESTING)
set(options "")
set(oneValueArgs "")
set(multiValueArgs COMMAND ARGS)
cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}"
"${multiValueArgs}" ${ARGN})
inference_base_test_run(${TARGET} COMMAND ${analysis_test_COMMAND} ARGS
${analysis_test_ARGS})
set_tests_properties(${TARGET} PROPERTIES LABELS "RUN_TYPE=INFER")
endif()
endfunction()
function(inference_analysis_test TARGET)
if(WITH_TESTING)
set(options "")
set(oneValueArgs "")
set(multiValueArgs SRCS ARGS EXTRA_DEPS)
cmake_parse_arguments(analysis_test "${options}" "${oneValueArgs}"
"${multiValueArgs}" ${ARGN})
inference_base_test_build(
${TARGET}
SRCS
${analysis_test_SRCS}
DEPS
${analysis_test_EXTRA_DEPS}
analysis
pass
${GLOB_PASS_LIB})
inference_base_test_run(${TARGET} COMMAND ${TARGET} ARGS
${analysis_test_ARGS})
set_tests_properties(${TARGET} PROPERTIES LABELS "RUN_TYPE=INFER")
endif()
endfunction()
if(NOT APPLE AND NOT WIN32)
inference_analysis_test(
test_analyzer
SRCS
analyzer_tester.cc
EXTRA_DEPS
reset_tensor_array
paddle_inference_shared
ARGS
--inference_model_dir=${WORD2VEC_MODEL_DIR})
elseif(WIN32)
inference_analysis_test(
test_analyzer
SRCS
analyzer_tester.cc
EXTRA_DEPS
reset_tensor_array
paddle_inference_api
ARGS
--inference_model_dir=${WORD2VEC_MODEL_DIR})
if(WITH_ONNXRUNTIME AND WIN32)
# Copy onnxruntime for some C++ tests on Windows; since the tests are built
# only in CI, assume the Windows generator is Ninja.
copy_onnx(test_analyzer)
endif()
endif()
# On Windows, the C API tests must link both shared libraries to avoid symbol
# redefinition; on Linux they cannot, or graph_to_program would be registered
# more than once. Both platforms could link only paddle_inference_c, but that
# would increase the size of the build folder by about 30 GB.
cc_test(
test_paddle_inference_api
SRCS api_tester.cc
DEPS paddle_inference_api)
cc_test(
inference_api_helper_test
SRCS helper_test.cc
DEPS paddle_inference_api)
if(WITH_ONNXRUNTIME AND WIN32)
# Copy onnxruntime for some C++ tests on Windows; since the tests are built
# only in CI, assume the Windows generator is Ninja.
copy_onnx(test_paddle_inference_api)
endif()
if(WITH_TESTING AND WITH_INFERENCE_API_TEST)
if(WIN32)
set(INFERENCE_C_EXTRA_DEPS paddle_inference_shared
paddle_inference_c_shared)
else()
set(INFERENCE_C_EXTRA_DEPS paddle_inference_shared paddle_inference_c)
endif()
function(download_data install_dir data_file check_sum)
string(REGEX MATCH "[^/\\]+$" file_name ${data_file})
if(NOT EXISTS ${install_dir}/${file_name})
inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}
${data_file} ${check_sum})
endif()
endfunction()
function(download_data_without_verify install_dir data_file)
string(REGEX MATCH "[^/\\]+$" file_name ${data_file})
if(NOT EXISTS ${install_dir}/${file_name})
inference_download_and_uncompress_without_verify(
${install_dir} ${INFERENCE_URL} ${data_file})
endif()
endfunction()
function(download_int8_data install_dir data_file check_sum)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/int8
${data_file} ${check_sum})
endif()
endfunction()
function(download_int8_data_without_verify install_dir data_file)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress_without_verify(
${install_dir} ${INFERENCE_URL}/int8 ${data_file})
endif()
endfunction()
function(download_bfloat16_data install_dir data_file check_sum)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress(
${install_dir} ${INFERENCE_URL}/bfloat16 ${data_file} ${check_sum})
endif()
endfunction()
function(download_bfloat16_data_without_verify install_dir data_file)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress_without_verify(
${install_dir} ${INFERENCE_URL}/bfloat16 ${data_file})
endif()
endfunction()
function(download_GRU_data install_dir data_file check_sum)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/gru
${data_file} ${check_sum})
endif()
endfunction()
function(download_GRU_data_without_verify install_dir data_file)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress_without_verify(
${install_dir} ${INFERENCE_URL}/gru ${data_file})
endif()
endfunction()
function(download_quant_data install_dir data_file check_sum)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress(
${install_dir} ${INFERENCE_URL}/int8/QAT_models ${data_file}
${check_sum})
endif()
endfunction()
function(download_quant_data_without_verify install_dir data_file)
if(NOT EXISTS ${install_dir}/${data_file})
inference_download_and_uncompress_without_verify(
${install_dir} ${INFERENCE_URL}/int8/QAT_models ${data_file})
endif()
endfunction()
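  # The int8/bfloat16/GRU/quant helpers above follow one pattern and differ
  # only in the URL subdirectory they fetch from (${INFERENCE_URL}/int8,
  # /bfloat16, /gru and /int8/QAT_models respectively).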
function(download_model_and_data install_dir model_name model_check_sum
data_name data_check_sum)
download_data(${install_dir} ${model_name} ${model_check_sum})
download_data(${install_dir} ${data_name} ${data_check_sum})
endfunction()
function(download_model_and_data_without_verify install_dir model_name
data_name)
download_data_without_verify(${install_dir} ${model_name})
download_data_without_verify(${install_dir} ${data_name})
endfunction()
function(download_result install_dir result_name check_sum)
download_data(${install_dir} ${result_name} ${check_sum})
endfunction()
function(download_result_without_verify install_dir result_name)
download_data_without_verify(${install_dir} ${result_name})
endfunction()
function(inference_analysis_api_test target install_dir filename)
inference_analysis_test(
${target}
SRCS
${filename}
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${install_dir}/model
--infer_data=${install_dir}/data.txt
--refer_result=${install_dir}/result.txt)
endfunction()
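  # Callers are expected to point install_dir at a directory laid out as
  # <install_dir>/model, <install_dir>/data.txt and <install_dir>/result.txt,
  # which is the layout the download helpers above appear to produce for most
  # archives.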
function(inference_analysis_api_int8_test target install_dir filename)
inference_analysis_test(
${target}
SRCS
${filename}
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${install_dir}/model
--infer_data=${install_dir}/data.txt
--refer_result=${install_dir}/result.txt
--accuracy=0.8
--batch_size=5
--enable_int8_ptq=true)
endfunction()
function(inference_multiple_models_analysis_api_test target install_dir
filename)
inference_analysis_test(
${target}
SRCS
${filename}
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${install_dir}/mobilenet_v2_models/1
--infer_model2=${install_dir}/mobilenet_v2_models/xx
--infer_model3=${install_dir}/mobilenet_v2_models/3)
endfunction()
function(inference_analysis_api_test_build TARGET_NAME filename)
inference_analysis_test_build(${TARGET_NAME} SRCS ${filename} EXTRA_DEPS
paddle_inference_shared)
endfunction()
function(inference_analysis_api_int8_test_run TARGET_NAME test_binary
model_dir data_path)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${model_dir}/model
--infer_data=${data_path}
--warmup_batch_size=${WARMUP_BATCH_SIZE}
--batch_size=50
--enable_int8_ptq=true
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--iterations=2)
endfunction()
function(inference_analysis_api_int8_test_run_custom_warmup_batch_size
TARGET_NAME test_binary model_dir data_path warmup_batch_size)
set(WARMUP_BATCH_SIZE ${warmup_batch_size})
inference_analysis_api_int8_test_run(${TARGET_NAME} ${test_binary}
${model_dir} ${data_path})
endfunction()
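  # Note: CMake functions see variables from their calling scope, so the
  # WARMUP_BATCH_SIZE set in this wrapper is the value that
  # inference_analysis_api_int8_test_run expands in its ARGS, shadowing the CI
  # default without leaking it to the caller.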
function(inference_analysis_api_bfloat16_test_run TARGET_NAME test_binary
model_dir data_path)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${model_dir}/model
--infer_data=${data_path}
--batch_size=50
--enable_bf16=true
--paddle_num_threads=${CPU_NUM_THREADS_ON_CI}
--iterations=2)
endfunction()
function(inference_analysis_api_object_dection_int8_test_run TARGET_NAME
test_binary model_dir data_path)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${model_dir}/model
--infer_data=${data_path}
--warmup_batch_size=10
--batch_size=300
--enable_int8_ptq=true
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--iterations=1)
endfunction()
function(inference_analysis_api_test_with_fake_data_build TARGET_NAME
filename)
inference_analysis_test_build(${TARGET_NAME} SRCS ${filename} EXTRA_DEPS
paddle_inference_shared)
endfunction()
function(inference_analysis_api_test_with_fake_data_run TARGET_NAME
test_binary model_dir disable_fc)
inference_analysis_test_run(
${TARGET_NAME} COMMAND ${test_binary} ARGS
--infer_model=${model_dir}/model --disable_mkldnn_fc=${disable_fc})
endfunction()
function(
inference_analysis_api_quant_test_run
TARGET_NAME
test_binary
fp32_model_dir
int8_model_dir
data_path
enable_int8_qat)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--fp32_model=${fp32_model_dir}
--int8_model=${int8_model_dir}
--infer_data=${data_path}
--batch_size=50
--enable_int8_qat=${enable_int8_qat}
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--with_accuracy_layer=false
--iterations=2)
endfunction()
function(inference_analysis_api_lexical_test_run TARGET_NAME test_binary
infer_model data_path)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${infer_model}
--infer_data=${data_path}
--batch_size=50
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--with_accuracy_layer=true
--use_analysis=true
--iterations=2)
endfunction()
function(inference_analysis_api_lexical_bfloat16_test_run TARGET_NAME
test_binary infer_model data_path)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${infer_model}
--infer_data=${data_path}
--batch_size=50
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--with_accuracy_layer=true
--use_analysis=true
--enable_bf16=true
--iterations=2)
endfunction()
function(
inference_analysis_api_lexical_int8_test_run
TARGET_NAME
test_binary
infer_model
data_path
enable_int8_ptq
enable_int8_qat
fuse_multi_gru)
inference_analysis_test_run(
${TARGET_NAME}
COMMAND
${test_binary}
ARGS
--infer_model=${infer_model}
--infer_data=${data_path}
--batch_size=100
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--with_accuracy_layer=true
--use_analysis=true
--enable_int8_ptq=${enable_int8_ptq}
--enable_int8_qat=${enable_int8_qat}
--quantized_accuracy=0.015
--fuse_multi_gru=${fuse_multi_gru}
--iterations=4)
endfunction()
function(preprocess_data2bin_test_run target py_script_source data_dir
output_file)
py_test(${target}
SRCS ${CMAKE_CURRENT_SOURCE_DIR}/${py_script_source} ARGS
--data_dir=${data_dir} --output_file=${output_file} --local)
endfunction()
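  # The --local flag is forwarded to the preprocessing script; presumably it
  # tells the script to convert the already-downloaded local dataset instead
  # of fetching the full one.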
if(NOT APPLE AND WITH_MKLML)
# RNN1
set(RNN1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/rnn1")
download_model_and_data_without_verify(
${RNN1_INSTALL_DIR} "rnn1/model.tar.gz" "rnn1/data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_rnn1 ${RNN1_INSTALL_DIR}
analyzer_rnn1_tester.cc)
# seq_pool1
set(SEQ_POOL1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/seq_pool")
download_model_and_data_without_verify(
${SEQ_POOL1_INSTALL_DIR} "seq_pool1_model_.tar.gz"
"seq_pool1_data.txt.tar.gz")
inference_analysis_api_test(
test_analyzer_seq_pool1_compare_determine ${SEQ_POOL1_INSTALL_DIR}
analyzer_seq_pool1_compare_determine_tester.cc)
inference_analysis_api_test(
test_analyzer_seq_pool1 ${SEQ_POOL1_INSTALL_DIR}
analyzer_seq_pool1_compare_tester.cc)
inference_analysis_api_test(
test_analyzer_seq_pool1_fuse_compare_zero_copy ${SEQ_POOL1_INSTALL_DIR}
analyzer_seq_pool1_fuse_compare_zero_copy_tester.cc)
inference_analysis_api_test(
test_analyzer_seq_pool1_fuse_statis ${SEQ_POOL1_INSTALL_DIR}
analyzer_seq_pool1_fuse_statis_tester.cc)
inference_analysis_api_test(
test_analyzer_seq_pool1_profile ${SEQ_POOL1_INSTALL_DIR}
analyzer_seq_pool1_profile_tester.cc)
if(NOT WIN32)
set_tests_properties(test_analyzer_seq_pool1_compare_determine
PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_seq_pool1 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_seq_pool1_fuse_compare_zero_copy
PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_seq_pool1_fuse_statis
PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_seq_pool1_profile PROPERTIES TIMEOUT
120)
endif()
else()
    # TODO: fix these tests on MACOS and OPENBLAS; they are disabled because
    # fusion_seqexpand_concat_fc_op is not supported on MACOS and OPENBLAS.
  message(
    WARNING
      "These tests have been disabled on OSX or with WITH_MKL=OFF until they are fixed: \n test_analyzer_rnn1"
  )
  message(
    WARNING
      "These tests have been disabled on OSX or with WITH_MKL=OFF until they are fixed: \n test_analyzer_seq_pool1"
  )
endif()
# RNN2
set(RNN2_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/rnn2")
download_model_and_data_without_verify(
${RNN2_INSTALL_DIR} "rnn2_model.tar.gz" "rnn2_data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_rnn2 ${RNN2_INSTALL_DIR}
analyzer_rnn2_tester.cc)
  # TODO(luotao, Superjom): the DAM test is disabled as a temporary fix for
  # https://github.com/PaddlePaddle/Paddle/issues/15032#issuecomment-455990914.
  # It will be re-enabled after the inference framework refactor.
# normal DAM
set(DAM_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/dam")
download_model_and_data_without_verify(${DAM_INSTALL_DIR} "DAM_model.tar.gz"
"DAM_data.txt.tar.gz")
#inference_analysis_api_test(test_analyzer_dam ${DAM_INSTALL_DIR} analyzer_dam_tester.cc EXTRA_DEPS legacy_allocator)
# small DAM
set(DAM_SMALL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/small_dam")
download_model_and_data_without_verify(
${DAM_SMALL_INSTALL_DIR} "dam_small_model.tar.gz"
"dam_small_data.txt.tar.gz")
inference_analysis_test(
test_analyzer_small_dam
SRCS
analyzer_dam_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${DAM_SMALL_INSTALL_DIR}/model
--infer_data=${DAM_SMALL_INSTALL_DIR}/data.txt)
  # save model
inference_analysis_api_test(test_analyzer_save_model ${DAM_SMALL_INSTALL_DIR}
analyzer_save_model_tester.cc)
# chinese_ner
set(CHINESE_NER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/chinese_ner")
download_model_and_data_without_verify(
${CHINESE_NER_INSTALL_DIR} "chinese_ner_model.tar.gz"
"chinese_ner-data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_ner ${CHINESE_NER_INSTALL_DIR}
analyzer_ner_tester.cc)
# lac
set(LAC_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/lac")
download_model_and_data(
${LAC_INSTALL_DIR} "lac_model.tar.gz" 419ca6eb85f57a01bfe173591910aec5
"lac_data.txt.tar.gz" 9983539cd6b34fbdc411e43422776bfd)
inference_analysis_api_test(test_analyzer_lac ${LAC_INSTALL_DIR}
analyzer_lac_tester.cc)
# Pyramid DNN
set(PYRAMID_DNN_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/pyramid_dnn")
download_model_and_data_without_verify(
${PYRAMID_DNN_INSTALL_DIR} "PyramidDNN_model.tar.gz"
"PyramidDNN_data.txt.tar.gz")
inference_analysis_api_test(
test_analyzer_pyramid_dnn ${PYRAMID_DNN_INSTALL_DIR}
analyzer_pyramid_dnn_tester.cc)
# Ernie
set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie")
download_model_and_data(
${ERNIE_INSTALL_DIR} "Ernie_model.tar.gz" aa59192dd41ed377f9f168e3a1309fa6
"Ernie_data.txt.tar.gz" 5396e63548edad7ca561e7e26a9476d1)
download_result(${ERNIE_INSTALL_DIR} "Ernie_result.txt.tar.gz"
73beea65abda2edb61c1662cd3180c62)
if(WITH_GPU)
inference_analysis_api_test(test_analyzer_ernie ${ERNIE_INSTALL_DIR}
analyzer_ernie_tester.cc)
inference_analysis_api_test(gpu_ernie_half_test ${ERNIE_INSTALL_DIR}
gpu_ernie_half_test.cc)
set_tests_properties(gpu_ernie_half_test PROPERTIES TIMEOUT 60)
endif()
inference_analysis_api_int8_test(
test_analyzer_ernie_int8 ${ERNIE_INSTALL_DIR} analyzer_ernie_int8_tester.cc)
# Ernie large
set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie_Large")
download_model_and_data(
${ERNIE_INSTALL_DIR} "Ernie_large_model.tar.gz"
af7715245ed32cc77374625d4c80f7ef "Ernie_large_data.txt.tar.gz"
edb2113eec93783cad56ed76d47ba57f)
download_result(${ERNIE_INSTALL_DIR} "Ernie_large_result.txt.tar.gz"
1facda98eef1085dc9d435ebf3f23a73)
inference_analysis_test(
test_analyzer_ernie_large
SRCS
analyzer_ernie_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${ERNIE_INSTALL_DIR}/model
--infer_data=${ERNIE_INSTALL_DIR}/data.txt
--refer_result=${ERNIE_INSTALL_DIR}/result.txt
--ernie_large=true)
if(NOT WIN32
AND NOT APPLE
AND TEST test_analyzer_ernie_large)
set_tests_properties(test_analyzer_ernie_large
PROPERTIES TIMEOUT 150 LABELS "RUN_TYPE=NIGHTLY")
endif()
if(WIN32 AND TEST test_analyzer_ernie_large)
set_tests_properties(test_analyzer_ernie_large PROPERTIES TIMEOUT 200)
endif()
# text_classification
set(TEXT_CLASSIFICATION_INSTALL_DIR
"${INFERENCE_DEMO_INSTALL_DIR}/text_classification")
download_model_and_data(
${TEXT_CLASSIFICATION_INSTALL_DIR} "text-classification-Senta.tar.gz"
3f0f440313ca50e26184e65ffd5809ab "text_classification_data.txt.tar.gz"
36ae620020cc3377f45ed330dd36238f)
inference_analysis_api_test(
test_analyzer_text_classification ${TEXT_CLASSIFICATION_INSTALL_DIR}
analyzer_text_classification_tester.cc)
# seq_conv1
set(SEQ_CONV1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/seq_conv1")
download_model_and_data_without_verify(
${SEQ_CONV1_INSTALL_DIR} "seq_conv1_model.tar.gz"
"seq_conv1_data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_seq_conv1 ${SEQ_CONV1_INSTALL_DIR}
analyzer_seq_conv1_tester.cc)
  # transformer; the dataset currently only works with batch_size=8
set(TRANSFORMER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/transformer")
download_model_and_data_without_verify(
${TRANSFORMER_INSTALL_DIR} "temp/transformer_model.tar.gz"
"temp/transformer_data.txt.tar.gz")
inference_analysis_test(
test_analyzer_transformer
SRCS
analyzer_transformer_compare_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRANSFORMER_INSTALL_DIR}/model
--infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt
--batch_size=8
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI})
inference_analysis_test(
test_analyzer_transformer_fuse
SRCS
analyzer_transformer_fuse_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRANSFORMER_INSTALL_DIR}/model
--infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt
--batch_size=8
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI})
inference_analysis_test(
test_analyzer_transformer_profile
SRCS
analyzer_transformer_profile_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRANSFORMER_INSTALL_DIR}/model
--infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt
--batch_size=8
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI})
# VIT-OCR
set(VIT_OCR_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/vit")
if(NOT EXISTS ${VIT_OCR_INSTALL_DIR}/vit_ocr.tgz)
inference_download_and_uncompress_without_verify(
${VIT_OCR_INSTALL_DIR} ${INFERENCE_URL} "ocr/vit_ocr.tgz")
endif()
inference_analysis_test(
test_analyzer_vit_ocr
SRCS
analyzer_vit_ocr_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${VIT_OCR_INSTALL_DIR}/vit_ocr/model
--infer_data=${VIT_OCR_INSTALL_DIR}/vit_ocr/datavit.txt)
# ocr
set(OCR_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/ocr")
if(NOT EXISTS ${OCR_INSTALL_DIR}/ocr.tar.gz)
inference_download_and_uncompress_without_verify(
${OCR_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/"
"inference-vis-demos/ocr.tar.gz")
endif()
inference_analysis_api_test(test_analyzer_ocr ${OCR_INSTALL_DIR}
analyzer_vis_tester.cc)
# densebox
set(DENSEBOX_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/densebox")
download_data_without_verify(${DENSEBOX_INSTALL_DIR} "densebox.tar.gz")
inference_analysis_test(
test_analyzer_detect_functional_mkldnn
SRCS
analyzer_detect_functional_mkldnn_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${DENSEBOX_INSTALL_DIR}/model
--infer_data=${DENSEBOX_INSTALL_DIR}/detect_input_50.txt
--infer_shape=${DENSEBOX_INSTALL_DIR}/shape_50.txt)
# mobilenet with transpose op
set(MOBILENET_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet")
if(NOT EXISTS ${MOBILENET_INSTALL_DIR}/mobilenet.tar.gz)
inference_download_and_uncompress_without_verify(
${MOBILENET_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/"
"inference-vis-demos/mobilenet.tar.gz")
endif()
inference_analysis_api_test(test_analyzer_mobilenet_transpose
${MOBILENET_INSTALL_DIR} analyzer_vis_tester.cc)
### Image classification tests with fake data
set(IMG_CLASS_TEST_APP "test_analyzer_image_classification")
set(IMG_CLASS_TEST_APP_SRC "analyzer_image_classification_tester.cc")
# build test binary to be used in subsequent tests
inference_analysis_api_test_with_fake_data_build(${IMG_CLASS_TEST_APP}
${IMG_CLASS_TEST_APP_SRC})
# googlenet
set(GOOGLENET_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/googlenet")
download_data_without_verify(${GOOGLENET_MODEL_DIR} "googlenet.tar.gz")
inference_analysis_api_test_with_fake_data_run(
test_analyzer_googlenet ${IMG_CLASS_TEST_APP} ${GOOGLENET_MODEL_DIR} false)
# resnet50
set(RESNET50_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/resnet50")
download_data_without_verify(${RESNET50_MODEL_DIR} "resnet50_model.tar.gz")
inference_analysis_api_test_with_fake_data_run(
test_analyzer_resnet50 ${IMG_CLASS_TEST_APP} ${RESNET50_MODEL_DIR} true)
if(WIN32)
set_tests_properties(test_analyzer_resnet50 PROPERTIES TIMEOUT 200)
endif()
# mobilenet with depthwise_conv op
set(MOBILENET_MODEL_DIR
"${INFERENCE_DEMO_INSTALL_DIR}/mobilenet_depthwise_conv")
download_data_without_verify(${MOBILENET_MODEL_DIR} "mobilenet_model.tar.gz")
inference_analysis_api_test_with_fake_data_run(
test_analyzer_mobilenet_depthwise_conv ${IMG_CLASS_TEST_APP}
${MOBILENET_MODEL_DIR} false)
if(WITH_MKLDNN)
### INT8 tests
set(INT8_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/int8v2")
## Image classification models
# ImageNet small dataset
  # It may already be downloaded for the Quant & INT8 unit tests
set(IMAGENET_DATA_ARCHIVE "imagenet_val_100_tail.tar.gz")
set(IMAGENET_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/imagenet")
set(IMAGENET_DATA_PATH "${IMAGENET_DATA_DIR}/data.bin")
download_int8_data_without_verify(${IMAGENET_DATA_DIR}
${IMAGENET_DATA_ARCHIVE})
# build test binary to be used in subsequent tests
set(INT8_IMG_CLASS_TEST_APP "test_analyzer_int8_image_classification")
set(INT8_IMG_CLASS_TEST_APP_SRC
"analyzer_int8_image_classification_tester.cc")
inference_analysis_api_test_build(${INT8_IMG_CLASS_TEST_APP}
${INT8_IMG_CLASS_TEST_APP_SRC})
# resnet50 int8
set(INT8_RESNET50_MODEL_DIR "${INT8_DATA_DIR}/resnet50")
download_int8_data_without_verify(${INT8_RESNET50_MODEL_DIR}
"resnet50_int8_model.tar.gz")
inference_analysis_api_int8_test_run(
test_analyzer_int8_resnet50 ${INT8_IMG_CLASS_TEST_APP}
${INT8_RESNET50_MODEL_DIR} ${IMAGENET_DATA_PATH})
# mobilenetv1 int8
set(INT8_MOBILENETV1_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv1")
download_int8_data_without_verify(${INT8_MOBILENETV1_MODEL_DIR}
"mobilenetv1_int8_model.tar.gz")
inference_analysis_api_int8_test_run(
test_analyzer_int8_mobilenetv1 ${INT8_IMG_CLASS_TEST_APP}
${INT8_MOBILENETV1_MODEL_DIR} ${IMAGENET_DATA_PATH})
# mobilenetv2 int8
set(INT8_MOBILENETV2_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv2")
download_int8_data_without_verify(${INT8_MOBILENETV2_MODEL_DIR}
"mobilenet_v2_int8_model.tar.gz")
inference_analysis_api_int8_test_run(
test_analyzer_int8_mobilenetv2 ${INT8_IMG_CLASS_TEST_APP}
${INT8_MOBILENETV2_MODEL_DIR} ${IMAGENET_DATA_PATH})
# resnet101 int8
set(INT8_RESNET101_MODEL_DIR "${INT8_DATA_DIR}/resnet101")
download_int8_data_without_verify(${INT8_RESNET101_MODEL_DIR}
"Res101_int8_model.tar.gz")
# inference_analysis_api_int8_test_run(test_analyzer_int8_resnet101 ${INT8_IMG_CLASS_TEST_APP} ${INT8_RESNET101_MODEL_DIR} ${IMAGENET_DATA_PATH})
# vgg16 int8
set(INT8_VGG16_MODEL_DIR "${INT8_DATA_DIR}/vgg16")
download_int8_data_without_verify(${INT8_VGG16_MODEL_DIR}
"VGG16_int8_model.tar.gz")
# inference_analysis_api_int8_test_run(test_analyzer_int8_vgg16 ${INT8_IMG_CLASS_TEST_APP} ${INT8_VGG16_MODEL_DIR} ${IMAGENET_DATA_PATH})
# vgg19 int8
set(INT8_VGG19_MODEL_DIR "${INT8_DATA_DIR}/vgg19")
download_int8_data_without_verify(${INT8_VGG19_MODEL_DIR}
"VGG19_int8_model.tar.gz")
# inference_analysis_api_int8_test_run(test_analyzer_int8_vgg19 ${INT8_IMG_CLASS_TEST_APP} ${INT8_VGG19_MODEL_DIR} ${IMAGENET_DATA_PATH})
# googlenet int8
set(INT8_GOOGLENET_MODEL_DIR "${INT8_DATA_DIR}/googlenet")
download_int8_data_without_verify(${INT8_GOOGLENET_MODEL_DIR}
"GoogleNet_int8_model.tar.gz")
inference_analysis_api_int8_test_run_custom_warmup_batch_size(
test_analyzer_int8_googlenet ${INT8_IMG_CLASS_TEST_APP}
${INT8_GOOGLENET_MODEL_DIR} ${IMAGENET_DATA_PATH} 10)
# mobilenetv3_large_x1_0 int8
set(INT8_MOBILENETV3_LARGE_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv3_large")
set(INT8_MOBILENETV3_FILE_NAME "MobileNetV3_large_x1_0_infer.tar")
if(NOT EXISTS
${INT8_MOBILENETV3_LARGE_MODEL_DIR}/${INT8_MOBILENETV3_FILE_NAME})
inference_download_and_uncompress_without_verify(
${INT8_MOBILENETV3_LARGE_MODEL_DIR}
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/"
${INT8_MOBILENETV3_FILE_NAME})
endif()
inference_analysis_test_run(
test_analyzer_int8_mobilenetv3_large
COMMAND
${INT8_IMG_CLASS_TEST_APP}
ARGS
--infer_model=${INT8_MOBILENETV3_LARGE_MODEL_DIR}/MobileNetV3_large_x1_0_infer
--infer_data=${IMAGENET_DATA_PATH}
--warmup_batch_size=50
--batch_size=1
--enable_int8_ptq=true
--cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
--iterations=100
--with_accuracy_layer=false)
### BFLOAT16 tests
# build test binary to be used in subsequent tests
set(BF16_IMG_CLASS_TEST_APP "test_analyzer_bfloat16_image_classification")
set(BF16_IMG_CLASS_TEST_APP_SRC
"analyzer_bfloat16_image_classification_tester.cc")
inference_analysis_api_test_build(${BF16_IMG_CLASS_TEST_APP}
${BF16_IMG_CLASS_TEST_APP_SRC})
# resnet50 bfloat16
inference_analysis_api_bfloat16_test_run(
test_analyzer_bfloat16_resnet50 ${BF16_IMG_CLASS_TEST_APP}
${INT8_RESNET50_MODEL_DIR} ${IMAGENET_DATA_PATH})
# googlenet bfloat16
inference_analysis_api_bfloat16_test_run(
test_analyzer_bfloat16_googlenet ${BF16_IMG_CLASS_TEST_APP}
${INT8_GOOGLENET_MODEL_DIR} ${IMAGENET_DATA_PATH})
# mobilenetv1 bfloat16
inference_analysis_api_bfloat16_test_run(
test_analyzer_bfloat16_mobilenetv1 ${BF16_IMG_CLASS_TEST_APP}
${INT8_MOBILENETV1_MODEL_DIR} ${IMAGENET_DATA_PATH})
# mobilenetv2 bfloat16
inference_analysis_api_bfloat16_test_run(
test_analyzer_bfloat16_mobilenetv2 ${BF16_IMG_CLASS_TEST_APP}
${INT8_MOBILENETV2_MODEL_DIR} ${IMAGENET_DATA_PATH})
# mobilenetv3_large
inference_analysis_test_run(
test_analyzer_bfloat16_mobilenetv3_large
COMMAND
${BF16_IMG_CLASS_TEST_APP}
ARGS
--infer_model=${INT8_MOBILENETV3_LARGE_MODEL_DIR}/MobileNetV3_large_x1_0_infer
--infer_data=${IMAGENET_DATA_PATH}
--batch_size=1
--enable_bf16=true
--paddle_num_threads=${CPU_NUM_THREADS_ON_CI}
--iterations=100
--with_accuracy_layer=false)
### Object detection models
set(PASCALVOC_DATA_PATH "${INT8_DATA_DIR}/pascalvoc_val_head_300.bin")
set(INT8_OBJ_DETECT_TEST_APP "test_analyzer_int8_object_detection")
set(INT8_OBJ_DETECT_TEST_APP_SRC "analyzer_int8_object_detection_tester.cc")
# download dataset if necessary
download_int8_data_without_verify(${INT8_DATA_DIR}
"pascalvoc_val_head_300.tar.gz")
# build test binary to be used in subsequent tests
inference_analysis_api_test_build(${INT8_OBJ_DETECT_TEST_APP}
${INT8_OBJ_DETECT_TEST_APP_SRC})
# mobilenet-ssd int8
set(INT8_MOBILENET_SSD_MODEL_DIR "${INT8_DATA_DIR}/mobilenet-ssd")
download_int8_data_without_verify(${INT8_MOBILENET_SSD_MODEL_DIR}
"mobilenet_ssd_int8_model.tar.gz")
inference_analysis_api_object_dection_int8_test_run(
test_analyzer_int8_mobilenet_ssd ${INT8_OBJ_DETECT_TEST_APP}
${INT8_MOBILENET_SSD_MODEL_DIR} ${PASCALVOC_DATA_PATH})
  ### Lexical analysis GRU model
set(GRU_PATH "${INFERENCE_DEMO_INSTALL_DIR}/gru")
download_gru_data_without_verify("${GRU_PATH}" "GRU_eval_data.tar.gz")
download_gru_data_without_verify("${GRU_PATH}" "GRU_eval_model_v2.tar.gz")
set(GRU_DATA_PATH "${GRU_PATH}/GRU_eval_data.bin")
set(GRU_MODEL_PATH "${GRU_PATH}/GRU_eval_model_v2")
set(LEXICAL_TEST_APP "test_analyzer_lexical_analysis")
set(LEXICAL_TEST_APP_SRC "analyzer_lexical_analysis_gru_tester.cc")
# build test binary to be used in subsequent tests
inference_analysis_api_test_build(${LEXICAL_TEST_APP}
${LEXICAL_TEST_APP_SRC})
  # run lexical analysis test
inference_analysis_api_lexical_test_run(
test_analyzer_lexical_gru ${LEXICAL_TEST_APP} ${GRU_MODEL_PATH}
${GRU_DATA_PATH})
# run bfloat16 lexical analysis test
inference_analysis_api_lexical_bfloat16_test_run(
test_analyzer_lexical_gru_bfloat16 ${LEXICAL_TEST_APP} ${GRU_MODEL_PATH}
${GRU_DATA_PATH})
# run post-training quantization lexical analysis test
inference_analysis_api_lexical_int8_test_run(
test_analyzer_lexical_gru_int8
${LEXICAL_TEST_APP}
${GRU_MODEL_PATH}
${GRU_DATA_PATH}
true # enable_int8_ptq
false # enable_int8_qat
false) # fuse_multi_gru
  # run post-training quantization lexical analysis test with multi_gru fusion
inference_analysis_api_lexical_int8_test_run(
test_analyzer_lexical_gru_int8_multi_gru
${LEXICAL_TEST_APP}
${GRU_MODEL_PATH}
${GRU_DATA_PATH}
true # enable_int8_ptq
false # enable_int8_qat
true) # fuse_multi_gru
  # run QAT GRU test
set(QAT_GRU_MODEL_ARCHIVE "GRU_quant_acc.tar.gz")
set(QAT_GRU_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/quant/GRU_quant2")
download_quant_data(${QAT_GRU_MODEL_DIR} ${QAT_GRU_MODEL_ARCHIVE}
cf207f8076dcfb8b74d8b6bdddf9090c)
inference_analysis_api_lexical_int8_test_run(
test_analyzer_lexical_gru_qat_int8
${LEXICAL_TEST_APP}
"${QAT_GRU_MODEL_DIR}/GRU_quant_acc"
${GRU_DATA_PATH}
false # enable_int8_ptq
true # enable_int8_qat
false) # fuse_multi_gru
### optimized FP32 vs. Quant INT8 tests
set(QUANT_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/quant")
set(QUANT_IMG_CLASS_TEST_APP "test_analyzer_quant_image_classification")
set(QUANT_IMG_CLASS_TEST_APP_SRC
"analyzer_quant_image_classification_tester.cc")
# build test binary to be used in subsequent tests
inference_analysis_api_test_build(${QUANT_IMG_CLASS_TEST_APP}
${QUANT_IMG_CLASS_TEST_APP_SRC})
# MobileNetV1 FP32 vs. Quant INT8
# The FP32 model should already be downloaded for slim Quant unit tests on Linux
set(QUANT2_MobileNetV1_MODEL_DIR "${QUANT_DATA_DIR}/MobileNetV1_quant2")
set(QUANT2_INT8_MobileNetV1_MODEL_DIR
"${QUANT_DATA_DIR}/MobileNetV1_quant2_int8")
if(NOT LINUX)
download_quant_data_without_verify(${QUANT2_MobileNetV1_MODEL_DIR}
"MobileNet_qat_perf.tar.gz")
endif()
download_quant_data_without_verify(${QUANT2_INT8_MobileNetV1_MODEL_DIR}
"MobileNet_qat_perf_int8.tar.gz")
inference_analysis_api_quant_test_run(
test_analyzer_quant_performance_benchmark
${QUANT_IMG_CLASS_TEST_APP}
${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float
${QUANT2_INT8_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf_int8
${IMAGENET_DATA_PATH}
false)
# Quant2 MobileNetV1
inference_analysis_api_quant_test_run(
test_analyzer_quant2_mobilenetv1_mkldnn
${QUANT_IMG_CLASS_TEST_APP}
${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float
${QUANT2_MobileNetV1_MODEL_DIR}/MobileNet_qat_perf/float
${IMAGENET_DATA_PATH}
true)
  # Quant2 ResNet50 with input/output scales in `fake_quantize_range_abs_max`
  # operators and `out_threshold` attributes, and with weight scales in
  # `fake_channel_wise_dequantize_max_abs` operators
set(QUANT2_RESNET50_CHANNELWISE_MODEL_DIR
"${QUANT_DATA_DIR}/ResNet50_quant2_channelwise")
set(QUANT2_RESNET50_CHANNELWISE_MODEL_ARCHIVE
"ResNet50_qat_channelwise.tar.gz")
if(NOT LINUX)
download_quant_data_without_verify(
${QUANT2_RESNET50_CHANNELWISE_MODEL_DIR}
${QUANT2_RESNET50_CHANNELWISE_MODEL_ARCHIVE})
endif()
set(QUANT2_RESNET50_MODEL
${QUANT2_RESNET50_CHANNELWISE_MODEL_DIR}/ResNet50_qat_channelwise)
inference_analysis_api_quant_test_run(
test_analyzer_quant2_resnet50_channelwise_mkldnn
${QUANT_IMG_CLASS_TEST_APP} ${QUANT2_RESNET50_MODEL}
${QUANT2_RESNET50_MODEL} ${IMAGENET_DATA_PATH} true)
### Other tests
# MKLDNN quantizer config
set(MKLDNN_QUANTIZER_CONFIG_TEST_APP "test_mkldnn_quantizer_config")
set(MKLDNN_QUANTIZER_CONFIG_TEST_APP_SRC
"mkldnn_quantizer_config_tester.cc")
inference_analysis_api_test_build(${MKLDNN_QUANTIZER_CONFIG_TEST_APP}
${MKLDNN_QUANTIZER_CONFIG_TEST_APP_SRC})
inference_analysis_test_run(test_mkldnn_quantizer_config COMMAND
${MKLDNN_QUANTIZER_CONFIG_TEST_APP})
# preprocess data2bin imagenet
download_int8_data_without_verify(${INT8_DATA_DIR} "imagenet_small.tar.gz")
set(IMAGENET_SMALL_DATA_DIR "${INT8_DATA_DIR}/imagenet_small")
set(IMAGENET_SMALL_OUTPUT_FILE "imagenet_small.bin")
preprocess_data2bin_test_run(
preprocess_local_imagenet "full_ILSVRC2012_val_preprocess.py"
${IMAGENET_SMALL_DATA_DIR} ${IMAGENET_SMALL_OUTPUT_FILE})
# preprocess data2bin pascalvoc
download_int8_data_without_verify(${INT8_DATA_DIR} "pascalvoc_small.tar.gz")
set(PASCALVOC_SMALL_DATA_DIR "${INT8_DATA_DIR}/pascalvoc_small")
set(PASCALVOC_SMALL_OUTPUT_FILE "pascalvoc_small.bin")
preprocess_data2bin_test_run(
preprocess_local_pascalvoc "full_pascalvoc_test_preprocess.py"
${PASCALVOC_SMALL_DATA_DIR} ${PASCALVOC_SMALL_OUTPUT_FILE})
endif()
# bert, max_len=20, embedding_dim=128
set(BERT_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/bert_emb128")
download_model_and_data_without_verify(
${BERT_INSTALL_DIR} "bert_emb128_model.tar.gz" "bert_data_len20.txt.tar.gz")
inference_analysis_api_test(test_analyzer_bert ${BERT_INSTALL_DIR}
analyzer_bert_tester.cc)
# multiple models prediction
set(MMP_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/multi_model_prediction")
download_data_without_verify(${MMP_INSTALL_DIR}
PaddleInference/mobilenet_v2_models.tar.gz)
inference_multiple_models_analysis_api_test(
test_analyzer_multi_model_prediction ${MMP_INSTALL_DIR}
analyzer_mmp_tester.cc)
if(WITH_GPU AND TENSORRT_FOUND)
set(TRT_MODEL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/trt_models")
if(NOT EXISTS ${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models.tar.gz)
inference_download_and_uncompress(
${TRT_MODEL_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test
"trt_inference_test_models.tar.gz" 3dcccdc38b549b6b1b4089723757bd98)
endif()
set(TEST_SPLIT_CONVERTER_MODEL
"${TRT_MODEL_INSTALL_DIR}/trt_split_op_converter_test")
if(NOT EXISTS ${TEST_SPLIT_CONVERTER_MODEL}/split_converter.tgz)
inference_download_and_uncompress_without_verify(
${TEST_SPLIT_CONVERTER_MODEL} ${INFERENCE_URL}/tensorrt_test
"split_converter.tgz")
endif()
inference_analysis_test(
trt_mobilenet_test
SRCS
trt_mobilenet_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
inference_analysis_test(
trt_resnet50_test
SRCS
trt_resnet50_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
inference_analysis_test(
trt_resnext_test
SRCS
trt_resnext_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
inference_analysis_test(
trt_fc_prelu_test
SRCS
trt_fc_prelu_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
inference_analysis_test(
trt_cascade_rcnn_test
SRCS
trt_cascade_rcnn_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
inference_analysis_test(
trt_split_converter_test
SRCS
trt_split_converter_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TEST_SPLIT_CONVERTER_MODEL}/)
inference_analysis_test(
test_analyzer_capi_exp_gpu
SRCS
analyzer_capi_exp_gpu_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_gpu
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_gpu paddle_inference_c)
endif()
inference_analysis_test(
test_analyzer_capi_exp_xpu
SRCS
analyzer_capi_exp_xpu_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_xpu
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_xpu paddle_inference_c)
endif()
set(TRT_MODEL_QUANT_RESNET_DIR
"${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model")
if(NOT EXISTS ${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model.tgz)
inference_download_and_uncompress_without_verify(
${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test
"small_quant_model.tgz")
endif()
inference_analysis_test(
trt_quant_int8_test
SRCS
trt_quant_int8_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_QUANT_RESNET_DIR})
set(TRT_MODEL_QUANT_YOLOV3_DIR
"${INFERENCE_DEMO_INSTALL_DIR}/yolov3_r50_quant_aware")
if(NOT EXISTS ${INFERENCE_DEMO_INSTALL_DIR}/yolov3_r50_quant_aware.tgz)
inference_download_and_uncompress_without_verify(
${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test
"yolov3_r50_quant_aware.tgz")
endif()
inference_analysis_test(
trt_quant_int8_yolov3_r50_test
SRCS
trt_quant_int8_yolov3_r50_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_QUANT_YOLOV3_DIR})
set(TEST_TRT_DYNAMIC_MODEL2
"${TRT_MODEL_INSTALL_DIR}/complex_model_dynamic")
if(NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL2}/complex_model_dynamic2.tar.gz)
inference_download_and_uncompress_without_verify(
${TEST_TRT_DYNAMIC_MODEL2} ${INFERENCE_URL}/tensorrt_test
"complex_model_dynamic2.tar.gz")
endif()
set(TEST_TRT_DYNAMIC_MODEL
"${TRT_MODEL_INSTALL_DIR}/conv_bn_swish_split_gelu")
if(NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL}/conv_bn_swish_split_gelu.tar.gz)
inference_download_and_uncompress(
${TEST_TRT_DYNAMIC_MODEL} ${INFERENCE_URL}/tensorrt_test
"conv_bn_swish_split_gelu.tar.gz" 2a5e8791e47b221b4f782151d76da9c6)
endif()
inference_analysis_test(
trt_dynamic_shape_test
SRCS
trt_dynamic_shape_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TRT_MODEL_INSTALL_DIR})
set(TEST_TRT_ERNIE_MODEL "${TRT_MODEL_INSTALL_DIR}/ernie_test")
if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4.tar.gz)
inference_download_and_uncompress(
${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test
"ernie_model_4.tar.gz" 5fa371efa75706becbaad79195d2ca68)
endif()
inference_analysis_test(
test_trt_dynamic_shape_ernie
SRCS
trt_dynamic_shape_ernie_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4)
set(TEST_TRT_TRANSFORMER_PRUNE_MODEL
"${TRT_MODEL_INSTALL_DIR}/transformer_prune")
if(NOT EXISTS ${TEST_TRT_TRANSFORMER_PRUNE_MODEL}/transformer_prune.tar.gz)
inference_download_and_uncompress(
${TEST_TRT_TRANSFORMER_PRUNE_MODEL} ${INFERENCE_URL}/tensorrt_test
"transformer_prune.tar.gz" 77b56dc73ff0cf44ddb1ce9ca0b0f471)
endif()
inference_analysis_test(
test_trt_dynamic_shape_transformer_prune
SRCS
trt_dynamic_shape_transformer_prune_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TEST_TRT_TRANSFORMER_PRUNE_MODEL}/transformer_prune)
if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4_unserialized.tgz)
inference_download_and_uncompress(
${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test
"ernie_model_4_unserialized.tgz" 833d73fc6a7f7e1ee4a1fd6419209e55)
endif()
inference_analysis_test(
test_trt_dynamic_shape_ernie_ser_deser
SRCS
trt_dynamic_shape_ernie_serialize_deserialize_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4_unserialized)
if(NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4_fp16_unserialized.tgz)
inference_download_and_uncompress(
${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test
"ernie_model_4_fp16_unserialized.tgz" c5ff2d0cad79953ffbf2b8b9e2fae6e4)
endif()
inference_analysis_test(
test_trt_dynamic_shape_ernie_fp16_ser_deser
SRCS
trt_dynamic_shape_ernie_fp16_serialize_deserialize_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4_fp16_unserialized)
endif()
set(LITE_MODEL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/lite")
download_data_without_verify(${LITE_MODEL_INSTALL_DIR} "mul_model_fp32.tgz")
inference_analysis_test(
lite_mul_model_test
SRCS
lite_mul_model_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${LITE_MODEL_INSTALL_DIR})
inference_analysis_test(
lite_resnet50_test
SRCS
lite_resnet50_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${RESNET50_MODEL_DIR})
inference_analysis_test(
test_analyzer_capi_exp
SRCS
analyzer_capi_exp_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${RESNET50_MODEL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp paddle_inference_c)
endif()
inference_analysis_test(
test_analyzer_capi_exp_pd_config
SRCS
analyzer_capi_exp_pd_config_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${MOBILENET_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_pd_config
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_pd_config paddle_inference_c)
endif()
inference_analysis_test(
test_analyzer_capi_exp_pd_tensor
SRCS
analyzer_capi_exp_pd_tensor_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${MOBILENET_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_pd_tensor
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_pd_tensor paddle_inference_c)
endif()
if(NOT APPLE AND NOT WIN32)
inference_analysis_test(
test_analyzer_capi_exp_pd_threads
SRCS
analyzer_capi_exp_pd_threads_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${MOBILENET_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_pd_threads
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_pd_threads
paddle_inference_c)
endif()
endif()
inference_analysis_test(
test_analyzer_zerocopytensor_tensor
SRCS
analyzer_zerocopy_tensor_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${OCR_INSTALL_DIR}/model)
if(WITH_DISTRIBUTE AND WITH_PSCORE)
inference_analysis_test(
test_analyzer_dist_model
SRCS
analyzer_dist_model_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${OCR_INSTALL_DIR}/model)
endif()
if(WITH_DISTRIBUTE
AND WITH_PSCORE
AND WITH_XPU
AND WITH_XPU_BKCL)
inference_analysis_test(
test_analyzer_dist_model_xpu
SRCS
analyzer_dist_model_xpu_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${OCR_INSTALL_DIR}/model)
endif()
inference_analysis_test(
test_analyzer_paddletensor_tensor
SRCS
analyzer_paddle_tensor_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${OCR_INSTALL_DIR}/model
--infer_data=${OCR_INSTALL_DIR}/data.txt
--refer_result=${OCR_INSTALL_DIR}/result.txt)
if(WITH_MKLDNN)
inference_analysis_test(
test_analyzer_capi_exp_int
SRCS
analyzer_capi_exp_int_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${INT8_DATA_DIR}/resnet50/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_int
paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_int paddle_inference_c)
endif()
endif()
inference_analysis_test(
test_analyzer_capi_exp_ner
SRCS
analyzer_capi_exp_ner_tester.cc
EXTRA_DEPS
${INFERENCE_C_EXTRA_DEPS}
ARGS
--infer_model=${CHINESE_NER_INSTALL_DIR}/model)
if(WIN32)
target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c_shared)
else()
target_link_libraries(test_analyzer_capi_exp_ner paddle_inference_c)
endif()
if(WITH_GPU)
inference_analysis_test(
paddle_infer_api_test
SRCS
paddle_infer_api_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${RESNET50_MODEL_DIR})
inference_analysis_test(
paddle_infer_api_copy_tensor_tester
SRCS
paddle_infer_api_copy_tensor_tester.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${RESNET50_MODEL_DIR})
set_tests_properties(paddle_infer_api_copy_tensor_tester PROPERTIES TIMEOUT
30)
endif()
cc_test(
paddle_infer_api_errors_test
SRCS paddle_infer_api_errors_tester.cc
DEPS paddle_inference_api)
if(WITH_GPU AND TENSORRT_FOUND)
set_tests_properties(trt_resnext_test PROPERTIES TIMEOUT 300)
set_tests_properties(trt_quant_int8_yolov3_r50_test PROPERTIES TIMEOUT 400)
set_tests_properties(trt_resnet50_test PROPERTIES TIMEOUT 300)
set_tests_properties(trt_cascade_rcnn_test PROPERTIES TIMEOUT 300)
set_tests_properties(test_trt_dynamic_shape_ernie_ser_deser
PROPERTIES TIMEOUT 300)
set_tests_properties(test_trt_dynamic_shape_ernie_fp16_ser_deser
PROPERTIES TIMEOUT 300)
set_tests_properties(test_trt_dynamic_shape_ernie PROPERTIES TIMEOUT 300)
endif()
if(WITH_MKLDNN)
set_tests_properties(test_analyzer_int8_resnet50 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_int8_mobilenet_ssd PROPERTIES TIMEOUT
120)
set_tests_properties(test_analyzer_quant_performance_benchmark
PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_int8_mobilenetv2 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_int8_mobilenetv1 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_int8_mobilenetv3_large PROPERTIES TIMEOUT
120)
set_tests_properties(test_analyzer_quant2_mobilenetv1_mkldnn
PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_quant2_resnet50_channelwise_mkldnn
PROPERTIES TIMEOUT 120)
endif()
set_tests_properties(lite_resnet50_test PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_mobilenet_transpose PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_resnet50 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_ner PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_ernie_int8 PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_googlenet PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_small_dam PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_transformer PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_mobilenet_depthwise_conv PROPERTIES TIMEOUT
120)
if(WITH_GPU)
set_tests_properties(test_analyzer_bert PROPERTIES TIMEOUT 120)
set_tests_properties(test_analyzer_ernie PROPERTIES TIMEOUT 120)
endif()
if(WITH_GPU AND TENSORRT_FOUND)
set_tests_properties(trt_mobilenet_test PROPERTIES TIMEOUT 120)
if(WITH_MKLDNN)
set_tests_properties(test_analyzer_bfloat16_resnet50 PROPERTIES TIMEOUT
120)
endif()
endif()
if(ON_INFER OR WITH_GPU)
set_tests_properties(test_analyzer_transformer_profile PROPERTIES TIMEOUT
120)
endif()
if(WITH_IPU)
  # word2vec sample
set(WORD2VEC_INSTALL_DIR
"${INFERENCE_DEMO_INSTALL_DIR}/word2vec/word2vec.inference.model")
inference_analysis_test(
ipu_word2vec_sample
SRCS
ipu_word2vec_sample.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${WORD2VEC_INSTALL_DIR})
# ERNIE
set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie")
inference_analysis_api_test(
ipu_ernie_test ${ERNIE_INSTALL_DIR} ipu_ernie_test.cc ARGS --warmup=true
--repeat=10)
inference_analysis_api_test(
ipu_ernie_fp16_test ${ERNIE_INSTALL_DIR} ipu_ernie_fp16_test.cc ARGS
--warmup=true --repeat=10)
# Resnet50
set(RESNET50_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/resnet50")
inference_analysis_test(
ipu_resnet50_test
SRCS
ipu_resnet50_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${RESNET50_MODEL_DIR}
--warmup=true
--repeat=10)
inference_analysis_test(
ipu_resnet50_fp16_test
SRCS
ipu_resnet50_fp16_test.cc
EXTRA_DEPS
paddle_inference_shared
ARGS
--infer_model=${RESNET50_MODEL_DIR}
--warmup=true
--repeat=10)
  # Only Resnet50 and Ernie are supported currently
inference_analysis_api_test(
ipu_multi_model_profile
SRCS
ipu_multi_model_profile.cc
ARGS
--model_name="Resnet50"
--infer_model=${RESNET50_MODEL_DIR}
--warmup=true
--repeat=10)
endif()
set(inference_deps ${analysis_deps} paddle_inference_api analysis
naive_executor ${GLOB_PASS_LIB})
if(WITH_TESTING)
if(NOT APPLE AND NOT WIN32)
inference_base_test(
test_api_impl
SRCS
api_impl_tester.cc
DEPS
paddle_inference_shared
ARGS
--word2vec_dirname=${WORD2VEC_MODEL_DIR}
--book_dirname=${IMG_CLS_RESNET_INSTALL_DIR})
elseif(WIN32)
inference_base_test(
test_api_impl
SRCS
api_impl_tester.cc
DEPS
${inference_deps}
ARGS
--word2vec_dirname=${WORD2VEC_MODEL_DIR}
--book_dirname=${IMG_CLS_RESNET_INSTALL_DIR})
endif()
endif()
if(NOT APPLE AND NOT WIN32)
cc_test_old(
test_analysis_predictor
SRCS
analysis_predictor_tester.cc
DEPS
paddle_inference_shared
ARGS
--dirname=${WORD2VEC_MODEL_DIR})
elseif(WIN32)
cc_test_old(
test_analysis_predictor
SRCS
analysis_predictor_tester.cc
DEPS
analysis_predictor
benchmark
${inference_deps}
ARGS
--dirname=${WORD2VEC_MODEL_DIR})
endif()
if(WITH_TESTING AND WITH_MKLDNN)
if(NOT APPLE AND NOT WIN32)
cc_test(
test_mkldnn_quantizer
SRCS mkldnn_quantizer_tester.cc
DEPS paddle_inference_shared ARGS --dirname=${WORD2VEC_MODEL_DIR})
elseif(WIN32)
cc_test(
test_mkldnn_quantizer
SRCS mkldnn_quantizer_tester.cc
DEPS analysis_predictor benchmark ${inference_deps} ARGS
--dirname=${WORD2VEC_MODEL_DIR})
endif()
endif()
if(WITH_TESTING AND TEST test_api_impl)
if(NOT APPLE)
set_tests_properties(test_api_impl PROPERTIES TIMEOUT 120)
endif()
endif()
endif()
...@@ -27,9 +27,9 @@ ...@@ -27,9 +27,9 @@
#include "paddle/fluid/inference/api/helper.h" #include "paddle/fluid/inference/api/helper.h"
#include "paddle/fluid/inference/api/paddle_api.h" #include "paddle/fluid/inference/api/paddle_api.h"
#include "paddle/fluid/inference/api/paddle_inference_api.h" #include "paddle/fluid/inference/api/paddle_inference_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h"
#include "paddle/fluid/inference/utils/io_utils.h" #include "paddle/fluid/inference/utils/io_utils.h"
#include "paddle/phi/backends/cpu/cpu_info.h" #include "paddle/phi/backends/cpu/cpu_info.h"
#include "test/cpp/inference/api/tester_helper.h"
DEFINE_string(dirname, "", "dirname to tests."); DEFINE_string(dirname, "", "dirname to tests.");
......
...@@ -13,7 +13,7 @@ ...@@ -13,7 +13,7 @@
// limitations under the License. // limitations under the License.
#include "paddle/fluid/framework/transfer_scope_cache.h" #include "paddle/fluid/framework/transfer_scope_cache.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -13,8 +13,8 @@ limitations under the License. */ ...@@ -13,8 +13,8 @@ limitations under the License. */
#include <iostream> #include <iostream>
#include "paddle/fluid/inference/api/paddle_analysis_config.h" #include "paddle/fluid/inference/api/paddle_analysis_config.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h"
#include "paddle/phi/backends/cpu/cpu_info.h" #include "paddle/phi/backends/cpu/cpu_info.h"
#include "test/cpp/inference/api/tester_helper.h"
DEFINE_bool(enable_mkldnn, true, "Enable MKLDNN"); DEFINE_bool(enable_mkldnn, true, "Enable MKLDNN");
......
...@@ -24,7 +24,7 @@ limitations under the License. */ ...@@ -24,7 +24,7 @@ limitations under the License. */
#endif #endif
#include "paddle/fluid/inference/capi_exp/pd_inference_api.h" #include "paddle/fluid/inference/capi_exp/pd_inference_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -20,7 +20,7 @@ limitations under the License. */ ...@@ -20,7 +20,7 @@ limitations under the License. */
#include <vector> #include <vector>
#include "paddle/fluid/inference/capi_exp/pd_inference_api.h" #include "paddle/fluid/inference/capi_exp/pd_inference_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
#include <vector> #include <vector>
#include "paddle/fluid/inference/capi_exp/pd_inference_api.h" #include "paddle/fluid/inference/capi_exp/pd_inference_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -20,7 +20,7 @@ limitations under the License. */ ...@@ -20,7 +20,7 @@ limitations under the License. */
#include <vector> #include <vector>
#include "paddle/fluid/inference/capi_exp/pd_inference_api.h" #include "paddle/fluid/inference/capi_exp/pd_inference_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -23,7 +23,7 @@ limitations under the License. */ ...@@ -23,7 +23,7 @@ limitations under the License. */
#include <vector> #include <vector>
#include "paddle/fluid/inference/capi_exp/pd_inference_api.h" #include "paddle/fluid/inference/capi_exp/pd_inference_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -23,7 +23,7 @@ limitations under the License. */ ...@@ -23,7 +23,7 @@ limitations under the License. */
#include <vector> #include <vector>
#include "paddle/fluid/inference/capi_exp/pd_inference_api.h" #include "paddle/fluid/inference/capi_exp/pd_inference_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -21,7 +21,7 @@ limitations under the License. */ ...@@ -21,7 +21,7 @@ limitations under the License. */
#include "paddle/fluid/inference/capi_exp/pd_config.h" #include "paddle/fluid/inference/capi_exp/pd_config.h"
#include "paddle/fluid/inference/capi_exp/pd_inference_api.h" #include "paddle/fluid/inference/capi_exp/pd_inference_api.h"
#include "paddle/fluid/inference/capi_exp/pd_utils.h" #include "paddle/fluid/inference/capi_exp/pd_utils.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -20,7 +20,7 @@ limitations under the License. */ ...@@ -20,7 +20,7 @@ limitations under the License. */
#include <vector> #include <vector>
#include "paddle/fluid/inference/capi_exp/pd_inference_api.h" #include "paddle/fluid/inference/capi_exp/pd_inference_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -20,7 +20,7 @@ limitations under the License. */ ...@@ -20,7 +20,7 @@ limitations under the License. */
#include <vector> #include <vector>
#include "paddle/fluid/inference/capi/paddle_c_api.h" #include "paddle/fluid/inference/capi/paddle_c_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -20,7 +20,7 @@ limitations under the License. */ ...@@ -20,7 +20,7 @@ limitations under the License. */
#include <vector> #include <vector>
#include "paddle/fluid/inference/capi/paddle_c_api.h" #include "paddle/fluid/inference/capi/paddle_c_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
#include <vector> #include <vector>
#include "paddle/fluid/inference/capi/paddle_c_api.h" #include "paddle/fluid/inference/capi/paddle_c_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -24,7 +24,7 @@ limitations under the License. */ ...@@ -24,7 +24,7 @@ limitations under the License. */
#include "paddle/fluid/inference/capi/c_api_internal.h" #include "paddle/fluid/inference/capi/c_api_internal.h"
#include "paddle/fluid/inference/capi/paddle_c_api.h" #include "paddle/fluid/inference/capi/paddle_c_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -20,7 +20,7 @@ limitations under the License. */ ...@@ -20,7 +20,7 @@ limitations under the License. */
#include <vector> #include <vector>
#include "paddle/fluid/inference/capi/paddle_c_api.h" #include "paddle/fluid/inference/capi/paddle_c_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -20,7 +20,7 @@ limitations under the License. */ ...@@ -20,7 +20,7 @@ limitations under the License. */
#include <vector> #include <vector>
#include "paddle/fluid/inference/capi/paddle_c_api.h" #include "paddle/fluid/inference/capi/paddle_c_api.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
namespace paddle { namespace paddle {
namespace inference { namespace inference {
......
...@@ -15,7 +15,7 @@ ...@@ -15,7 +15,7 @@
#include <vector> #include <vector>
#include "paddle/fluid/inference/analysis/helper.h" #include "paddle/fluid/inference/analysis/helper.h"
#include "paddle/fluid/inference/tests/api/tester_helper.h" #include "test/cpp/inference/api/tester_helper.h"
const int FLAGS_max_turn_num = 1; const int FLAGS_max_turn_num = 1;
......
...@@ -17,8 +17,8 @@ limitations under the License. */ ...@@ -17,8 +17,8 @@ limitations under the License. */
#include <fstream> #include <fstream>
#include <iostream> #include <iostream>
#include "paddle/fluid/inference/tests/api/tester_helper.h"
#include "paddle/phi/common/place.h" #include "paddle/phi/common/place.h"
#include "test/cpp/inference/api/tester_helper.h"
DEFINE_string(infer_shape, "", "data shape file"); DEFINE_string(infer_shape, "", "data shape file");
DEFINE_int32(sample, 20, "number of sample"); DEFINE_int32(sample, 20, "number of sample");
......
@@ -17,7 +17,7 @@ limitations under the License. */
 #include <fstream>
 #include <iostream>
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 DEFINE_string(infer_shape, "", "data shape file");
 DEFINE_int32(sample, 20, "number of sample");
...
@@ -17,8 +17,8 @@
 #include "paddle/fluid/framework/op_desc.h"
 #include "paddle/fluid/framework/program_desc.h"
 #include "paddle/fluid/framework/scope.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
 #include "paddle/fluid/inference/utils/singleton.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -17,8 +17,8 @@
 #include "paddle/fluid/framework/op_desc.h"
 #include "paddle/fluid/framework/program_desc.h"
 #include "paddle/fluid/framework/scope.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
 #include "paddle/fluid/inference/utils/singleton.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/analyzer_ernie_tester.h"
+#include "test/cpp/inference/api/analyzer_ernie_tester.h"
 namespace paddle {
 namespace inference {
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/analyzer_ernie_tester.h"
+#include "test/cpp/inference/api/analyzer_ernie_tester.h"
 namespace paddle {
 namespace inference {
...
@@ -14,7 +14,7 @@
 #pragma once
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -15,7 +15,7 @@ limitations under the License. */
 #include <fstream>
 #include <iostream>
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 DEFINE_bool(disable_mkldnn_fc, false, "Disable usage of MKL-DNN's FC op");
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <iostream>
 #include "paddle/fluid/inference/api/paddle_analysis_config.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 DEFINE_bool(enable_mkldnn, true, "Enable MKLDNN");
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <iostream>
 #include "paddle/fluid/inference/api/paddle_analysis_config.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 DEFINE_bool(enable_mkldnn, true, "Enable MKLDNN");
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <iostream>
 #include "paddle/fluid/inference/api/paddle_analysis_config.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 // setting iterations to 0 means processing the whole dataset
 namespace paddle {
...
@@ -15,7 +15,7 @@
 #include <random>
 #include "paddle/fluid/framework/transfer_scope_cache.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 // Here add missing commands
 DEFINE_string(infer_model2, "", "model path");
@@ -35,7 +35,9 @@ void SetConfig(AnalysisConfig* config, const std::string& infer_model) {
 }
 std::unique_ptr<PaddlePredictor> InitializePredictor(
-    const std::string& infer_model, std::vector<float>& data, bool use_mkldnn) {
+    const std::string& infer_model,
+    const std::vector<float>& data,
+    bool use_mkldnn) {
   AnalysisConfig cfg;
   SetConfig(&cfg, infer_model);
   if (use_mkldnn) {
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,8 +16,8 @@
 #include "paddle/fluid/framework/op_desc.h"
 #include "paddle/fluid/framework/program_desc.h"
 #include "paddle/fluid/framework/scope.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
 #include "paddle/fluid/inference/utils/singleton.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <iostream>
 #include "paddle/fluid/inference/api/paddle_analysis_config.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 DEFINE_bool(enable_mkldnn, true, "Enable MKLDNN");
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 DEFINE_bool(with_precision_check, true, "turn on test");
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -13,7 +13,7 @@
 // limitations under the License.
 #include "paddle/fluid/inference/analysis/helper.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,8 +16,8 @@ limitations under the License. */
 #include <fstream>
 #include <iostream>
-#include "paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,8 +16,8 @@ limitations under the License. */
 #include <fstream>
 #include <iostream>
-#include "paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,8 +16,8 @@ limitations under the License. */
 #include <fstream>
 #include <iostream>
-#include "paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,8 +16,8 @@ limitations under the License. */
 #include <fstream>
 #include <iostream>
-#include "paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,8 +16,8 @@ limitations under the License. */
 #include <fstream>
 #include <iostream>
-#include "paddle/fluid/inference/tests/api/analyzer_seq_pool1_tester_helper.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/analyzer_seq_pool1_tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -20,7 +20,7 @@ limitations under the License. */
 #include <utility>
 #include <vector>
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/analyzer_transformer_tester_helper.h"
+#include "test/cpp/inference/api/analyzer_transformer_tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/analyzer_transformer_tester_helper.h"
+#include "test/cpp/inference/api/analyzer_transformer_tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/analyzer_transformer_tester_helper.h"
+#include "test/cpp/inference/api/analyzer_transformer_tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@
 #include <utility>
 #include <vector>
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -17,7 +17,7 @@ limitations under the License. */
 #include <fstream>
 #include <iostream>
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -15,7 +15,7 @@ limitations under the License. */
 #include <fstream>
 #include <iostream>
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,8 +16,8 @@
 #include "paddle/fluid/framework/op_desc.h"
 #include "paddle/fluid/framework/program_desc.h"
 #include "paddle/fluid/framework/scope.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
 #include "paddle/fluid/inference/utils/singleton.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -20,7 +20,7 @@ limitations under the License. */
 #include "gflags/gflags.h"
 #include "paddle/fluid/framework/convert_utils.h"
 #include "paddle/fluid/inference/api/api_impl.h"
-#include "paddle/fluid/inference/tests/test_helper.h"
+#include "test/cpp/inference/test_helper.h"
 #ifdef __clang__
 #define ACC_DIFF 4e-3
...
@@ -13,7 +13,7 @@
 // limitations under the License.
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -18,7 +18,7 @@ limitations under the License. */
 #include <cmath>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -15,7 +15,7 @@ limitations under the License. */
 #include <cmath>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -20,7 +20,7 @@ limitations under the License. */
 #include <thread>  // NOLINT
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -18,7 +18,7 @@ limitations under the License. */
 #include <cmath>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <iostream>
 #include "paddle/fluid/inference/api/paddle_mkldnn_quantizer_config.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -22,8 +22,8 @@ limitations under the License. */
 #include "gflags/gflags.h"
 #include "glog/logging.h"
 #include "paddle/fluid/inference/api/paddle_infer_contrib.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
 #include "paddle/fluid/platform/float16.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle_infer {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle_infer {
...
@@ -35,10 +35,10 @@
 #include "paddle/fluid/inference/api/helper.h"
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 #include "paddle/fluid/inference/api/paddle_inference_pass.h"
-#include "paddle/fluid/inference/tests/api/config_printer.h"
-#include "paddle/fluid/inference/tests/test_helper.h"
 #include "paddle/fluid/inference/utils/benchmark.h"
 #include "paddle/fluid/platform/profiler/event_tracing.h"
+#include "test/cpp/inference/api/config_printer.h"
+#include "test/cpp/inference/test_helper.h"
 DEFINE_string(model_name, "", "model name");
 DEFINE_string(infer_model, "", "model path");
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -24,7 +24,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h"
+#include "test/cpp/inference/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h"
 namespace paddle {
 namespace inference {
...
@@ -24,7 +24,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h"
+#include "test/cpp/inference/api/trt_dynamic_shape_ernie_serialize_deserialize_test.h"
 namespace paddle {
 namespace inference {
...
@@ -26,7 +26,7 @@ limitations under the License. */
 #include <vector>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -17,7 +17,7 @@ limitations under the License. */
 #include "gflags/gflags.h"
 #include "paddle/fluid/inference/tensorrt/helper.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -18,7 +18,7 @@ limitations under the License. */
 #include <numeric>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -15,7 +15,7 @@ limitations under the License. */
 #include <numeric>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -16,7 +16,7 @@ limitations under the License. */
 #include <gtest/gtest.h>
 #include "gflags/gflags.h"
-#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+#include "test/cpp/inference/api/trt_test_helper.h"
 namespace paddle {
 namespace inference {
...
@@ -20,7 +20,7 @@ limitations under the License. */
 #include "gflags/gflags.h"
 #include "glog/logging.h"
 #include "gtest/gtest.h"
-#include "paddle/fluid/inference/tests/api/tester_helper.h"
+#include "test/cpp/inference/api/tester_helper.h"
 namespace paddle {
 namespace inference {
...