if (NOT APPLE AND NOT WIN32)
    set(INFERENCE_EXTRA_DEPS paddle_fluid_shared)
else()
    set(INFERENCE_EXTRA_DEPS paddle_inference_api paddle_fluid_api ir_pass_manager analysis_predictor benchmark)
endif()

if(WITH_GPU AND TENSORRT_FOUND)
    set(INFERENCE_EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} analysis ${analysis_deps})
endif()

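# Download and extract ${data_file} from ${INFERENCE_URL} into ${install_dir},
# unless the archive is already present.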
function(download_data install_dir data_file)
    if (NOT EXISTS ${install_dir}/${data_file})
        inference_download_and_uncompress(${install_dir} ${INFERENCE_URL} ${data_file})
    endif()
endfunction()

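# Like download_data, but fetches archives from the int8 sub-directory of ${INFERENCE_URL}.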
function(download_int8_data install_dir data_file)
    if (NOT EXISTS ${install_dir}/${data_file})
        inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/int8 ${data_file})
    endif()
endfunction()

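# Like download_data, but fetches archives from the int8/QAT_models sub-directory of ${INFERENCE_URL}.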
function(download_qat_data install_dir data_file)
    if (NOT EXISTS ${install_dir}/${data_file})
        inference_download_and_uncompress(${install_dir} ${INFERENCE_URL}/int8/QAT_models ${data_file})
    endif()
endfunction()

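# Download both the model archive and the data archive of a test into ${install_dir}.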
function(download_model_and_data install_dir model_name data_name)
    download_data(${install_dir} ${model_name})
    download_data(${install_dir} ${data_name})
endfunction()

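# Download a reference-result archive (used as --refer_result for accuracy checks).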
function(download_result install_dir result_name)
    download_data(${install_dir} ${result_name})
endfunction()

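# Register an analysis-API test that expects model/, data.txt and result.txt under ${install_dir}.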
function(inference_analysis_api_test target install_dir filename)
    inference_analysis_test(${target} SRCS ${filename}
        EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
        ARGS --infer_model=${install_dir}/model --infer_data=${install_dir}/data.txt --refer_result=${install_dir}/result.txt)
endfunction()

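# Build a test binary once; it is run later against different models and datasets.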
function(inference_analysis_api_test_build TARGET_NAME filename)
    inference_analysis_test_build(${TARGET_NAME} SRCS ${filename}
        EXTRA_DEPS ${INFERENCE_EXTRA_DEPS})
endfunction()

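# Run a previously built INT8 image-classification test binary on the given model and data.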
function(inference_analysis_api_int8_test_run TARGET_NAME test_binary model_dir data_path)
    inference_analysis_test_run(${TARGET_NAME}
        COMMAND ${test_binary}
        ARGS --infer_model=${model_dir}/model
             --infer_data=${data_path}
             --warmup_batch_size=${WARMUP_BATCH_SIZE}
             --batch_size=50
             --cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
             --iterations=2)
endfunction()

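# Same as inference_analysis_api_int8_test_run, but with a caller-supplied warm-up batch size.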
function(inference_analysis_api_int8_test_run_custom_warmup_batch_size TARGET_NAME test_binary model_dir data_path warmup_batch_size)
    set(WARMUP_BATCH_SIZE ${warmup_batch_size})
    inference_analysis_api_int8_test_run(${TARGET_NAME} ${test_binary} ${model_dir} ${data_path})
endfunction()

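# Run a previously built INT8 object-detection test binary (large batch, single iteration).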
function(inference_analysis_api_object_dection_int8_test_run TARGET_NAME test_binary model_dir data_path)
    inference_analysis_test_run(${TARGET_NAME}
        COMMAND ${test_binary}
        ARGS --infer_model=${model_dir}/model
             --infer_data=${data_path}
             --warmup_batch_size=10
             --batch_size=300
             --cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
             --iterations=1)
endfunction()

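# Build and run image-classification tests that feed the model with randomly generated (fake) data.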
function(inference_analysis_api_test_with_fake_data_build TARGET_NAME filename)
    inference_analysis_test_build(${TARGET_NAME} SRCS ${filename}
        EXTRA_DEPS ${INFERENCE_EXTRA_DEPS})
endfunction()

function(inference_analysis_api_test_with_fake_data_run TARGET_NAME test_binary model_dir disable_fc)
    inference_analysis_test_run(${TARGET_NAME}
        COMMAND ${test_binary}
        ARGS --infer_model=${model_dir}/model
             --disable_mkldnn_fc=${disable_fc})
endfunction()

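# Compare an optimized FP32 model against its QAT INT8 counterpart on the same dataset.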
function(inference_analysis_api_qat_test_run TARGET_NAME test_binary fp32_model_dir int8_model_dir data_path)
    inference_analysis_test_run(${TARGET_NAME}
        COMMAND ${test_binary}
        ARGS --fp32_model=${fp32_model_dir}
             --int8_model=${int8_model_dir}
             --infer_data=${data_path}
             --batch_size=50
             --cpu_num_threads=${CPU_NUM_THREADS_ON_CI}
             --with_accuracy_layer=false
             --iterations=2)
endfunction()

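# Run a Python preprocessing script that converts a local dataset into the binary format read by the C++ tests.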
function(preprocess_data2bin_test_run target py_script_source data_dir output_file)
    py_test(${target} SRCS ${CMAKE_CURRENT_SOURCE_DIR}/${py_script_source}
            ARGS --data_dir=${data_dir}
                 --output_file=${output_file}
                 --local)
endfunction()

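# The helpers above are used as in the following sketch (hypothetical "foo" model,
# shown for illustration only; the real registrations start below):
#   set(FOO_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/foo")
#   download_model_and_data(${FOO_INSTALL_DIR} "foo_model.tar.gz" "foo_data.txt.tar.gz")
#   inference_analysis_api_test(test_analyzer_foo ${FOO_INSTALL_DIR} analyzer_foo_tester.cc)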
if(NOT APPLE AND WITH_MKLML)
    # RNN1
    set(RNN1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/rnn1")
    download_model_and_data(${RNN1_INSTALL_DIR} "rnn1%2Fmodel.tar.gz" "rnn1%2Fdata.txt.tar.gz")
    inference_analysis_api_test(test_analyzer_rnn1 ${RNN1_INSTALL_DIR} analyzer_rnn1_tester.cc)

    # seq_pool1
    set(SEQ_POOL1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/seq_pool")
    download_model_and_data(${SEQ_POOL1_INSTALL_DIR} "seq_pool1_model_.tar.gz" "seq_pool1_data.txt.tar.gz")
    inference_analysis_api_test(test_analyzer_seq_pool1 ${SEQ_POOL1_INSTALL_DIR} analyzer_seq_pool1_tester.cc)
else()
    # TODO: fix these tests on macOS and with OpenBLAS; the reason is that
    # fusion_seqexpand_concat_fc_op is not supported in those configurations
    message(WARNING "These tests have been disabled on OSX or with WITH_MKL=OFF until they are fixed: \n test_analyzer_rnn1")
    message(WARNING "These tests have been disabled on OSX or with WITH_MKL=OFF until they are fixed: \n test_analyzer_seq_pool1")
endif()

# RNN2
set(RNN2_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/rnn2")
download_model_and_data(${RNN2_INSTALL_DIR} "rnn2_model.tar.gz" "rnn2_data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_rnn2 ${RNN2_INSTALL_DIR} analyzer_rnn2_tester.cc)

# TODO(luotao, Superjom): the DAM test is temporarily disabled, see
# https://github.com/PaddlePaddle/Paddle/issues/15032#issuecomment-455990914.
# It will be re-enabled after the inference framework refactor.
# normal DAM
set(DAM_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/dam")
download_model_and_data(${DAM_INSTALL_DIR} "DAM_model.tar.gz" "DAM_data.txt.tar.gz")
# inference_analysis_api_test(test_analyzer_dam ${DAM_INSTALL_DIR} analyzer_dam_tester.cc EXTRA_DEPS legacy_allocator)

# small DAM
set(DAM_SMALL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/small_dam")
download_model_and_data(${DAM_SMALL_INSTALL_DIR} "dam_small_model.tar.gz" "dam_small_data.txt.tar.gz")
inference_analysis_test(test_analyzer_small_dam SRCS analyzer_dam_tester.cc
        EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
        ARGS --infer_model=${DAM_SMALL_INSTALL_DIR}/model --infer_data=${DAM_SMALL_INSTALL_DIR}/data.txt --max_turn_num=1)

# save model
inference_analysis_api_test(test_analyzer_save_model ${DAM_SMALL_INSTALL_DIR} analyzer_save_model_tester.cc)

# chinese_ner
set(CHINESE_NER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/chinese_ner")
download_model_and_data(${CHINESE_NER_INSTALL_DIR} "chinese_ner_model.tar.gz" "chinese_ner-data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_ner ${CHINESE_NER_INSTALL_DIR} analyzer_ner_tester.cc)

# lac
set(LAC_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/lac")
download_model_and_data(${LAC_INSTALL_DIR} "lac_model.tar.gz" "lac_data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_lac ${LAC_INSTALL_DIR} analyzer_lac_tester.cc)

# Pyramid DNN
set(PYRAMID_DNN_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/pyramid_dnn")
download_model_and_data(${PYRAMID_DNN_INSTALL_DIR} "PyramidDNN_model.tar.gz" "PyramidDNN_data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_pyramid_dnn ${PYRAMID_DNN_INSTALL_DIR} analyzer_pyramid_dnn_tester.cc)

# Ernie
set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie")
download_model_and_data(${ERNIE_INSTALL_DIR} "Ernie_model.tar.gz" "Ernie_data.txt.tar.gz")
download_result(${ERNIE_INSTALL_DIR} "Ernie_result.txt.tar.gz")
inference_analysis_api_test(test_analyzer_ernie ${ERNIE_INSTALL_DIR} analyzer_ernie_tester.cc)

# Ernie large
set(ERNIE_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/Ernie_Large")
download_model_and_data(${ERNIE_INSTALL_DIR} "Ernie_large_model.tar.gz" "Ernie_large_data.txt.tar.gz")
download_result(${ERNIE_INSTALL_DIR} "Ernie_large_result.txt.tar.gz")
inference_analysis_test(test_analyzer_ernie_large SRCS analyzer_ernie_tester.cc
    EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
    ARGS --infer_model=${ERNIE_INSTALL_DIR}/model --infer_data=${ERNIE_INSTALL_DIR}/data.txt --refer_result=${ERNIE_INSTALL_DIR}/result.txt --ernie_large=true)

# text_classification
set(TEXT_CLASSIFICATION_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/text_classification")
download_model_and_data(${TEXT_CLASSIFICATION_INSTALL_DIR} "text-classification-Senta.tar.gz" "text_classification_data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_text_classification ${TEXT_CLASSIFICATION_INSTALL_DIR} analyzer_text_classification_tester.cc)

# seq_conv1
set(SEQ_CONV1_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/seq_conv1")
download_model_and_data(${SEQ_CONV1_INSTALL_DIR} "seq_conv1_model.tar.gz" "seq_conv1_data.txt.tar.gz")
inference_analysis_api_test(test_analyzer_seq_conv1 ${SEQ_CONV1_INSTALL_DIR} analyzer_seq_conv1_tester.cc)

# transformer; the dataset currently only works with batch_size=8
set(TRANSFORMER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/transformer")
download_model_and_data(${TRANSFORMER_INSTALL_DIR} "temp%2Ftransformer_model.tar.gz" "temp%2Ftransformer_data.txt.tar.gz")
inference_analysis_test(test_analyzer_transformer SRCS analyzer_transformer_tester.cc
  EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
  ARGS --infer_model=${TRANSFORMER_INSTALL_DIR}/model --infer_data=${TRANSFORMER_INSTALL_DIR}/data.txt --batch_size=8
       --cpu_num_threads=${CPU_NUM_THREADS_ON_CI})

# ocr
set(OCR_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/ocr")
if (NOT EXISTS ${OCR_INSTALL_DIR})
    inference_download_and_uncompress(${OCR_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/" "inference-vis-demos%2Focr.tar.gz")
endif()
inference_analysis_api_test(test_analyzer_ocr ${OCR_INSTALL_DIR} analyzer_vis_tester.cc)

# mobilenet with transpose op
set(MOBILENET_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet")
if (NOT EXISTS ${MOBILENET_INSTALL_DIR})
    inference_download_and_uncompress(${MOBILENET_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/" "inference-vis-demos%2Fmobilenet.tar.gz")
endif()
inference_analysis_api_test(test_analyzer_mobilenet_transpose ${MOBILENET_INSTALL_DIR} analyzer_vis_tester.cc)

### Image classification tests with fake data
set(IMG_CLASS_TEST_APP "test_analyzer_image_classification")
set(IMG_CLASS_TEST_APP_SRC "analyzer_image_classification_tester.cc")

# build test binary to be used in subsequent tests
inference_analysis_api_test_with_fake_data_build(${IMG_CLASS_TEST_APP} ${IMG_CLASS_TEST_APP_SRC})

# googlenet
set(GOOGLENET_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/googlenet")
download_data(${GOOGLENET_MODEL_DIR} "googlenet.tar.gz")
inference_analysis_api_test_with_fake_data_run(test_analyzer_googlenet ${IMG_CLASS_TEST_APP}
    ${GOOGLENET_MODEL_DIR} false)

# resnet50
set(RESNET50_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/resnet50")
download_data(${RESNET50_MODEL_DIR} "resnet50_model.tar.gz")
inference_analysis_api_test_with_fake_data_run(test_analyzer_resnet50 ${IMG_CLASS_TEST_APP}
    ${RESNET50_MODEL_DIR} true)

# mobilenet with depthwise_conv op
set(MOBILENET_MODEL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet_depthwise_conv")
download_data(${MOBILENET_MODEL_DIR} "mobilenet_model.tar.gz")
inference_analysis_api_test_with_fake_data_run(test_analyzer_mobilenet_depthwise_conv ${IMG_CLASS_TEST_APP}
    ${MOBILENET_MODEL_DIR} false)

if(WITH_MKLDNN)

  ### INT8 tests

  set(INT8_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/int8v2")

  ## Image classification models

  # ImageNet small dataset
  # It may already have been downloaded for the INT8 QAT unit tests
  set(IMAGENET_DATA_ARCHIVE "imagenet_val_100_tail.tar.gz")
  set(IMAGENET_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/imagenet")
  set(IMAGENET_DATA_PATH "${IMAGENET_DATA_DIR}/data.bin")
  download_int8_data(${IMAGENET_DATA_DIR} ${IMAGENET_DATA_ARCHIVE})

  # build test binary to be used in subsequent tests
  set(INT8_IMG_CLASS_TEST_APP "test_analyzer_int8_image_classification")
  set(INT8_IMG_CLASS_TEST_APP_SRC "analyzer_int8_image_classification_tester.cc")
  inference_analysis_api_test_build(${INT8_IMG_CLASS_TEST_APP} ${INT8_IMG_CLASS_TEST_APP_SRC})

  # resnet50 int8
  set(INT8_RESNET50_MODEL_DIR "${INT8_DATA_DIR}/resnet50")
  download_int8_data(${INT8_RESNET50_MODEL_DIR} "resnet50_int8_model.tar.gz")
  inference_analysis_api_int8_test_run(test_analyzer_int8_resnet50 ${INT8_IMG_CLASS_TEST_APP} ${INT8_RESNET50_MODEL_DIR} ${IMAGENET_DATA_PATH})

  # mobilenetv1 int8
  set(INT8_MOBILENETV1_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv1")
  download_int8_data(${INT8_MOBILENETV1_MODEL_DIR} "mobilenetv1_int8_model.tar.gz")
  inference_analysis_api_int8_test_run(test_analyzer_int8_mobilenetv1 ${INT8_IMG_CLASS_TEST_APP} ${INT8_MOBILENETV1_MODEL_DIR} ${IMAGENET_DATA_PATH})
 
  # mobilenetv2 int8
  set(INT8_MOBILENETV2_MODEL_DIR "${INT8_DATA_DIR}/mobilenetv2")
  download_int8_data(${INT8_MOBILENETV2_MODEL_DIR} "mobilenet_v2_int8_model.tar.gz")
  inference_analysis_api_int8_test_run(test_analyzer_int8_mobilenetv2 ${INT8_IMG_CLASS_TEST_APP} ${INT8_MOBILENETV2_MODEL_DIR} ${IMAGENET_DATA_PATH})
 
  # resnet101 int8
  # TODO(grygielski) Enable after MKL-DNN 1.0 merge
  set(INT8_RESNET101_MODEL_DIR "${INT8_DATA_DIR}/resnet101")
  download_int8_data(${INT8_RESNET101_MODEL_DIR} "Res101_int8_model.tar.gz")
  # inference_analysis_api_int8_test_run(test_analyzer_int8_resnet101 ${INT8_IMG_CLASS_TEST_APP} ${INT8_RESNET101_MODEL_DIR} ${IMAGENET_DATA_PATH})
 
  # vgg16 int8
  # TODO(grygielski) Enable after MKL-DNN 1.0 merge
  set(INT8_VGG16_MODEL_DIR "${INT8_DATA_DIR}/vgg16")
  download_int8_data(${INT8_VGG16_MODEL_DIR} "VGG16_int8_model.tar.gz")
  # inference_analysis_api_int8_test_run(test_analyzer_int8_vgg16 ${INT8_IMG_CLASS_TEST_APP} ${INT8_VGG16_MODEL_DIR} ${IMAGENET_DATA_PATH})
 
  # vgg19 int8
  # TODO(grygielski) Enable after MKL-DNN 1.0 merge
  set(INT8_VGG19_MODEL_DIR "${INT8_DATA_DIR}/vgg19")
  download_int8_data(${INT8_VGG19_MODEL_DIR} "VGG19_int8_model.tar.gz")
  # inference_analysis_api_int8_test_run(test_analyzer_int8_vgg19 ${INT8_IMG_CLASS_TEST_APP} ${INT8_VGG19_MODEL_DIR} ${IMAGENET_DATA_PATH})

  # googlenet int8
  set(INT8_GOOGLENET_MODEL_DIR "${INT8_DATA_DIR}/googlenet")
  download_int8_data(${INT8_GOOGLENET_MODEL_DIR} "GoogleNet_int8_model.tar.gz")
  inference_analysis_api_int8_test_run_custom_warmup_batch_size(test_analyzer_int8_googlenet ${INT8_IMG_CLASS_TEST_APP} ${INT8_GOOGLENET_MODEL_DIR} ${IMAGENET_DATA_PATH} 10)

  ### Object detection models
  set(PASCALVOC_DATA_PATH "${INT8_DATA_DIR}/pascalvoc_val_head_300.bin")
  set(INT8_OBJ_DETECT_TEST_APP "test_analyzer_int8_object_detection")
  set(INT8_OBJ_DETECT_TEST_APP_SRC "analyzer_int8_object_detection_tester.cc")

  # download dataset if necessary
  download_int8_data(${INT8_DATA_DIR} "pascalvoc_val_head_300.tar.gz")

  # build test binary to be used in subsequent tests
  inference_analysis_api_test_build(${INT8_OBJ_DETECT_TEST_APP} ${INT8_OBJ_DETECT_TEST_APP_SRC})

  # mobilenet-ssd int8
  set(INT8_MOBILENET_SSD_MODEL_DIR "${INT8_DATA_DIR}/mobilenet-ssd")
  download_int8_data(${INT8_MOBILENET_SSD_MODEL_DIR} "mobilenet_ssd_int8_model.tar.gz")
  inference_analysis_api_object_dection_int8_test_run(test_analyzer_int8_mobilenet_ssd ${INT8_OBJ_DETECT_TEST_APP} ${INT8_MOBILENET_SSD_MODEL_DIR} ${PASCALVOC_DATA_PATH})

  ### optimized FP32 vs. QAT INT8 tests

  set(QAT_DATA_DIR "${INFERENCE_DEMO_INSTALL_DIR}/qat")
  set(QAT_IMG_CLASS_TEST_APP "test_analyzer_qat_image_classification")
  set(QAT_IMG_CLASS_TEST_APP_SRC "analyzer_qat_image_classification_tester.cc")

  # build test binary to be used in subsequent tests
  inference_analysis_api_test_build(${QAT_IMG_CLASS_TEST_APP} ${QAT_IMG_CLASS_TEST_APP_SRC})

  # MobileNet FP32 vs. QAT INT8
  # The FP32 model should already be downloaded for slim QAT unit tests
  set(QAT2_MobileNet_MODEL_DIR "${QAT_DATA_DIR}/MobileNet_qat_perf")
  set(QAT2_INT8_MobileNet_MODEL_DIR "${QAT_DATA_DIR}/MobileNet_qat_perf_int8")
  download_qat_data(${QAT2_INT8_MobileNet_MODEL_DIR} "MobileNet_qat_perf_int8.tar.gz")
  inference_analysis_api_qat_test_run(test_analyzer_qat_performance_benchmark ${QAT_IMG_CLASS_TEST_APP} ${QAT2_MobileNet_MODEL_DIR}/MobileNet_qat_perf/float ${QAT2_INT8_MobileNet_MODEL_DIR}/MobileNet_qat_perf_int8 ${IMAGENET_DATA_PATH})

  ### Other tests

  # MKLDNN quantizer config
  set(MKLDNN_QUANTIZER_CONFIG_TEST_APP "test_mkldnn_quantizer_config")
  set(MKLDNN_QUANTIZER_CONFIG_TEST_APP_SRC "mkldnn_quantizer_config_tester.cc")
  inference_analysis_api_test_build(${MKLDNN_QUANTIZER_CONFIG_TEST_APP} ${MKLDNN_QUANTIZER_CONFIG_TEST_APP_SRC})
  inference_analysis_test_run(test_mkldnn_quantizer_config COMMAND ${MKLDNN_QUANTIZER_CONFIG_TEST_APP})

  # preprocess data2bin imagenet
  download_int8_data(${INT8_DATA_DIR} "imagenet_small.tar.gz")
  set(IMAGENET_SMALL_DATA_DIR "${INT8_DATA_DIR}/imagenet_small")
  set(IMAGENET_SMALL_OUTPUT_FILE "imagenet_small.bin")
  preprocess_data2bin_test_run(preprocess_local_imagenet "full_ILSVRC2012_val_preprocess.py" ${IMAGENET_SMALL_DATA_DIR} ${IMAGENET_SMALL_OUTPUT_FILE})
    
  # preprocess data2bin pascalvoc
  download_int8_data(${INT8_DATA_DIR} "pascalvoc_small.tar.gz")
  set(PASCALVOC_SMALL_DATA_DIR "${INT8_DATA_DIR}/pascalvoc_small")
  set(PASCALVOC_SMALL_OUTPUT_FILE "pascalvoc_small.bin")
  preprocess_data2bin_test_run(preprocess_local_pascalvoc "full_pascalvoc_test_preprocess.py" ${PASCALVOC_SMALL_DATA_DIR} ${PASCALVOC_SMALL_OUTPUT_FILE})

endif()

# bert, max_len=20, embedding_dim=128
set(BERT_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/bert_emb128")
download_model_and_data(${BERT_INSTALL_DIR} "bert_emb128_model.tar.gz" "bert_data_len20.txt.tar.gz")
inference_analysis_api_test(test_analyzer_bert ${BERT_INSTALL_DIR} analyzer_bert_tester.cc)

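# TensorRT tests, enabled only when building with GPU support and TensorRT is available.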
if(WITH_GPU AND TENSORRT_FOUND)
    set(TRT_MODEL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/trt_models")
    if (NOT EXISTS ${TRT_MODEL_INSTALL_DIR})
        inference_download_and_uncompress(${TRT_MODEL_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test "trt_inference_test_models.tar.gz")
    endif()
    set(TEST_SPLIT_CONVERTER_MODEL "${TRT_MODEL_INSTALL_DIR}/trt_split_op_converter_test")
    if (NOT EXISTS ${TEST_SPLIT_CONVERTER_MODEL})
        inference_download_and_uncompress(${TEST_SPLIT_CONVERTER_MODEL} ${INFERENCE_URL}/tensorrt_test "split_converter.tgz")
    endif()
    set(TEST_INSTANCE_NORM_MODEL "${TRT_MODEL_INSTALL_DIR}/trt_instance_norm_test")
    if (NOT EXISTS ${TEST_INSTANCE_NORM_MODEL})
        inference_download_and_uncompress(${TEST_INSTANCE_NORM_MODEL} ${INFERENCE_URL}/tensorrt_test "instance_norm.tgz")
    endif()
    inference_analysis_test(trt_mobilenet_test SRCS trt_mobilenet_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
    inference_analysis_test(trt_resnet50_test SRCS trt_resnet50_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
    inference_analysis_test(trt_resnext_test SRCS trt_resnext_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
    inference_analysis_test(trt_fc_prelu_test SRCS trt_fc_prelu_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
    inference_analysis_test(trt_cascade_rcnn_test SRCS trt_cascade_rcnn_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
    inference_analysis_test(trt_split_converter_test SRCS trt_split_converter_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TEST_SPLIT_CONVERTER_MODEL}/)
    inference_analysis_test(trt_instance_norm_test SRCS trt_instance_norm_converter_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TEST_INSTANCE_NORM_MODEL}/)
    inference_analysis_test(test_analyzer_capi_gpu SRCS analyzer_capi_gpu_tester.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} paddle_fluid_c
            ARGS --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
     
    set(TRT_MODEL_QUANT_RESNET_DIR "${INFERENCE_DEMO_INSTALL_DIR}/quant_small_model")
    if (NOT EXISTS ${TRT_MODEL_QUANT_RESNET_DIR})
        inference_download_and_uncompress(${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test "quant_small_model.tar.gz")
    endif()
    inference_analysis_test(trt_quant_int8_test SRCS trt_quant_int8_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TRT_MODEL_QUANT_RESNET_DIR})

    set(TRT_MODEL_QUANT_YOLOV3_DIR "${INFERENCE_DEMO_INSTALL_DIR}/yolov3_r50_quant_aware")
    if (NOT EXISTS ${TRT_MODEL_QUANT_YOLOV3_DIR})
        inference_download_and_uncompress(${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test "yolov3_r50_quant_aware.tgz")
    endif()
    inference_analysis_test(trt_quant_int8_yolov3_r50_test SRCS trt_quant_int8_yolov3_r50_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TRT_MODEL_QUANT_YOLOV3_DIR})

    set(TEST_TRT_DYNAMIC_MODEL2 "${TRT_MODEL_INSTALL_DIR}/complex_model_dynamic")
    if (NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL2})
        inference_download_and_uncompress(${TEST_TRT_DYNAMIC_MODEL2} ${INFERENCE_URL}/tensorrt_test "complex_model_dynamic2.tar.gz")
    endif()

    set(TEST_TRT_DYNAMIC_MODEL "${TRT_MODEL_INSTALL_DIR}/conv_bn_swish_split_gelu")
    if (NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL})
        inference_download_and_uncompress(${TEST_TRT_DYNAMIC_MODEL} ${INFERENCE_URL}/tensorrt_test "conv_bn_swish_split_gelu.tar.gz")
    endif()
    inference_analysis_test(trt_dynamic_shape_test SRCS trt_dynamic_shape_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TRT_MODEL_INSTALL_DIR})

    set(TEST_TRT_ERNIE_MODEL "${TRT_MODEL_INSTALL_DIR}/ernie_test")
    if (NOT EXISTS ${TEST_TRT_ERNIE_MODEL})
        inference_download_and_uncompress(${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test "ernie_model_4.tar.gz")
    endif()

    inference_analysis_test(test_trt_dynamic_shape_ernie SRCS trt_dynamic_shape_ernie_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4)

    set(TEST_TRT_ERNIE_UNSER_MODEL "${TRT_MODEL_INSTALL_DIR}/ernie_test/ernie_model_4_unserialized/")
    if (NOT EXISTS ${TEST_TRT_ERNIE_UNSER_MODEL}/ernie_model_4_unserialized.tgz)
        inference_download_and_uncompress(${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test "ernie_model_4_unserialized.tgz")
    endif()

    inference_analysis_test(test_trt_dynamic_shape_ernie_ser_deser SRCS trt_dynamic_shape_ernie_deserialize_test.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
            ARGS --infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4_unserialized)


endif()

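# Lite engine tests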
set(LITE_MODEL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/lite")
download_data(${LITE_MODEL_INSTALL_DIR} "mul_model_fp32.tgz")

inference_analysis_test(lite_mul_model_test SRCS lite_mul_model_test.cc
        EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
        ARGS --infer_model=${LITE_MODEL_INSTALL_DIR})
inference_analysis_test(lite_resnet50_test SRCS lite_resnet50_test.cc
        EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
        ARGS --infer_model=${RESNET50_MODEL_DIR})

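# C-API tests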
inference_analysis_test(test_analyzer_capi SRCS analyzer_capi_tester.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} paddle_fluid_c
            ARGS --infer_model=${RESNET50_MODEL_DIR}/model)

inference_analysis_test(test_analyzer_capi_pd_tensor SRCS analyzer_capi_pd_tensor_tester.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} paddle_fluid_c
            ARGS --infer_model=${MOBILENET_INSTALL_DIR}/model)

if(WITH_MKLDNN)
  inference_analysis_test(test_analyzer_capi_int SRCS analyzer_capi_int_tester.cc
            EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} paddle_fluid_c
            ARGS --infer_model=${INT8_DATA_DIR}/resnet50/model)
endif()

inference_analysis_test(test_analyzer_capi_ner SRCS analyzer_capi_ner_tester.cc
        EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} paddle_fluid_c
        ARGS --infer_model=${CHINESE_NER_INSTALL_DIR}/model)