From 1771d9f88092ca1eae013c722f1a5f41b994fcb4 Mon Sep 17 00:00:00 2001
From: Zhou Wei <52485244+zhouwei25@users.noreply.github.com>
Date: Wed, 2 Sep 2020 18:53:58 +0800
Subject: [PATCH] fix cache judge more safe (#26910)

---
 .../fluid/inference/tests/api/CMakeLists.txt | 22 +++++++++----------
 paddle/fluid/inference/tests/test.cmake      |  2 +-
 paddle/scripts/paddle_build.bat              |  6 ++---
 3 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt
index 07af5c152b1..8721d07f322 100644
--- a/paddle/fluid/inference/tests/api/CMakeLists.txt
+++ b/paddle/fluid/inference/tests/api/CMakeLists.txt
@@ -215,7 +215,7 @@ inference_analysis_test(test_analyzer_transformer SRCS analyzer_transformer_test
 
 # ocr
 set(OCR_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/ocr")
-if (NOT EXISTS ${OCR_INSTALL_DIR})
+if (NOT EXISTS ${OCR_INSTALL_DIR}/ocr.tar.gz)
     inference_download_and_uncompress(${OCR_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/" "inference-vis-demos%2Focr.tar.gz")
 endif()
 inference_analysis_api_test(test_analyzer_ocr ${OCR_INSTALL_DIR} analyzer_vis_tester.cc)
@@ -231,7 +231,7 @@ set_property(TEST test_analyzer_detect PROPERTY ENVIRONMENT GLOG_vmodule=analysi
 
 # mobilenet with transpose op
 set(MOBILENET_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet")
-if (NOT EXISTS ${MOBILENET_INSTALL_DIR})
+if (NOT EXISTS ${MOBILENET_INSTALL_DIR}/mobilenet.tar.gz)
     inference_download_and_uncompress(${MOBILENET_INSTALL_DIR} "http://paddlemodels.bj.bcebos.com/" "inference-vis-demos%2Fmobilenet.tar.gz")
 endif()
 inference_analysis_api_test(test_analyzer_mobilenet_transpose ${MOBILENET_INSTALL_DIR} analyzer_vis_tester.cc)
@@ -395,15 +395,15 @@ inference_analysis_api_test(test_analyzer_bert ${BERT_INSTALL_DIR} analyzer_bert
 
 if(WITH_GPU AND TENSORRT_FOUND)
   set(TRT_MODEL_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/trt_models")
-  if (NOT EXISTS ${TRT_MODEL_INSTALL_DIR})
+  if (NOT EXISTS ${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models.tar.gz)
     inference_download_and_uncompress(${TRT_MODEL_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test "trt_inference_test_models.tar.gz")
   endif()
   set(TEST_SPLIT_CONVERTER_MODEL "${TRT_MODEL_INSTALL_DIR}/trt_split_op_converter_test")
-  if (NOT EXISTS ${TEST_SPLIT_CONVERTER_MODEL})
+  if (NOT EXISTS ${TEST_SPLIT_CONVERTER_MODEL}/split_converter.tgz)
     inference_download_and_uncompress(${TEST_SPLIT_CONVERTER_MODEL} ${INFERENCE_URL}/tensorrt_test "split_converter.tgz")
   endif()
   set(TEST_INSTANCE_NORM_MODEL "${TRT_MODEL_INSTALL_DIR}/trt_instance_norm_test")
-  if (NOT EXISTS ${TEST_INSTANCE_NORM_MODEL})
+  if (NOT EXISTS ${TEST_INSTANCE_NORM_MODEL}/instance_norm.tgz)
     inference_download_and_uncompress(${TEST_INSTANCE_NORM_MODEL} ${INFERENCE_URL}/tensorrt_test "instance_norm.tgz")
   endif()
   inference_analysis_test(trt_mobilenet_test SRCS trt_mobilenet_test.cc
@@ -432,7 +432,7 @@ if(WITH_GPU AND TENSORRT_FOUND)
           ARGS --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
 
   set(TRT_MODEL_QUANT_RESNET_DIR "${INFERENCE_DEMO_INSTALL_DIR}/small_quant_model")
-  if (NOT EXISTS ${TRT_MODEL_QUANT_RESNET_DIR})
+  if (NOT EXISTS ${TRT_MODEL_QUANT_RESNET_DIR}/small_quant_model.tgz)
     inference_download_and_uncompress(${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test "small_quant_model.tgz")
   endif()
   inference_analysis_test(trt_quant_int8_test SRCS trt_quant_int8_test.cc
@@ -440,7 +440,7 @@ if(WITH_GPU AND TENSORRT_FOUND)
           ARGS --infer_model=${TRT_MODEL_QUANT_RESNET_DIR})
 
   set(TRT_MODEL_QUANT_YOLOV3_DIR "${INFERENCE_DEMO_INSTALL_DIR}/yolov3_r50_quant_aware")
-  if (NOT EXISTS ${TRT_MODEL_QUANT_YOLOV3_DIR})
+  if (NOT EXISTS ${TRT_MODEL_QUANT_YOLOV3_DIR}/yolov3_r50_quant_aware.tgz)
     inference_download_and_uncompress(${INFERENCE_DEMO_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test "yolov3_r50_quant_aware.tgz")
   endif()
   inference_analysis_test(trt_quant_int8_yolov3_r50_test SRCS trt_quant_int8_yolov3_r50_test.cc
@@ -448,12 +448,12 @@ if(WITH_GPU AND TENSORRT_FOUND)
           ARGS --infer_model=${TRT_MODEL_QUANT_YOLOV3_DIR})
 
   set(TEST_TRT_DYNAMIC_MODEL2 "${TRT_MODEL_INSTALL_DIR}/complex_model_dynamic")
-  if (NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL2})
+  if (NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL2}/complex_model_dynamic2.tar.gz)
     inference_download_and_uncompress(${TEST_TRT_DYNAMIC_MODEL2} ${INFERENCE_URL}/tensorrt_test "complex_model_dynamic2.tar.gz")
   endif()
 
   set(TEST_TRT_DYNAMIC_MODEL "${TRT_MODEL_INSTALL_DIR}/conv_bn_swish_split_gelu")
-  if (NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL})
+  if (NOT EXISTS ${TEST_TRT_DYNAMIC_MODEL}/conv_bn_swish_split_gelu.tar.gz)
     inference_download_and_uncompress(${TEST_TRT_DYNAMIC_MODEL} ${INFERENCE_URL}/tensorrt_test "conv_bn_swish_split_gelu.tar.gz")
   endif()
   inference_analysis_test(trt_dynamic_shape_test SRCS trt_dynamic_shape_test.cc
@@ -461,7 +461,7 @@ if(WITH_GPU AND TENSORRT_FOUND)
           ARGS --infer_model=${TRT_MODEL_INSTALL_DIR})
 
   set(TEST_TRT_ERNIE_MODEL "${TRT_MODEL_INSTALL_DIR}/ernie_test")
-  if (NOT EXISTS ${TEST_TRT_ERNIE_MODEL})
+  if (NOT EXISTS ${TEST_TRT_ERNIE_MODEL}/ernie_model_4.tar.gz)
     inference_download_and_uncompress(${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test "ernie_model_4.tar.gz")
   endif()
 
@@ -470,7 +470,7 @@ if(WITH_GPU AND TENSORRT_FOUND)
           ARGS --infer_model=${TEST_TRT_ERNIE_MODEL}/ernie_model_4)
 
   set(TEST_TRT_ERNIE_UNSER_MODEL "${TRT_MODEL_INSTALL_DIR}/ernie_test/ernie_model_4_unserialized/")
-  if (NOT EXISTS ${TEST_TRT_ERNIE_UNSER_MODEL})
+  if (NOT EXISTS ${TEST_TRT_ERNIE_UNSER_MODEL}/ernie_model_4_unserialized.tgz)
     inference_download_and_uncompress(${TEST_TRT_ERNIE_MODEL} ${INFERENCE_URL}/tensorrt_test "ernie_model_4_unserialized.tgz")
   endif()
 
diff --git a/paddle/fluid/inference/tests/test.cmake b/paddle/fluid/inference/tests/test.cmake
index b9f979f96d4..8bc10f2147f 100644
--- a/paddle/fluid/inference/tests/test.cmake
+++ b/paddle/fluid/inference/tests/test.cmake
@@ -45,7 +45,7 @@ function(inference_download_and_uncompress INSTALL_DIR URL FILENAME)
 endfunction()
 
 set(WORD2VEC_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/word2vec")
-if(NOT EXISTS ${WORD2VEC_INSTALL_DIR})
+if(NOT EXISTS ${WORD2VEC_INSTALL_DIR}/word2vec.inference.model.tar.gz)
   inference_download_and_uncompress(${WORD2VEC_INSTALL_DIR} ${INFERENCE_URL} "word2vec.inference.model.tar.gz")
 endif()
 set(WORD2VEC_MODEL_DIR "${WORD2VEC_INSTALL_DIR}/word2vec.inference.model")
diff --git a/paddle/scripts/paddle_build.bat b/paddle/scripts/paddle_build.bat
index cfb59a04f81..8050f07af03 100644
--- a/paddle/scripts/paddle_build.bat
+++ b/paddle/scripts/paddle_build.bat
@@ -216,7 +216,7 @@ pip install -U %PADDLE_WHL_FILE_WIN% --user
 if %ERRORLEVEL% NEQ 0 (
     call paddle_winci\Scripts\deactivate.bat 2>NUL
     echo pip install whl package failed!
-    exit /b 3
+    exit /b 1
 )
 
 python %work_dir%\paddle\scripts\installation_validate.py
@@ -225,7 +225,7 @@ goto:eof
 :test_whl_pacakage_error
 call paddle_winci\Scripts\deactivate.bat 2>NUL
 echo Test import paddle failed, will exit!
-exit /b 3
+exit /b 1
 
 rem ---------------------------------------------------------------------------------------------
 :unit_test
@@ -268,7 +268,7 @@ goto:eof
 :test_inference_error
 call paddle_winci\Scripts\deactivate.bat 2>NUL
 echo Testing fluid library for inference failed!
-exit /b 5
+exit /b 1
 
 rem ---------------------------------------------------------------------------------------------
 :check_change_of_unittest
-- 
GitLab
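For readers following the pattern rather than the Paddle build itself, below is a standalone CMake sketch of the guard this patch switches to: testing for the downloaded archive inside the cache directory instead of for the directory alone, so a directory left behind by an interrupted run still triggers a fresh fetch. The project name, URL, and the file(DOWNLOAD)/tar steps are illustrative stand-ins, not Paddle's inference_download_and_uncompress helper; only the EXISTS condition mirrors the change above.

# Hypothetical, minimal reproduction of the "check the archive, not the directory" guard.
cmake_minimum_required(VERSION 3.10)
project(cache_guard_demo NONE)

set(DEMO_INSTALL_DIR "${CMAKE_BINARY_DIR}/demo_model")        # stand-in for *_INSTALL_DIR
set(DEMO_ARCHIVE     "demo_model.tar.gz")                     # stand-in for the model tarball
set(DEMO_URL         "https://example.com/${DEMO_ARCHIVE}")   # placeholder URL

# Old guard: `if (NOT EXISTS ${DEMO_INSTALL_DIR})` skips the download whenever the
# directory exists, even when an earlier, interrupted run created the directory but
# never finished fetching or unpacking the archive.
# New guard: key the check on the archive itself, so an incomplete cache is redone.
if (NOT EXISTS "${DEMO_INSTALL_DIR}/${DEMO_ARCHIVE}")
  file(MAKE_DIRECTORY "${DEMO_INSTALL_DIR}")
  file(DOWNLOAD "${DEMO_URL}" "${DEMO_INSTALL_DIR}/${DEMO_ARCHIVE}" SHOW_PROGRESS)
  execute_process(
    COMMAND ${CMAKE_COMMAND} -E tar xzf "${DEMO_INSTALL_DIR}/${DEMO_ARCHIVE}"
    WORKING_DIRECTORY "${DEMO_INSTALL_DIR}")
endif()

The install directory is created as a side effect of the download, so its existence alone is a weaker signal than the presence of the archive it is supposed to contain; that is the sense in which the commit title calls the new check "more safe".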