From e66447b3a8db6ac80ad4d7edf8747e68907bb5af Mon Sep 17 00:00:00 2001
From: HydrogenSulfate <490868991@qq.com>
Date: Wed, 13 Jul 2022 18:07:50 +0800
Subject: [PATCH] skip python inference when test for mainbody_det

---
 ...ormal_normal_paddle2onnx_python_linux_cpu.txt | 10 +++++-----
 test_tipc/test_paddle2onnx.sh                    | 16 +++++++++-------
 2 files changed, 14 insertions(+), 12 deletions(-)

diff --git a/test_tipc/configs/PP-ShiTu/PPShiTu_mainbody_det_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/PP-ShiTu/PPShiTu_mainbody_det_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 96c0eec1..bfd24bb4 100644
--- a/test_tipc/configs/PP-ShiTu/PPShiTu_mainbody_det_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/PP-ShiTu/PPShiTu_mainbody_det_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -9,8 +9,8 @@ python:python3.7
 --opset_version:11
 --enable_onnx_checker:True
 inference_model_url:https://paddledet.bj.bcebos.com/models/picodet_lcnet_x2_5_640_mainbody_infer.tar
-inference:./python/predict_cls.py
-Global.use_onnx:True
-Global.inference_model_dir:./models/picodet_lcnet_x2_5_640_mainbody_infer
-Global.use_gpu:False
--c:configs/inference_cls.yaml
\ No newline at end of file
+inference:null
+Global.use_onnx:null
+Global.inference_model_dir:null
+Global.use_gpu:null
+-c:null
\ No newline at end of file
diff --git a/test_tipc/test_paddle2onnx.sh b/test_tipc/test_paddle2onnx.sh
index a8c6914e..d025fb2e 100644
--- a/test_tipc/test_paddle2onnx.sh
+++ b/test_tipc/test_paddle2onnx.sh
@@ -59,13 +59,15 @@ function func_paddle2onnx(){
     status_check $last_status "${trans_model_cmd}" "${status_log}" "${model_name}"
 
     # python inference
-    set_model_dir=$(func_set_params "${inference_model_dir_key}" "${inference_model_dir_value}")
-    set_use_onnx=$(func_set_params "${use_onnx_key}" "${use_onnx_value}")
-    set_hardware=$(func_set_params "${inference_hardware_key}" "${inference_hardware_value}")
-    set_inference_config=$(func_set_params "${inference_config_key}" "${inference_config_value}")
-    infer_model_cmd="cd deploy && ${python} ${inference_py} -o ${set_model_dir} -o ${set_use_onnx} -o ${set_hardware} ${set_inference_config} > ${_save_log_path} 2>&1 && cd ../"
-    eval $infer_model_cmd
-    status_check $last_status "${infer_model_cmd}" "${status_log}" "${model_name}"
+    if [[ ${inference_py} != "null" ]]; then
+        set_model_dir=$(func_set_params "${inference_model_dir_key}" "${inference_model_dir_value}")
+        set_use_onnx=$(func_set_params "${use_onnx_key}" "${use_onnx_value}")
+        set_hardware=$(func_set_params "${inference_hardware_key}" "${inference_hardware_value}")
+        set_inference_config=$(func_set_params "${inference_config_key}" "${inference_config_value}")
+        infer_model_cmd="cd deploy && ${python} ${inference_py} -o ${set_model_dir} -o ${set_use_onnx} -o ${set_hardware} ${set_inference_config} > ${_save_log_path} 2>&1 && cd ../"
+        eval $infer_model_cmd
+        status_check $last_status "${infer_model_cmd}" "${status_log}" "${model_name}"
+    fi
 }
--
GitLab
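
test_tipc config files are parsed by line position, so the patch keeps the inference keys in place and sets their values to the literal string "null" rather than deleting the lines; test_paddle2onnx.sh then treats "null" as a sentinel meaning "skip this stage". The config previously pointed the detector at predict_cls.py, a classifier entry point, which is presumably why this stage is now skipped for mainbody_det. Below is a minimal, self-contained sketch of that sentinel pattern; the func_set_params here only approximates the real helper in test_tipc/common_func.sh, and the hard-coded values stand in for lines read from the config file.

    #!/usr/bin/env bash
    # Sketch of the "null" sentinel pattern used by the patch above.
    # This func_set_params mimics (but is not a copy of) the helper in
    # test_tipc/common_func.sh: a "null" key or value yields no option.
    function func_set_params() {
        local key=$1 value=$2
        if [[ ${key} == "null" || ${value} == "null" || -z ${value} ]]; then
            echo " "
        else
            echo "${key}=${value}"
        fi
    }

    inference_py="null"   # the value of the "inference:" line in the config above

    if [[ ${inference_py} != "null" ]]; then
        set_model_dir=$(func_set_params "Global.inference_model_dir" "null")
        echo "would run: python ${inference_py} -o ${set_model_dir}"
    else
        echo "python inference skipped for this config"
    fi

With the config in this patch, inference_py resolves to "null", so the guard falls through to the skip branch and only the paddle2onnx conversion step is exercised.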