diff --git a/test_tipc/configs/ch_PP-OCRv2_det_KL/model_linux_gpu_normal_normal_infer_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv2_det_KL/model_linux_gpu_normal_normal_infer_python_linux_gpu_cpu.txt
index 1aad65b687992155133ed11533a14f642510361d..3aa1e71306069222b17b642ecb6539730f2d1337 100644
--- a/test_tipc/configs/ch_PP-OCRv2_det_KL/model_linux_gpu_normal_normal_infer_python_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2_det_KL/model_linux_gpu_normal_normal_infer_python_linux_gpu_cpu.txt
@@ -3,7 +3,7 @@ model_name:PPOCRv2_ocr_det_kl
 python:python3.7
 Global.pretrained_model:null
 Global.save_inference_dir:null
-infer_model:./inference/ch_PP-OCRv2_det_infer/
+infer_model:./inference/ch_PP-OCRv2_det_infer
 infer_export:deploy/slim/quantization/quant_kl.py -c configs/det/ch_PP-OCRv2/ch_PP-OCRv2_det_cml.yml -o
 infer_quant:True
 inference:tools/infer/predict_det.py
diff --git a/test_tipc/test_train_inference_python.sh b/test_tipc/test_train_inference_python.sh
index b9bf9edf309c02fde0a679891b709deef6da9465..9bde89d78e0ee78c7b650306047b036488a3eab9 100644
--- a/test_tipc/test_train_inference_python.sh
+++ b/test_tipc/test_train_inference_python.sh
@@ -183,7 +183,7 @@ function func_inference(){
                 if [[ ${precision} =~ "fp16" || ${precision} =~ "int8" ]] && [ ${use_trt} = "False" ]; then
                     continue
                 fi
-                if [[ ${use_trt} = "False" || ${precision} =~ "int8" ]] && [ ${_flag_quant} = "True" ]; then
+                if [[ ${use_trt} = "False" && ${precision} =~ "int8" ]] && [ ${_flag_quant} = "True" ]; then
                     continue
                 fi
                 for batch_size in ${batch_size_list[*]}; do
@@ -227,7 +227,12 @@ if [ ${MODE} = "whole_infer" ] || [ ${MODE} = "klquant_whole_infer" ]; then
     for infer_model in ${infer_model_dir_list[*]}; do
         # run export
         if [ ${infer_run_exports[Count]} != "null" ];then
-            save_infer_dir=$(dirname $infer_model)
+            if [ ${MODE} = "klquant_whole_infer" ]; then
+                save_infer_dir="${infer_model}_klquant"
+            fi
+            if [ ${MODE} = "whole_infer" ]; then
+                save_infer_dir="${infer_model}"
+            fi
             set_export_weight=$(func_set_params "${export_weight}" "${infer_model}")
             set_save_infer_key=$(func_set_params "${save_infer_key}" "${save_infer_dir}")
             export_cmd="${python} ${infer_run_exports[Count]} ${set_export_weight} ${set_save_infer_key}"