diff --git a/tests/ocr_det_params.txt b/tests/ocr_det_params.txt
index 8b9273f7455ef7c0f9617c7aefbe9deac6392a6b..3b8ddb2d7345716a54ac5f9e2388a9453c6b91b1 100644
--- a/tests/ocr_det_params.txt
+++ b/tests/ocr_det_params.txt
@@ -34,13 +34,13 @@ distill_export:null
 export1:null
 export2:null
 ##
-infer_model:./inference/ch_ppocr_mobile_v2.0_det_infer/
-infer_export:null
+train_model:./inference/ch_ppocr_mobile_v2.0_det_train/best_accuracy
+infer_export:tools/export_model.py -c configs/det/det_mv3_db.yml -o
 infer_quant:False
 inference:tools/infer/predict_det.py
 --use_gpu:True|False
 --enable_mkldnn:True|False
---cpu_threads:1|6
+--cpu_threads:6
 --rec_batch_num:1
 --use_tensorrt:False|True
 --precision:fp32|fp16|int8
diff --git a/tests/prepare.sh b/tests/prepare.sh
index 6e329b6581021370c354a54bcf2b654ccfe5aae8..bb79574371213952719909f8573b15f044ba5946 100644
--- a/tests/prepare.sh
+++ b/tests/prepare.sh
@@ -63,10 +63,10 @@ elif [ ${MODE} = "whole_infer" ];then
     cd ../
 elif [ ${MODE} = "infer" ] || [ ${MODE} = "cpp_infer" ];then
     if [ ${model_name} = "ocr_det" ]; then
-        eval_model_name="ch_ppocr_mobile_v2.0_det_infer"
+        eval_model_name="ch_ppocr_mobile_v2.0_det_train"
         rm -rf ./train_data/icdar2015
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/ch_det_data_50.tar
-        wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar
+        wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_train.tar
         cd ./inference && tar xf ${eval_model_name}.tar && tar xf ch_det_data_50.tar && cd ../
     elif [ ${model_name} = "ocr_server_det" ]; then
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar
diff --git a/tests/test.sh b/tests/test.sh
index 484d55735368fa7ae341d63e9cb01439f511e2fe..9fe16968eee59e2854572b47ec1aa5811e071be0 100644
--- a/tests/test.sh
+++ b/tests/test.sh
@@ -332,9 +332,7 @@ if [ ${MODE} = "infer" ]; then
         export_cmd="${python} ${norm_export} ${set_export_weight} ${set_save_infer_key}"
         eval $export_cmd
         status_export=$?
-        if [ ${status_export} = 0 ];then
-            status_check $status_export "${export_cmd}" "${status_log}"
-        fi
+        status_check $status_export "${export_cmd}" "${status_log}"
     else
         save_infer_dir=${infer_model}
     fi