diff --git a/tests/params.txt b/tests/params.txt
deleted file mode 100644
index 4fc6626cef8041df00d1346eca570523eb63c263..0000000000000000000000000000000000000000
--- a/tests/params.txt
+++ /dev/null
@@ -1,48 +0,0 @@
-===========================train_params===========================
-model_name:ocr_det
-python:python3.7
-gpu_list:0|0,1
-Global.auto_cast:null
-Global.epoch_num:2
-Global.save_model_dir:./output/
-Train.loader.batch_size_per_card:2
-Global.use_gpu:
-Global.pretrained_model:null
-train_model_name:latest
-train_infer_img_dir:./train_data/icdar2015/text_localization/ch4_test_images/
-null:null
-##
-trainer:norm_train|pact_train
-norm_train:tools/train.py -c configs/det/det_mv3_db.yml -o Global.pretrained_model=./pretrain_models/MobileNetV3_large_x0_5_pretrained
-pact_train:deploy/slim/quantization/quant.py -c configs/det/det_mv3_db.yml -o Global.pretrained_model=./pretrain_models/det_mv3_db_v2.0_train/best_accuracy
-fpgm_train:null
-distill_train:null
-null:null
-null:null
-##
-===========================eval_params===========================
-eval:null
-null:null
-##
-===========================infer_params===========================
-Global.save_inference_dir:./output/
-Global.pretrained_model:
-norm_export:tools/export_model.py -c configs/det/det_mv3_db.yml -o
-quant_export:deploy/slim/quantization/export_model.py -c configs/det/det_mv3_db.yml -o
-fpgm_export:deploy/slim/prune/export_prune_model.py
-distill_export:null
-null:null
-null:null
-##
-inference:tools/infer/predict_det.py
---use_gpu:True|False
---enable_mkldnn:True|False
---cpu_threads:1|6
---rec_batch_num:1
---use_tensorrt:True|False
---precision:fp32|fp16|int8
---det_model_dir:./inference/ch_ppocr_mobile_v2.0_det_infer/
---image_dir:./inference/ch_det_data_50/all-sum-510/
---save_log_path:null
---benchmark:True
-null:null
diff --git a/tests/test.sh b/tests/test.sh
index fde636fc1c70df7ce4a8d8fad38e71c08c56242d..10c26d2c0bc41e847a1c001edd8cf490430d13fc 100644
--- a/tests/test.sh
+++ b/tests/test.sh
@@ -132,11 +132,11 @@ function func_inference(){
     _flag_quant=$6
     # inference
     for use_gpu in ${use_gpu_list[*]}; do
-        if [ ${use_gpu} = "False" ] && [ ${_flag_quant} = "True" ]; then
-            continue
-        fi
         if [ ${use_gpu} = "False" ]; then
             for use_mkldnn in ${use_mkldnn_list[*]}; do
+                if [ ${use_mkldnn} = "False" ] && [ ${_flag_quant} = "True" ]; then
+                    continue
+                fi
                 for threads in ${cpu_threads_list[*]}; do
                     for batch_size in ${batch_size_list[*]}; do
                         _save_log_path="${_log_path}/infer_cpu_usemkldnn_${use_mkldnn}_threads_${threads}_batchsize_${batch_size}.log"