diff --git a/test_tipc/docs/test_train_inference_python.md b/test_tipc/docs/test_train_inference_python.md
index fa14863fdad02dcb9b69f45494cc18b24ceaf36f..1b4dfe56226974b2de7a1e08d296c4273f81c898 100644
--- a/test_tipc/docs/test_train_inference_python.md
+++ b/test_tipc/docs/test_train_inference_python.md
@@ -51,37 +51,37 @@
 `test_train_inference_python.sh` provides 5 run modes. Each mode runs on a different amount of data and is used to test either speed or accuracy:

-- Mode 1: lite_train_infer, train with a small amount of data to quickly verify that the training-to-inference pipeline runs end to end; accuracy and speed are not checked;
+- Mode 1: lite_train_lite_infer, train with a small amount of data to quickly verify that the training-to-inference pipeline runs end to end; accuracy and speed are not checked;
 ```shell
-bash test_tipc/prepare.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'lite_train_infer'
-bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'lite_train_infer'
+bash test_tipc/prepare.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'lite_train_lite_infer'
+bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'lite_train_lite_infer'
 ```

-- Mode 2: whole_infer, train with a small amount of data and run inference on a moderate amount of data, to verify that the trained model can run inference and that the inference speed is reasonable;
+- Mode 2: lite_train_whole_infer, train with a small amount of data and run inference on a moderate amount of data, to verify that the trained model can run inference and that the inference speed is reasonable;
 ```shell
-bash test_tipc/prepare.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'whole_infer'
-bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'whole_infer'
+bash test_tipc/prepare.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'lite_train_whole_infer'
+bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'lite_train_whole_infer'
 ```

-- Mode 3: infer, no training; run inference on the full dataset, covering evaluation of the released model and dynamic-to-static export, and check the inference model's prediction time and accuracy;
+- Mode 3: whole_infer, no training; run inference on the full dataset, covering evaluation of the released model and dynamic-to-static export, and check the inference model's prediction time and accuracy;
 ```shell
-bash test_tipc/prepare.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'infer'
+bash test_tipc/prepare.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'whole_infer'
 # Usage 1:
-bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'infer'
+bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'whole_infer'
 # Usage 2: run inference on a specified GPU card; the third argument is the GPU card id
-bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'infer' '1'
+bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'whole_infer' '1'
 ```

-- Mode 4: whole_train_infer, CE: train on the full dataset and run inference on the full dataset, to verify training accuracy, inference accuracy, and inference speed;
+- Mode 4: whole_train_whole_infer, CE: train on the full dataset and run inference on the full dataset, to verify training accuracy, inference accuracy, and inference speed;
 ```shell
-bash test_tipc/prepare.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'whole_train_infer'
-bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'whole_train_infer'
+bash test_tipc/prepare.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'whole_train_whole_infer'
+bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'whole_train_whole_infer'
 ```

-- Mode 5: klquant_infer, test offline (post-training) quantization;
+- Mode 5: klquant_whole_infer, test offline (post-training) quantization;
 ```shell
-bash test_tipc/prepare.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'klquant_infer'
-bash test_tipc/test_train_inference_python.sh test_tipc/configs/ppocr_det_mobile_params.txt 'klquant_infer'
+bash test_tipc/prepare.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'klquant_whole_infer'
+bash test_tipc/test_train_inference_python.sh test_tipc/configs/ppocr_det_mobile_params.txt 'klquant_whole_infer'
 ```
diff --git a/test_tipc/prepare.sh b/test_tipc/prepare.sh
index 737256be5d39156a68189e72de5ebd413fabc3ff..0ca4c7c67bea88c0ff72a3a5a105a19d49d3136b 100644
--- a/test_tipc/prepare.sh
+++ b/test_tipc/prepare.sh
@@ -1,8 +1,9 @@
 #!/bin/bash
 FILENAME=$1
-# MODE be one of ['lite_train_infer' 'whole_infer' 'whole_train_infer', 'infer',
-# 'cpp_infer', 'serving_infer', 'klquant_infer', 'lite_infer']
+# MODE be one of ['lite_train_lite_infer' 'lite_train_whole_infer' 'whole_train_whole_infer',
+# 'whole_infer', 'klquant_whole_infer',
+# 'cpp_infer', 'serving_infer', 'lite_infer']
 MODE=$2

@@ -34,7 +35,7 @@ trainer_list=$(func_parser_value "${lines[14]}")

 # MODE be one of ['lite_train_infer' 'whole_infer' 'whole_train_infer']
 MODE=$2
-if [ ${MODE} = "lite_train_infer" ];then
+if [ ${MODE} = "lite_train_lite_infer" ];then
     # pretrain lite train data
     wget -nc -P ./pretrain_models/ https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/MobileNetV3_large_x0_5_pretrained.pdparams
     wget -nc -P ./pretrain_models/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_mv3_db_v2.0_train.tar
@@ -50,14 +51,14 @@ if [ ${MODE} = "lite_train_infer" ];then
     ln -s ./icdar2015_lite ./icdar2015
     cd ../
     cd ./inference && tar xf rec_inference.tar && cd ../
-elif [ ${MODE} = "whole_train_infer" ];then
+elif [ ${MODE} = "whole_train_whole_infer" ];then
     wget -nc -P ./pretrain_models/ https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/MobileNetV3_large_x0_5_pretrained.pdparams
     rm -rf ./train_data/icdar2015
     rm -rf ./train_data/ic15_data
     wget -nc -P ./train_data/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/icdar2015.tar
     wget -nc -P ./train_data/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/ic15_data.tar
     cd ./train_data/ && tar xf icdar2015.tar && tar xf ic15_data.tar && cd ../
-elif [ ${MODE} = "whole_infer" ];then
+elif [ ${MODE} = "lite_train_whole_infer" ];then
     wget -nc -P ./pretrain_models/ https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/MobileNetV3_large_x0_5_pretrained.pdparams
     rm -rf ./train_data/icdar2015
     rm -rf ./train_data/ic15_data
@@ -66,7 +67,7 @@ elif [ ${MODE} = "whole_infer" ];then
     cd ./train_data/ && tar xf icdar2015_infer.tar && tar xf ic15_data.tar
     ln -s ./icdar2015_infer ./icdar2015
     cd ../
-elif [ ${MODE} = "infer" ];then
+elif [ ${MODE} = "whole_infer" ];then
     if [ ${model_name} = "ocr_det" ]; then
         eval_model_name="ch_ppocr_mobile_v2.0_det_train"
         rm -rf ./train_data/icdar2015
@@ -100,7 +101,7 @@ elif [ ${MODE} = "infer" ];then
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar
         cd ./inference && tar xf ${eval_model_name}.tar && tar xf rec_inference.tar && cd ../
     fi
-elif [ ${MODE} = "klquant_infer" ];then
+elif [ ${MODE} = "klquant_whole_infer" ];then
     if [ ${model_name} = "ocr_det" ]; then
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/ch_det_data_50.tar
diff --git a/test_tipc/test_train_inference_python.sh b/test_tipc/test_train_inference_python.sh
index 756e1f89d74c1df8de50cf8e23fd3d9c95bd20c5..6458c995c8649a134d92d7101e49871323fe5eb7 100644
--- a/test_tipc/test_train_inference_python.sh
+++ b/test_tipc/test_train_inference_python.sh
@@ -2,7 +2,7 @@ source tests/common_func.sh

 FILENAME=$1
-# MODE be one of ['lite_train_infer' 'whole_infer' 'whole_train_infer', 'infer', 'klquant_infer']
+# MODE be one of ['lite_train_lite_infer' 'lite_train_whole_infer' 'whole_train_whole_infer', 'whole_infer', 'klquant_whole_infer']
 MODE=$2

 dataline=$(awk 'NR==1, NR==51{print}' $FILENAME)
@@ -88,7 +88,7 @@ infer_key1=$(func_parser_key "${lines[50]}")
 infer_value1=$(func_parser_value "${lines[50]}")

 # parser klquant_infer
-if [ ${MODE} = "klquant_infer" ]; then
+if [ ${MODE} = "klquant_whole_infer" ]; then
     dataline=$(awk 'NR==82, NR==98{print}' $FILENAME)
     lines=(${dataline})
     # parser inference model
@@ -202,7 +202,7 @@ function func_inference(){
     done
 }

-if [ ${MODE} = "infer" ] || [ ${MODE} = "klquant_infer" ]; then
+if [ ${MODE} = "whole_infer" ] || [ ${MODE} = "klquant_whole_infer" ]; then
     GPUID=$3
     if [ ${#GPUID} -le 0 ];then
         env=" "
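Because this patch only renames the mode strings, a wrapper that still passes an old name such as 'lite_train_infer' now falls through every if/elif branch in prepare.sh without preparing any data. Below is a minimal guard sketch; it is a hypothetical helper (check_mode.sh), not part of the patch, and the accepted names are taken from the updated comment at the top of prepare.sh:

```bash
#!/bin/bash
# check_mode.sh - hypothetical guard, not part of the patch above.
# Rejects mode names that are no longer recognized after the rename.
MODE=$1
VALID_MODES="lite_train_lite_infer lite_train_whole_infer whole_train_whole_infer whole_infer klquant_whole_infer cpp_infer serving_infer lite_infer"

for m in ${VALID_MODES}; do
    if [ "${MODE}" = "${m}" ]; then
        exit 0   # recognized new-style mode name
    fi
done

echo "Unknown MODE '${MODE}'. Expected one of: ${VALID_MODES}" >&2
exit 1
```

A caller could run `bash check_mode.sh 'lite_train_lite_infer' && bash test_tipc/prepare.sh ./test_tipc/configs/ppocr_det_mobile_params.txt 'lite_train_lite_infer'` so that a stale mode name fails loudly instead of silently preparing nothing.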