diff --git a/tests/ocr_det_params.txt b/tests/ocr_det_params.txt
index 6aff66c6aa8591c9f48c81cf857809f956a3cda2..4e59fcd75674ee14f30a33963c2570a46888a9a5 100644
--- a/tests/ocr_det_params.txt
+++ b/tests/ocr_det_params.txt
@@ -49,4 +49,13 @@ inference:tools/infer/predict_det.py
 --save_log_path:null
 --benchmark:True
 null:null
-
+===========================deploy_params===========================
+trans_model:-m paddle_serving_client.convert
+--dirname:./inference/ch_ppocr_mobile_v2.0_det_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--serving_server:./deploy/pdserving/ppocr_det_mobile_2.0_serving/
+--serving_client:./deploy/pdserving/ppocr_det_mobile_2.0_client/
+serving_dir:./deploy/pdserving
+web_service:web_service_det.py &>log.txt &
+pipeline:pipeline_http_client.py --image_dir=../../doc/imgs
diff --git a/tests/ocr_rec_params.txt b/tests/ocr_rec_params.txt
index 71d12f90b3bda128c3f6047c6740911dac417954..cc558844718be602ff24de85d78dae0562f34f90 100644
--- a/tests/ocr_rec_params.txt
+++ b/tests/ocr_rec_params.txt
@@ -9,7 +9,7 @@ Global.save_model_dir:./output/
 Train.loader.batch_size_per_card:lite_train_infer=128|whole_train_infer=128
 Global.pretrained_model:null
 train_model_name:latest
-train_infer_img_dir:./train_data/ic15_data/train
+train_infer_img_dir:./train_data/ic15_data/test
 null:null
 ##
 trainer:norm_train|pact_train
diff --git a/tests/prepare.sh b/tests/prepare.sh
index d27a051cb0a7effc50305db8e2268ad36492d6cb..8f5291da6943a2a7250c4057638f14daed81506b 100644
--- a/tests/prepare.sh
+++ b/tests/prepare.sh
@@ -74,3 +74,13 @@ else
     fi
 fi
 
+# prepare serving env
+python_name=$(func_parser_value "${lines[2]}")
+${python_name} -m pip install paddle-serving-server-gpu==0.6.1.post101
+${python_name} -m pip install paddle_serving_client==0.6.1
+${python_name} -m pip install paddle-serving-app==0.6.1
+wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar
+wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar
+cd ./inference && tar xf ch_ppocr_mobile_v2.0_det_infer.tar && tar xf ch_ppocr_mobile_v2.0_rec_infer.tar
+
+
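Note: tests/test.sh consumes the deploy_params block above positionally
(lines[52] through lines[60] of the params file). Once assembled, the model
conversion step expands to roughly the command below (a sketch; `python`
stands for the interpreter parsed from the params file):

    # convert the inference model into Paddle Serving server/client configs
    python -m paddle_serving_client.convert \
        --dirname ./inference/ch_ppocr_mobile_v2.0_det_infer/ \
        --model_filename inference.pdmodel \
        --params_filename inference.pdiparams \
        --serving_server ./deploy/pdserving/ppocr_det_mobile_2.0_serving/ \
        --serving_client ./deploy/pdserving/ppocr_det_mobile_2.0_client/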
"${inference_py}" "${save_infer_dir}" "${LOG_PATH}" "${infer_img_dir}" ${is_quant} Count=$(($Count + 1)) + #run serving + set_dirname=$(func_set_params "${infer_model_dir_key}" "${infer_model_dir_value}") + set_model_filename=$(func_set_params "${model_filename_key}" "${model_filename_value}") + set_params_filename=$(func_set_params "${params_filename_key}" "${params_filename_value}") + set_serving_server=$(func_set_params "${serving_server_key}" "${serving_server_value}") + set_serving_client=$(func_set_params "${serving_client_key}" "${serving_client_value}") + trans_model_cmd="${python} ${trans_model_py} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_serving_server} ${set_serving_client}" + eval $trans_model_cmd + cd ${serving_dir_value} + echo $PWD + web_service_cmd="${python} ${web_service_py}" + echo $web_service_cmd + eval $web_service_cmd + pipline_cmd="${python} ${pipline_py}" + echo $pipline_cmd + eval $pipline_cmd + done else @@ -363,3 +396,4 @@ else done # done with: for autocast in ${autocast_list[*]}; do done # done with: for gpu in ${gpu_list[*]}; do fi # end if [ ${MODE} = "infer" ]; then +