Unverified commit 3d1e2320, authored by wangguanzhong, committed by GitHub

[test_tipc] add serving tipc (#5865)

Parent 55ae0d1b
===========================serving_params===========================
model_name:yolov3_darknet53_270e_coco
python:python
filename:null
##
--output_dir:./output_inference
weights:https://paddledet.bj.bcebos.com/models/yolov3_darknet53_270e_coco.pdparams
norm_export:tools/export_model.py -c configs/yolov3/yolov3_darknet53_270e_coco.yml -o
quant_export:tools/export_model.py -c configs/yolov3/yolov3_darknet53_270e_coco.yml --slim_config configs/slim/quant/yolov3_darknet_qat.yml -o
fpgm_export:tools/export_model.py -c configs/yolov3/yolov3_darknet53_270e_coco.yml --slim_config configs/slim/prune/yolov3_darknet_prune_fpgm.yml -o
distill_export:null
export1:null
export2:null
kl_quant_export:tools/post_quant.py -c configs/yolov3/yolov3_darknet53_270e_coco.yml --slim_config configs/slim/post_quant/yolov3_darknet53_ptq.yml -o
--export_serving_model:True
##
start_serving:-m paddle_serving_server.serve --model serving_server
--port:9393
--gpu_ids:0
##
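For orientation, here is a minimal sketch of the launch command these serving params expand to (the working directory is an assumption; the test script further below cd's into output_inference/${model_name} before starting the server):

    nohup python -m paddle_serving_server.serve --model serving_server --port 9393 --gpu_ids 0 > serving.log 2>&1 &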
@@ -71,6 +71,16 @@ elif [ ${MODE} = "paddle2onnx_infer" ];then
# set paddle2onnx_infer env
${python} -m pip install paddle2onnx
${python} -m pip install onnxruntime==1.10.0
elif [ ${MODE} = "serving_infer" ];then
git clone https://github.com/PaddlePaddle/Serving
bash Serving/tools/paddle_env_install.sh
cd Serving
${python} -m pip install -r python/requirements.txt
cd ..
${python} -m pip install paddle-serving-client==0.8.3 -i https://pypi.tuna.tsinghua.edu.cn/simple
${python} -m pip install paddle-serving-app==0.8.3 -i https://pypi.tuna.tsinghua.edu.cn/simple
${python} -m pip install paddle-serving-server-gpu==0.8.3.post101 -i https://pypi.tuna.tsinghua.edu.cn/simple
${python} -m pip install paddlepaddle-gpu==2.2.2.post101 -f https://www.paddlepaddle.org.cn/whl/linux/mkl/avx/stable.html
else
# download coco lite data
wget -nc -P ./dataset/coco/ https://paddledet.bj.bcebos.com/data/tipc/coco_tipc.tar
......
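After this environment step, a quick import check is a useful sanity test (a sketch; the module names follow the paddle-serving-* packages pinned above):

    python -c "import paddle_serving_server, paddle_serving_client, paddle_serving_app; print('serving deps OK')"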
#!/bin/bash
source test_tipc/utils_func.sh
FILENAME=$1
# parser model_name
dataline=$(cat ${FILENAME})
IFS=$'\n'
lines=(${dataline})
model_name=$(func_parser_value "${lines[1]}")
echo "ppdet serving: ${model_name}"
python=$(func_parser_value "${lines[2]}")
filename_key=$(func_parser_key "${lines[3]}")
filename_value=$(func_parser_value "${lines[3]}")
# export params
save_export_key=$(func_parser_key "${lines[5]}")
save_export_value=$(func_parser_value "${lines[5]}")
export_weight_key=$(func_parser_key "${lines[6]}")
export_weight_value=$(func_parser_value "${lines[6]}")
norm_export=$(func_parser_value "${lines[7]}")
pact_export=$(func_parser_value "${lines[8]}")
fpgm_export=$(func_parser_value "${lines[9]}")
distill_export=$(func_parser_value "${lines[10]}")
export_key1=$(func_parser_key "${lines[11]}")
export_value1=$(func_parser_value "${lines[11]}")
export_key2=$(func_parser_key "${lines[12]}")
export_value2=$(func_parser_value "${lines[12]}")
kl_quant_export=$(func_parser_value "${lines[13]}")
export_serving_model_key=$(func_parser_key "${lines[14]}")
export_serving_model_value=$(func_parser_value "${lines[14]}")
# parser serving
start_serving=$(func_parser_value "${lines[16]}")
port_key=$(func_parser_key "${lines[17]}")
port_value=$(func_parser_value "${lines[17]}")
gpu_id_key=$(func_parser_key "${lines[18]}")
gpu_id_value=$(func_parser_value "${lines[18]}")
LOG_PATH="./test_tipc/output"
mkdir -p ${LOG_PATH}
status_log="${LOG_PATH}/results_serving.log"
function func_serving(){
IFS='|'
if [ "${gpu_id_key}" = "null" ]; then
start_serving_command="nohup ${python} ${start_serving} ${port_key} ${port_value} > serving.log 2>&1 &"
else
start_serving_command="nohup ${python} ${start_serving} ${port_key} ${port_value} ${gpu_id_key} ${gpu_id_value} > serving.log 2>&1 &"
fi
echo $start_serving_command
eval $start_serving_command
last_status=${PIPESTATUS[0]}
status_check $last_status "${start_serving_command}" "${status_log}"
}
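# With the params file above, func_serving evaluates to roughly:
#   nohup python -m paddle_serving_server.serve --model serving_server --port 9393 --gpu_ids 0 > serving.log 2>&1 &
# and status_check appends the launch result to ${status_log}.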
cd output_inference/${model_name}
echo $PWD
func_serving
# give the serving process a few seconds to load the model before the client connects
sleep 10
test_command="${python} ../../deploy/serving/test_client.py ../../deploy/serving/label_list.txt ../../demo/000000014439.jpg"
echo $test_command
eval $test_command
last_status=${PIPESTATUS[0]}
status_check $last_status"${test_command}" "${status_log}"