diff --git a/tests/prepare.sh b/tests/prepare.sh
index cdcbfb0ffb6d95c472718ed540a099561e7acffb..aa6420054a0d61b8e1aa3554328fcda6f57f4e9d 100644
--- a/tests/prepare.sh
+++ b/tests/prepare.sh
@@ -1,6 +1,7 @@
 #!/bin/bash
 FILENAME=$1
-# MODE be one of ['lite_train_infer' 'whole_infer' 'whole_train_infer', 'infer', 'cpp_infer']
+
+# MODE must be one of ['lite_train_infer' 'whole_infer' 'whole_train_infer', 'infer', 'cpp_infer', 'serving_infer']
 MODE=$2
 
 dataline=$(cat ${FILENAME})
@@ -88,16 +89,16 @@ elif [ ${MODE} = "infer" ] || [ ${MODE} = "cpp_infer" ];then
     fi
 fi
 
-# prepare serving env
-python_name=$(func_parser_value "${lines[2]}")
-wget https://paddle-serving.bj.bcebos.com/chain/paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl
-${python_name} -m pip install install paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl
-${python_name} -m pip install paddle_serving_client==0.6.1
-${python_name} -m pip install paddle-serving-app==0.6.3
-wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar
-wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar
-cd ./inference && tar xf ch_ppocr_mobile_v2.0_det_infer.tar && tar xf ch_ppocr_mobile_v2.0_rec_infer.tar
-
+if [ ${MODE} = "serving_infer" ];then
+    # prepare serving env
+    python_name=$(func_parser_value "${lines[2]}")
+    ${python_name} -m pip install paddle-serving-server-gpu==0.6.1.post101
+    ${python_name} -m pip install paddle_serving_client==0.6.1
+    ${python_name} -m pip install paddle-serving-app==0.6.1
+    wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar
+    wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar
+    cd ./inference && tar xf ch_ppocr_mobile_v2.0_det_infer.tar && tar xf ch_ppocr_mobile_v2.0_rec_infer.tar && cd ../
+fi
 
 if [ ${MODE} = "cpp_infer" ];then
     cd deploy/cpp_infer