Unverified commit 6047215c, authored by andyjpaddle, committed by GitHub

Merge pull request #6462 from andyjpaddle/cpp_infer_tipc

[TIPC] update_tipc_cpp_infer
@@ -3,7 +3,7 @@ model_name:ch_PP-OCRv2
 use_opencv:True
 infer_model:./inference/ch_PP-OCRv2_det_infer/
 infer_quant:False
-inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt
+inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt --rec_img_h=32
 --use_gpu:True|False
 --enable_mkldnn:False
 --cpu_threads:6
......
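For context: every non-comment line in these TIPC config files is a key:value pair parsed by the test runner, and a `|` inside a value (e.g. --use_gpu:True|False) lists alternatives the runner loops over. With --rec_img_h=32 appended to the inference: entry, an expanded command looks roughly like the sketch below (hedged; --image_dir and the chosen flag values are illustrative placeholders that the runner fills in at test time):

    ./deploy/cpp_infer/build/ppocr \
        --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt \
        --rec_img_h=32 \
        --use_gpu=False --enable_mkldnn=False --cpu_threads=6 \
        --image_dir=./inference/ch_det_data_50/   # illustrative input path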
@@ -3,7 +3,7 @@ model_name:ch_PP-OCRv2_rec
 use_opencv:True
 infer_model:./inference/ch_PP-OCRv2_rec_infer/
 infer_quant:False
-inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt
+inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt --rec_img_h=32
 --use_gpu:True|False
 --enable_mkldnn:False
 --cpu_threads:6
......
@@ -3,7 +3,7 @@ model_name:ch_ppocr_mobile_v2.0
 use_opencv:True
 infer_model:./inference/ch_ppocr_mobile_v2.0_det_infer/
 infer_quant:False
-inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt
+inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt --rec_img_h=32
 --use_gpu:True|False
 --enable_mkldnn:False
 --cpu_threads:6
......
@@ -3,7 +3,7 @@ model_name:ch_ppocr_mobile_v2.0_rec
 use_opencv:True
 infer_model:./inference/ch_ppocr_mobile_v2.0_rec_infer/
 infer_quant:False
-inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt
+inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt --rec_img_h=32
 --use_gpu:True|False
 --enable_mkldnn:False
 --cpu_threads:6
......
@@ -3,7 +3,7 @@ model_name:ch_ppocr_server_v2.0
 use_opencv:True
 infer_model:./inference/ch_ppocr_server_v2.0_det_infer/
 infer_quant:False
-inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt
+inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt --rec_img_h=32
 --use_gpu:True|False
 --enable_mkldnn:False
 --cpu_threads:6
......
@@ -3,7 +3,7 @@ model_name:ch_ppocr_server_v2.0_rec
 use_opencv:True
 infer_model:./inference/ch_ppocr_server_v2.0_rec_infer/
 infer_quant:False
-inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt
+inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt --rec_img_h=32
 --use_gpu:True|False
 --enable_mkldnn:False
 --cpu_threads:6
......
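All six configs above receive the same one-flag edit. The likely reason (stated here as an inference, not from the commit itself) is that the C++ demo's default recognizer input height now targets PP-OCRv3, which expects 48-pixel-high crops, while these PP-OCRv2 and v2.0 recognizers take 32-pixel-high inputs; passing --rec_img_h=32 explicitly keeps the older models resizing correctly. For contrast, a hypothetical PP-OCRv3 config line would read:

    inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppocr_keys_v1.txt --rec_img_h=48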
@@ -328,7 +328,6 @@ if [ ${MODE} = "klquant_whole_infer" ]; then
         cd ./inference && tar xf rec_inference.tar && tar xf ch_PP-OCRv2_rec_infer.tar && cd ../
     fi
     if [ ${model_name} = "ch_PP-OCRv3_rec_KL" ]; then
-        # TODO check model link
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar --no-check-certificate
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/rec_inference.tar --no-check-certificate
         wget -nc -P ./train_data/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/ic15_data.tar --no-check-certificate
@@ -341,7 +340,6 @@ if [ ${MODE} = "klquant_whole_infer" ]; then
         cd ./inference && tar xf ch_PP-OCRv2_det_infer.tar && tar xf ch_det_data_50.tar && cd ../
     fi
     if [ ${model_name} = "ch_PP-OCRv3_det_KL" ]; then
-        # TODO check model link
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/ch_det_data_50.tar --no-check-certificate
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar --no-check-certificate
         cd ./inference && tar xf ch_PP-OCRv3_det_infer.tar && tar xf ch_det_data_50.tar && cd ../
@@ -417,9 +415,9 @@ if [ ${MODE} = "serving_infer" ];then
     IFS='|'
     array=(${python_name_list})
     python_name=${array[0]}
-    # ${python_name} -m pip install paddle-serving-server-gpu==0.8.3.post101
-    # ${python_name} -m pip install paddle_serving_client==0.8.3
-    # ${python_name} -m pip install paddle-serving-app==0.8.3
+    ${python_name} -m pip install paddle-serving-server-gpu
+    ${python_name} -m pip install paddle_serving_client
+    ${python_name} -m pip install paddle-serving-app
     # wget model
     if [[ ${model_name} =~ "ch_ppocr_mobile_v2.0" ]]; then
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar --no-check-certificate
@@ -447,7 +445,7 @@ if [ ${MODE} = "paddle2onnx_infer" ];then
     # prepare serving env
     python_name=$(func_parser_value "${lines[2]}")
     ${python_name} -m pip install paddle2onnx
-    ${python_name} -m pip install onnxruntime==1.4.0
+    ${python_name} -m pip install onnxruntime
     # wget model
     if [[ ${model_name} =~ "ch_ppocr_mobile_v2.0" ]]; then
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar --no-check-certificate
......
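The two hunks above replace commented-out or version-pinned installs with unpinned ones, so each test run pulls the latest compatible wheels. Should a newer release break the tests, the previous pins can be restored; the versions below are exactly the ones this commit removed, shown purely as an example:

    python3 -m pip install paddle-serving-server-gpu==0.8.3.post101
    python3 -m pip install paddle_serving_client==0.8.3
    python3 -m pip install paddle-serving-app==0.8.3
    python3 -m pip install onnxruntime==1.4.0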
@@ -189,6 +189,9 @@ else
         wget -nc $PADDLEInfer --no-check-certificate
     fi
     tar zxf paddle_inference.tgz
+    if [ ! -d "paddle_inference" ]; then
+        ln -s paddle_inference_install_dir paddle_inference
+    fi
     echo "################### download paddle inference finished ###################"
 fi
 LIB_DIR=$(pwd)/paddle_inference/
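The added symlink covers Paddle Inference packages that extract to paddle_inference_install_dir rather than paddle_inference, so the LIB_DIR=$(pwd)/paddle_inference/ assignment at the end of the hunk resolves either way. A throwaway check (not part of the repo) to see which layout a given tarball uses:

    tar tzf paddle_inference.tgz | head -n 1   # prints the archive's top-level directory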
@@ -218,11 +221,10 @@ echo "################### build PaddleOCR demo finished ###################"
 # set cuda device
 GPUID=$2
 if [ ${#GPUID} -le 0 ];then
-    env=" "
+    env="export CUDA_VISIBLE_DEVICES=0"
 else
     env="export CUDA_VISIBLE_DEVICES=${GPUID}"
 fi
-set CUDA_VISIBLE_DEVICES
 eval $env
......
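With this change, omitting the script's second argument selects GPU 0 instead of leaving CUDA_VISIBLE_DEVICES untouched, and the stray `set CUDA_VISIBLE_DEVICES` line (which in bash clobbers the positional parameters rather than setting an environment variable) is removed. A hedged usage sketch with an illustrative config path; only the GPUID=$2 argument position is taken from the hunk above:

    bash test_tipc/test_inference_cpp.sh test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt 1   # run on GPU 1
    bash test_tipc/test_inference_cpp.sh test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt     # now defaults to GPU 0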