Unverified commit fbac729a, authored by MissPenguin, committed by GitHub

Merge pull request #4753 from cuicheng01/dygraph

add compile chain for tipc lite
===========================lite_params===========================
inference:./ocr_db_crnn system
runtime_device:ARM_CPU
det_infer_model:ch_ppocr_mobile_v2.0_det_infer|ch_ppocr_db_mobile_v2.0_det_quant_infer
rec_infer_model:ch_ppocr_mobile_v2.0_rec_infer|ch_ppocr_mobile_v2.0_rec_slim_infer
cls_infer_model:ch_ppocr_mobile_v2.0_cls_infer|ch_ppocr_mobile_v2.0_cls_slim_infer
--cpu_threads:1|4
--det_batch_size:1
--rec_batch_size:1
--image_dir:./test_data/icdar2015_lite/text_localization/ch4_test_images/
--config_dir:./config.txt
--rec_dict_dir:./ppocr_keys_v1.txt
--benchmark:True
===========================lite_params===========================
inference:./ocr_db_crnn system
runtime_device:ARM_GPU_OPENCL
det_infer_model:ch_ppocr_mobile_v2.0_det_infer|ch_ppocr_db_mobile_v2.0_det_quant_infer
rec_infer_model:ch_ppocr_mobile_v2.0_rec_infer|ch_ppocr_mobile_v2.0_rec_slim_infer
cls_infer_model:ch_ppocr_mobile_v2.0_cls_infer|ch_ppocr_mobile_v2.0_cls_slim_infer
--cpu_threads:1|4
--det_batch_size:1
--rec_batch_size:1
--image_dir:./test_data/icdar2015_lite/text_localization/ch4_test_images/
--config_dir:./config.txt
--rec_dict_dir:./ppocr_keys_v1.txt
--benchmark:True
===========================lite_params===========================
inference:./ocr_db_crnn det
runtime_device:ARM_CPU
det_infer_model:ch_ppocr_mobile_v2.0_det_infer|ch_ppocr_db_mobile_v2.0_det_quant_infer
null:null
null:null
--cpu_threads:1|4
--det_batch_size:1
null:null
--image_dir:./test_data/icdar2015_lite/text_localization/ch4_test_images/
--config_dir:./config.txt
null:null
--benchmark:True
===========================lite_params===========================
inference:./ocr_db_crnn det
runtime_device:ARM_GPU_OPENCL
det_infer_model:ch_ppocr_mobile_v2.0_det_infer|ch_ppocr_db_mobile_v2.0_det_quant_infer
null:null
null:null
--cpu_threads:1|4
--det_batch_size:1
null:null
--image_dir:./test_data/icdar2015_lite/text_localization/ch4_test_images/
--config_dir:./config.txt
null:null
--benchmark:True
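Each `lite_params` block above follows the TIPC `key:value` convention: the text before the first colon is the key, everything after it is the value, and `|` separates alternatives that the harness iterates over (so `--cpu_threads:1|4` yields one run with 1 thread and one with 4). A minimal sketch of how such a line can be parsed and expanded, assuming a `func_parser_value` helper along the lines of the one in `test_tipc/common_func.sh` (the implementation below is illustrative, not the repo's exact code):

```shell
#!/bin/bash
# Sketch only: assumed behavior of func_parser_value from test_tipc/common_func.sh
# (split a config line on ':' and return the value part).
func_parser_value() {
    local strs=$1
    IFS=":"
    local array=(${strs})
    echo ${array[1]}
}

line="--cpu_threads:1|4"
value=$(func_parser_value "${line}")   # -> "1|4"

# '|'-separated values expand into one test run per alternative
IFS="|"
for threads in ${value}; do
    echo "would run with cpu_threads=${threads}"
done
```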
@@ -16,7 +16,7 @@ The main program of the Lite\_arm\_cpp prediction functional test is `test_lite_arm_cpp.sh`, which can ...
| Model type | batch-size | threads | Number of predictors | Prediction library source | Test hardware |
| :----: | :----: | :----: | :----: | :----: | :----: |
| normal model / quantized model | 1 | 1/4 | single / multiple | download / compile | ARM_CPU / ARM_GPU_OPENCL |
## 2. Test procedure
@@ -30,8 +30,11 @@ The main program of the Lite\_arm\_cpp prediction functional test is `test_lite_arm_cpp.sh`, which can ...
```shell
# Prepare the data, models, and Paddle-Lite prediction library
# Prediction library obtained by download
bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt download
# Prediction library built from source
bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt compile

# Test on the phone:
bash test_lite_arm_cpp.sh model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
```
@@ -42,8 +45,11 @@ bash test_lite_arm_cpp.sh model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
```shell
# Prepare the data, models, and Paddle-Lite prediction library
# Prediction library obtained by download
bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt download
# Prediction library built from source
bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt compile

# Test on the phone:
bash test_lite_arm_cpp.sh model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
```
@@ -53,9 +59,7 @@ bash test_lite_arm_cpp.sh model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.
**Note**:
Running this project requires commands such as `bash`, which cannot be installed well through the traditional adb approach, so we recommend connecting the phone to the computer by opening a virtual terminal on the phone. For the connection method, see [connecting an Android phone to a computer with termux](./termux_for_android.md).
### 2.2 Run results
@@ -6,6 +6,7 @@ dataline=$(cat ${FILENAME})
IFS=$'\n'
lines=(${dataline})
IFS=$'\n'
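# Second CLI argument: how to obtain the Paddle-Lite prediction library,
# either "download" (fetch a prebuilt release) or "compile" (build from source).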
paddlelite_library_source=$2
inference_cmd=$(func_parser_value "${lines[1]}")
DEVICE=$(func_parser_value "${lines[2]}")
@@ -13,40 +14,42 @@ det_lite_model_list=$(func_parser_value "${lines[3]}")
rec_lite_model_list=$(func_parser_value "${lines[4]}")
cls_lite_model_list=$(func_parser_value "${lines[5]}")
if [[ $inference_cmd =~ "det" ]]; then
    lite_model_list=${det_lite_model_list}
elif [[ $inference_cmd =~ "rec" ]]; then
    lite_model_list=(${rec_lite_model_list[*]} ${cls_lite_model_list[*]})
elif [[ $inference_cmd =~ "system" ]]; then
    lite_model_list=(${det_lite_model_list[*]} ${rec_lite_model_list[*]} ${cls_lite_model_list[*]})
else
    echo "inference_cmd is wrong, please check."
    exit 1
fi
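# Pick the prediction-library target for the requested device; end_index is the
# length of the library directory name inside the release tarball (the archive
# file name minus its .tar.gz suffix), used to derive paddlelite_library_file below.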
if [ ${DEVICE} = "ARM_CPU" ]; then
    valid_targets="arm"
    paddlelite_library_url="https://github.com/PaddlePaddle/Paddle-Lite/releases/download/v2.10-rc/inference_lite_lib.android.armv8.gcc.c++_shared.with_extra.with_cv.tar.gz"
    end_index="66"
    compile_with_opencl="OFF"
elif [ ${DEVICE} = "ARM_GPU_OPENCL" ]; then
    valid_targets="opencl"
    paddlelite_library_url="https://github.com/PaddlePaddle/Paddle-Lite/releases/download/v2.10-rc/inference_lite_lib.armv8.clang.with_exception.with_extra.with_cv.opencl.tar.gz"
    end_index="71"
    compile_with_opencl="ON"
else
    echo "DEVICE only supports ARM_CPU and ARM_GPU_OPENCL."
    exit 2
fi
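# NOTE: the paddlelite wheel installed in the next step ships the paddle_lite_opt
# converter; the loop body elided from this hunk presumably uses it to produce the
# *_opt.nb models that are copied into test_lite further down.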
# prepare paddlelite model
pip install paddlelite==2.10-rc
current_dir=${PWD}
IFS="|"
model_path=./inference_models

for model in ${lite_model_list[*]}; do
    if [[ $model =~ "PP-OCRv2" ]]; then
        inference_model_url=https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/${model}.tar
    elif [[ $model =~ "v2.0" ]]; then
        inference_model_url=https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/${model}.tar
    else
        echo "Model is wrong, please check."
@@ -63,31 +66,42 @@ done
# prepare test data
data_url=https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/icdar2015_lite.tar
data_file=${data_url##*/}
wget -nc -P ./test_data ${data_url}
cd ./test_data && tar -xf ${data_file} && rm ${data_file} && cd ../
# prepare paddlelite predict library
if [[ ${paddlelite_library_source} = "download" ]]; then
    paddlelite_library_zipfile=$(echo $paddlelite_library_url | awk -F "/" '{print $NF}')
    paddlelite_library_file=${paddlelite_library_zipfile:0:${end_index}}
    wget ${paddlelite_library_url} && tar -xf ${paddlelite_library_zipfile}
elif [[ ${paddlelite_library_source} = "compile" ]]; then
    git clone -b release/v2.10 https://github.com/PaddlePaddle/Paddle-Lite.git
    cd Paddle-Lite
    ./lite/tools/build_android.sh --arch=armv8 --with_cv=ON --with_extra=ON --toolchain=clang --with_opencl=${compile_with_opencl}
    cd ../
    cp -r Paddle-Lite/build.lite.android.armv8.clang/inference_lite_lib.android.armv8/ .
    paddlelite_library_file=inference_lite_lib.android.armv8
else
    echo "paddlelite_library_source only supports 'download' and 'compile'."
    exit 3
fi
# organize the required files
mkdir -p ${paddlelite_library_file}/demo/cxx/ocr/test_lite
cp -r ${model_path}/*_opt.nb test_data ${paddlelite_library_file}/demo/cxx/ocr/test_lite
cp ppocr/utils/ppocr_keys_v1.txt deploy/lite/config.txt ${paddlelite_library_file}/demo/cxx/ocr/test_lite
cp -r ./deploy/lite/* ${paddlelite_library_file}/demo/cxx/ocr/
cp ${paddlelite_library_file}/cxx/lib/libpaddle_light_api_shared.so ${paddlelite_library_file}/demo/cxx/ocr/test_lite
cp ${FILENAME} test_tipc/test_lite_arm_cpp.sh test_tipc/common_func.sh ${paddlelite_library_file}/demo/cxx/ocr/test_lite
cd ${paddlelite_library_file}/demo/cxx/ocr/
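# Fetch AutoLog, which the OCR demo links against for benchmark logging
# (role assumed from the --benchmark:True option in the configs above).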
git clone https://github.com/cuicheng01/AutoLog.git

# compile and do some postprocess
make -j
sleep 1
make -j
cp ocr_db_crnn test_lite && cp test_lite/libpaddle_light_api_shared.so test_lite/libc++_shared.so
tar -cf test_lite.tar ./test_lite && cp test_lite.tar ${current_dir} && cd ${current_dir}
rm -rf ${paddlelite_library_file}* && rm -rf ${model_path}