From 31d7a2c54e41923ec7d9fae558238599060d3bca Mon Sep 17 00:00:00 2001
From: cuicheng01
Date: Thu, 25 Nov 2021 02:50:16 +0000
Subject: [PATCH] add compile chain for tipc lite

---
 ...nux_gpu_normal_normal_lite_cpp_arm_cpu.txt |  0
 ..._normal_normal_lite_cpp_arm_gpu_opencl.txt |  0
 ...nux_gpu_normal_normal_lite_cpp_arm_cpu.txt |  0
 ..._normal_normal_lite_cpp_arm_gpu_opencl.txt |  0
 ...nux_gpu_normal_normal_lite_cpp_arm_cpu.txt | 13 ++++
 ..._normal_normal_lite_cpp_arm_gpu_opencl.txt | 13 ++++
 ...nux_gpu_normal_normal_lite_cpp_arm_cpu.txt |  9 +--
 ..._normal_normal_lite_cpp_arm_gpu_opencl.txt | 13 ++++
 test_tipc/docs/test_lite_arm_cpp.md           | 20 +++---
 test_tipc/prepare_lite_cpp.sh                 | 70 +++++++++++--------
 10 files changed, 98 insertions(+), 40 deletions(-)
 rename test_tipc/configs/{ppocr_system_mobile => ch_PP-OCRv2}/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt (100%)
 rename test_tipc/configs/{ppocr_system_mobile => ch_PP-OCRv2}/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt (100%)
 rename test_tipc/configs/{ppocr_det_mobile => ch_PP-OCRv2_det}/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt (100%)
 rename test_tipc/configs/{ppocr_det_mobile => ch_PP-OCRv2_det}/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt (100%)
 create mode 100644 test_tipc/configs/ch_ppocr_mobile_v2.0/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
 create mode 100644 test_tipc/configs/ch_ppocr_mobile_v2.0/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
 create mode 100644 test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt

diff --git a/test_tipc/configs/ppocr_system_mobile/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt b/test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
similarity index 100%
rename from test_tipc/configs/ppocr_system_mobile/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
rename to test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
diff --git a/test_tipc/configs/ppocr_system_mobile/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt b/test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
similarity index 100%
rename from test_tipc/configs/ppocr_system_mobile/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
rename to test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
diff --git a/test_tipc/configs/ppocr_det_mobile/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt b/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
similarity index 100%
rename from test_tipc/configs/ppocr_det_mobile/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
rename to test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
diff --git a/test_tipc/configs/ppocr_det_mobile/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt b/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
similarity index 100%
rename from test_tipc/configs/ppocr_det_mobile/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
rename to test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2.0/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2.0/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
new file mode 100644
index 00000000..95943683
--- /dev/null
+++ b/test_tipc/configs/ch_ppocr_mobile_v2.0/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
@@ -0,0 +1,13 @@
+===========================lite_params===========================
+inference:./ocr_db_crnn system
+runtime_device:ARM_CPU
+det_infer_model:ch_ppocr_mobile_v2.0_det_infer|ch_ppocr_db_mobile_v2.0_det_quant_infer
+rec_infer_model:ch_ppocr_mobile_v2.0_rec_infer|ch_ppocr_mobile_v2.0_rec_slim_infer
+cls_infer_model:ch_ppocr_mobile_v2.0_cls_infer|ch_ppocr_mobile_v2.0_cls_slim_infer
+--cpu_threads:1|4
+--det_batch_size:1
+--rec_batch_size:1
+--image_dir:./test_data/icdar2015_lite/text_localization/ch4_test_images/
+--config_dir:./config.txt
+--rec_dict_dir:./ppocr_keys_v1.txt
+--benchmark:True
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2.0/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt b/test_tipc/configs/ch_ppocr_mobile_v2.0/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
new file mode 100644
index 00000000..c13b9bee
--- /dev/null
+++ b/test_tipc/configs/ch_ppocr_mobile_v2.0/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
@@ -0,0 +1,13 @@
+===========================lite_params===========================
+inference:./ocr_db_crnn system
+runtime_device:ARM_GPU_OPENCL
+det_infer_model:ch_ppocr_mobile_v2.0_det_infer|ch_ppocr_db_mobile_v2.0_det_quant_infer
+rec_infer_model:ch_ppocr_mobile_v2.0_rec_infer|ch_ppocr_mobile_v2.0_rec_slim_infer
+cls_infer_model:ch_ppocr_mobile_v2.0_cls_infer|ch_ppocr_mobile_v2.0_cls_slim_infer
+--cpu_threads:1|4
+--det_batch_size:1
+--rec_batch_size:1
+--image_dir:./test_data/icdar2015_lite/text_localization/ch4_test_images/
+--config_dir:./config.txt
+--rec_dict_dir:./ppocr_keys_v1.txt
+--benchmark:True
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
index af71ce3b..e83534b5 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
@@ -1,12 +1,13 @@
 ===========================lite_params===========================
 inference:./ocr_db_crnn det
-infer_model:ch_PP-OCRv2_det_infer|ch_PP-OCRv2_det_slim_quant_infer
 runtime_device:ARM_CPU
+det_infer_model:ch_ppocr_mobile_v2.0_det_infer|ch_ppocr_db_mobile_v2.0_det_quant_infer
+null:null
+null:null
 --cpu_threads:1|4
 --det_batch_size:1
---rec_batch_size:1
---system_batch_size:1
+null:null
 --image_dir:./test_data/icdar2015_lite/text_localization/ch4_test_images/
 --config_dir:./config.txt
---rec_dict_dir:./ppocr_keys_v1.txt
+null:null
 --benchmark:True
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt b/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
new file mode 100644
index 00000000..2ce8aec5
--- /dev/null
+++ b/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
@@ -0,0 +1,13 @@
+===========================lite_params===========================
+inference:./ocr_db_crnn det
+runtime_device:ARM_GPU_OPENCL
+det_infer_model:ch_ppocr_mobile_v2.0_det_infer|ch_ppocr_db_mobile_v2.0_det_quant_infer
+null:null
+null:null
+--cpu_threads:1|4
+--det_batch_size:1
+null:null
+--image_dir:./test_data/icdar2015_lite/text_localization/ch4_test_images/
+--config_dir:./config.txt
+null:null
+--benchmark:True
diff --git a/test_tipc/docs/test_lite_arm_cpp.md b/test_tipc/docs/test_lite_arm_cpp.md
index b3f24f47..166b5981 100644
--- a/test_tipc/docs/test_lite_arm_cpp.md
+++ b/test_tipc/docs/test_lite_arm_cpp.md
@@ -16,7 +16,7 @@ The main program of the Lite\_arm\_cpp prediction functional test is `test_lite_arm_cpp.sh`, which can
 
 | Model type | batch-size | threads | Number of predictors | Prediction library source | Test hardware |
 | :----: | :----: | :----: | :----: | :----: | :----: |
-| Normal model/quantized model | 1 | 1/4 | single/multiple | download | ARM\_CPU/ARM\_GPU_OPENCL |
+| Normal model/quantized model | 1 | 1/4 | single/multiple | download/compile | ARM\_CPU/ARM\_GPU_OPENCL |
 
 
 ## 2. Test workflow
@@ -30,8 +30,11 @@ The main program of the Lite\_arm\_cpp prediction functional test is `test_lite_arm_cpp.sh`, which can
 
 ```shell
-# Prepare the data and models
-bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ppocr_det_mobile/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
+# Prepare the data, models and Paddle-Lite prediction library
+# Prediction library obtained by downloading
+bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt download
+# Prediction library obtained by compiling
+bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt compile
 
 # On-device test:
 bash test_lite_arm_cpp.sh model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
@@ -42,8 +45,11 @@ bash test_lite_arm_cpp.sh model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
 
 ```shell
-# Prepare the data and models
-bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ppocr_det_mobile/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
+# Prepare the data, models and Paddle-Lite prediction library
+# Prediction library obtained by downloading
+bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt download
+# Prediction library obtained by compiling
+bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt compile
 
 # On-device test:
 bash test_lite_arm_cpp.sh model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
@@ -53,9 +59,7 @@ bash test_lite_arm_cpp.sh model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.
 
 **Note**:
-1. Running this project requires commands such as bash, which the traditional adb approach does not install well. It is therefore recommended to connect to the computer by opening a virtual terminal on the phone; for the connection method see [connecting an Android phone to the computer via termux](./termux_for_android.md).
-
-2. To test the complete text detection and recognition pipeline, replace the config file with `test_tipc/configs/ppocr_system_mobile/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt` when running `prepare_lite_cpp.sh`, and change the config file to the same one in the on-device test stage.
+Running this project requires commands such as bash, which the traditional adb approach does not install well. It is therefore recommended to connect to the computer by opening a virtual terminal on the phone; for the connection method see [connecting an Android phone to the computer via termux](./termux_for_android.md).
 
 ### 2.2 Run results
diff --git a/test_tipc/prepare_lite_cpp.sh b/test_tipc/prepare_lite_cpp.sh
index b129322d..94af43c8 100644
--- a/test_tipc/prepare_lite_cpp.sh
+++ b/test_tipc/prepare_lite_cpp.sh
@@ -6,6 +6,7 @@ dataline=$(cat ${FILENAME})
 IFS=$'\n'
 lines=(${dataline})
 IFS=$'\n'
+paddlelite_library_source=$2
 
 inference_cmd=$(func_parser_value "${lines[1]}")
 DEVICE=$(func_parser_value "${lines[2]}")
@@ -13,40 +14,42 @@ det_lite_model_list=$(func_parser_value "${lines[3]}")
 rec_lite_model_list=$(func_parser_value "${lines[4]}")
 cls_lite_model_list=$(func_parser_value "${lines[5]}")
 
-if [[ $inference_cmd =~ "det" ]];then
+if [[ $inference_cmd =~ "det" ]]; then
     lite_model_list=${det_lite_model_list}
-elif [[ $inference_cmd =~ "rec" ]];then
+elif [[ $inference_cmd =~ "rec" ]]; then
     lite_model_list=(${rec_lite_model_list[*]} ${cls_lite_model_list[*]})
-elif [[ $inference_cmd =~ "system" ]];then
+elif [[ $inference_cmd =~ "system" ]]; then
     lite_model_list=(${det_lite_model_list[*]} ${rec_lite_model_list[*]} ${cls_lite_model_list[*]})
 else
     echo "inference_cmd is wrong, please check."
     exit 1
 fi
 
-if [ ${DEVICE} = "ARM_CPU" ];then
+if [ ${DEVICE} = "ARM_CPU" ]; then
     valid_targets="arm"
-    paddlelite_url="https://github.com/PaddlePaddle/Paddle-Lite/releases/download/v2.10-rc/inference_lite_lib.android.armv8.gcc.c++_shared.with_extra.with_cv.tar.gz"
+    paddlelite_library_url="https://github.com/PaddlePaddle/Paddle-Lite/releases/download/v2.10-rc/inference_lite_lib.android.armv8.gcc.c++_shared.with_extra.with_cv.tar.gz"
     end_index="66"
-elif [ ${DEVICE} = "ARM_GPU_OPENCL" ];then
+    compile_with_opencl="OFF"
+elif [ ${DEVICE} = "ARM_GPU_OPENCL" ]; then
     valid_targets="opencl"
-    paddlelite_url="https://github.com/PaddlePaddle/Paddle-Lite/releases/download/v2.10-rc/inference_lite_lib.armv8.clang.with_exception.with_extra.with_cv.opencl.tar.gz"
+    paddlelite_library_url="https://github.com/PaddlePaddle/Paddle-Lite/releases/download/v2.10-rc/inference_lite_lib.armv8.clang.with_exception.with_extra.with_cv.opencl.tar.gz"
     end_index="71"
+    compile_with_opencl="ON"
 else
-    echo "DEVICE only suport ARM_CPU, ARM_GPU_OPENCL."
+    echo "DEVICE only support ARM_CPU, ARM_GPU_OPENCL."
     exit 2
 fi
 
-# prepare lite .nb model
+# prepare paddlelite model
 pip install paddlelite==2.10-rc
 current_dir=${PWD}
 IFS="|"
 model_path=./inference_models
 
 for model in ${lite_model_list[*]}; do
-    if [[ $model =~ "PP-OCRv2" ]];then
+    if [[ $model =~ "PP-OCRv2" ]]; then
         inference_model_url=https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/${model}.tar
-    elif [[ $model =~ "v2.0" ]];then
+    elif [[ $model =~ "v2.0" ]]; then
         inference_model_url=https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/${model}.tar
     else
         echo "Model is wrong, please check."
@@ -63,31 +66,42 @@ done
 
 # prepare test data
 data_url=https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/icdar2015_lite.tar
-model_path=./inference_models
-inference_model=${inference_model_url##*/}
 data_file=${data_url##*/}
-wget -nc -P ./inference_models ${inference_model_url}
 wget -nc -P ./test_data ${data_url}
-cd ./inference_models && tar -xf ${inference_model} && cd ../
 cd ./test_data && tar -xf ${data_file} && rm ${data_file} && cd ../
 
-# prepare lite env
-paddlelite_zipfile=$(echo $paddlelite_url | awk -F "/" '{print $NF}')
-paddlelite_file=${paddlelite_zipfile:0:${end_index}}
-wget ${paddlelite_url} && tar -xf ${paddlelite_zipfile}
-mkdir -p ${paddlelite_file}/demo/cxx/ocr/test_lite
-cp -r ${model_path}/*_opt.nb test_data ${paddlelite_file}/demo/cxx/ocr/test_lite
-cp ppocr/utils/ppocr_keys_v1.txt deploy/lite/config.txt ${paddlelite_file}/demo/cxx/ocr/test_lite
-cp -r ./deploy/lite/* ${paddlelite_file}/demo/cxx/ocr/
-cp ${paddlelite_file}/cxx/lib/libpaddle_light_api_shared.so ${paddlelite_file}/demo/cxx/ocr/test_lite
-cp ${FILENAME} test_tipc/test_lite_arm_cpp.sh test_tipc/common_func.sh ${paddlelite_file}/demo/cxx/ocr/test_lite
-cd ${paddlelite_file}/demo/cxx/ocr/
+# prepare paddlelite predict library
+if [[ ${paddlelite_library_source} = "download" ]]; then
+    paddlelite_library_zipfile=$(echo $paddlelite_library_url | awk -F "/" '{print $NF}')
+    paddlelite_library_file=${paddlelite_library_zipfile:0:${end_index}}
+    wget ${paddlelite_library_url} && tar -xf ${paddlelite_library_zipfile}
+    cd ${paddlelite_library_zipfile}
+elif [[ ${paddlelite_library_source} = "compile" ]]; then
+    git clone -b release/v2.10 https://github.com/PaddlePaddle/Paddle-Lite.git
+    cd Paddle-Lite
+    ./lite/tools/build_android.sh --arch=armv8 --with_cv=ON --with_extra=ON --toolchain=clang --with_opencl=${compile_with_opencl}
+    cd ../
+    cp -r Paddle-Lite/build.lite.android.armv8.clang/inference_lite_lib.android.armv8/ .
+    paddlelite_library_file=inference_lite_lib.android.armv8
+else
+    echo "paddlelite_library_source only support 'download' and 'compile'"
+    exit 3
+fi
+
+# organize the required files
+mkdir -p ${paddlelite_library_file}/demo/cxx/ocr/test_lite
+cp -r ${model_path}/*_opt.nb test_data ${paddlelite_library_file}/demo/cxx/ocr/test_lite
+cp ppocr/utils/ppocr_keys_v1.txt deploy/lite/config.txt ${paddlelite_library_file}/demo/cxx/ocr/test_lite
+cp -r ./deploy/lite/* ${paddlelite_library_file}/demo/cxx/ocr/
+cp ${paddlelite_library_file}/cxx/lib/libpaddle_light_api_shared.so ${paddlelite_library_file}/demo/cxx/ocr/test_lite
+cp ${FILENAME} test_tipc/test_lite_arm_cpp.sh test_tipc/common_func.sh ${paddlelite_library_file}/demo/cxx/ocr/test_lite
+cd ${paddlelite_library_file}/demo/cxx/ocr/
 git clone https://github.com/cuicheng01/AutoLog.git
 
-# make
+# compile and do some postprocess
 make -j
 sleep 1
 make -j
 cp ocr_db_crnn test_lite && cp test_lite/libpaddle_light_api_shared.so test_lite/libc++_shared.so
 tar -cf test_lite.tar ./test_lite && cp test_lite.tar ${current_dir} && cd ${current_dir}
-rm -rf ${paddlelite_file}* && rm -rf ${model_path}
+rm -rf ${paddlelite_library_file}* && rm -rf ${model_path}
-- 
GitLab
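For quick reference, here is a minimal usage sketch of the two-mode flow this patch introduces. The commands are taken from the updated `test_lite_arm_cpp.md` above; it assumes the preparation step runs from the PaddleOCR repository root, and that the on-device step runs from a terminal on the phone (e.g. termux, as the doc recommends) after the generated `test_lite.tar` has been transferred there.

```shell
# Host side: prepare data, models and the Paddle-Lite prediction library.
# The new second argument selects how the library is obtained (choose one of the two).
bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt download  # use the pre-built release
bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt compile   # build Paddle-Lite from source

# Phone side: after transferring and unpacking test_lite.tar (not shown),
# run the test with the same config file name.
bash test_lite_arm_cpp.sh model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
```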