diff --git a/configs/keypoint/tiny_pose/README.md b/configs/keypoint/tiny_pose/README.md index 276222ce8938b0a1c275847058f691e18dc2f566..6d9c5be02d4eccfff00abc611da96296fab2223c 100644 --- a/configs/keypoint/tiny_pose/README.md +++ b/configs/keypoint/tiny_pose/README.md @@ -35,16 +35,19 @@ PP-TinyPose是PaddleDetecion针对移动端设备优化的实时关键点检测 ## 模型库 ### 关键点检测模型 -| 模型 | 输入尺寸 | AP (COCO Val) | 单人推理耗时 (FP32)| 单人推理耗时(FP16) | 配置文件 | 模型权重 | 预测部署模型 | Paddle-Lite部署模型(FP32) | Paddle-Lite部署模型(FP16)| -| :------------------------ | :-------: | :------: | :------: |:---: | :---: | :---: | :---: | :---: | :---: | -| PP-TinyPose | 128*96 | 58.1 | 4.57ms | 3.27ms | [Config](./tinypose_128x96.yml) |[Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.pdparams) | [预测部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.tar) | [Lite部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.nb) | [Lite部署模型(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96_fp16.nb) | -| PP-TinyPose | 256*192 | 68.8 | 14.07ms | 8.33ms | [Config](./tinypose_256x192.yml) | [Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.pdparams) | [预测部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.tar) | [Lite部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.nb) | [Lite部署模型(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192_fp16.nb) | +| 模型 | 输入尺寸 | AP (COCO Val) | 单人推理耗时 (FP32) | 单人推理耗时(FP16) | 配置文件 | 模型权重 | 预测部署模型 | Paddle-Lite部署模型(FP32) | Paddle-Lite部署模型(FP16) | +| :---------- | :------: | :-----------: | :-----------------: | :-----------------: | :------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| PP-TinyPose | 
128*96 | 58.1 | 4.57ms | 3.27ms | [Config](./tinypose_128x96.yml) | [Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.pdparams) | [预测部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.tar) | [Lite部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.tar) | [Lite部署模型(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96_fp16.tar) | +| PP-TinyPose | 256*192 | 68.8 | 14.07ms | 8.33ms | [Config](./tinypose_256x192.yml) | [Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.pdparams) | [预测部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.tar) | [Lite部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.tar) | [Lite部署模型(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192_fp16.tar) | + + ### 行人检测模型 -| 模型 | 输入尺寸 | mAP (COCO Val) | 平均推理耗时 (FP32) | 平均推理耗时 (FP16) | 配置文件 | 模型权重 | 预测部署模型 | Paddle-Lite部署模型(FP32) | Paddle-Lite部署模型(FP16)| -| :------------------------ | :-------: | :------: | :------: | :---: | :---: | :---: | :---: | :---: | :---: | -| PicoDet-S-Pedestrian | 192*192 | 29.0 | 4.30ms | 2.37ms | [Config](../../picodet/application/pedestrian_detection/picodet_s_192_pedestrian.yml) |[Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.pdparams) | [预测部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.tar) | [Lite部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.nb) | [Lite部署模型(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian_fp16.nb) | -| PicoDet-S-Pedestrian | 320*320 | 38.5 | 10.26ms | 6.30ms | [Config](../../picodet/application/pedestrian_detection/picodet_s_320_pedestrian.yml) | [Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.pdparams) | [预测部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.tar) | 
[Lite部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.nb) | [Lite部署模型(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian_fp16.nb) | +| 模型 | 输入尺寸 | mAP (COCO Val) | 平均推理耗时 (FP32) | 平均推理耗时 (FP16) | 配置文件 | 模型权重 | 预测部署模型 | Paddle-Lite部署模型(FP32) | Paddle-Lite部署模型(FP16) | +| :------------------- | :------: | :------------: | :-----------------: | :-----------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| PicoDet-S-Pedestrian | 192*192 | 29.0 | 4.30ms | 2.37ms | [Config](../../picodet/application/pedestrian_detection/picodet_s_192_pedestrian.yml) | [Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.pdparams) | [预测部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.tar) | [Lite部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.tar) | [Lite部署模型(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian_fp16.tar) | +| PicoDet-S-Pedestrian | 320*320 | 38.5 | 10.26ms | 6.30ms | [Config](../../picodet/application/pedestrian_detection/picodet_s_320_pedestrian.yml) | [Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.pdparams) | [预测部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.tar) | [Lite部署模型](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.tar) | [Lite部署模型(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian_fp16.tar) | + **说明** diff --git a/configs/keypoint/tiny_pose/README_en.md b/configs/keypoint/tiny_pose/README_en.md index 
d2b33a0fee87b67bdae785b6ae1919be4068fa61..e632c5b1996b71e5414fb8f596a4a497c9160015 100644 --- a/configs/keypoint/tiny_pose/README_en.md +++ b/configs/keypoint/tiny_pose/README_en.md @@ -37,14 +37,14 @@ If you want to deploy it on the mobile devives, you also need: ### Keypoint Detection Model | Model | Input Size | AP (COCO Val) | Inference Time for Single Person (FP32)| Inference Time for Single Person(FP16) | Config | Model Weights | Deployment Model | Paddle-Lite Model(FP32) | Paddle-Lite Model(FP16)| | :------------------------ | :-------: | :------: | :------: |:---: | :---: | :---: | :---: | :---: | :---: | -| PP-TinyPose | 128*96 | 58.1 | 4.57ms | 3.27ms | [Config](./tinypose_128x96.yml) |[Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.pdparams) | [Deployment Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.tar) | [Lite Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.nb) | [Lite Model(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96_fp16.nb) | -| PP-TinyPose | 256*192 | 68.8 | 14.07ms | 8.33ms | [Config](./tinypose_256x192.yml) | [Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.pdparams) | [Deployment Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.tar) | [Lite Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.nb) | [Lite Model(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192_fp16.nb) | +| PP-TinyPose | 128*96 | 58.1 | 4.57ms | 3.27ms | [Config](./tinypose_128x96.yml) |[Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.pdparams) | [Deployment Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.tar) | [Lite Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96.tar) | [Lite Model(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_128x96_fp16.tar) | +| PP-TinyPose | 256*192 | 
68.8 | 14.07ms | 8.33ms | [Config](./tinypose_256x192.yml) | [Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.pdparams) | [Deployment Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.tar) | [Lite Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192.tar) | [Lite Model(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/tinypose_256x192_fp16.tar) | ### Pedestrian Detection Model | Model | Input Size | mAP (COCO Val) | Average Inference Time (FP32)| Average Inference Time (FP16) | Config | Model Weights | Deployment Model | Paddle-Lite Model(FP32) | Paddle-Lite Model(FP16)| | :------------------------ | :-------: | :------: | :------: | :---: | :---: | :---: | :---: | :---: | :---: | -| PicoDet-S-Pedestrian | 192*192 | 29.0 | 4.30ms | 2.37ms | [Config](../../picodet/application/pedestrian_detection/picodet_s_192_pedestrian.yml) |[Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.pdparams) | [Deployment Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.tar) | [Lite Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.nb) | [Lite Model(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian_fp16.nb) | -| PicoDet-S-Pedestrian | 320*320 | 38.5 | 10.26ms | 6.30ms | [Config](../../picodet/application/pedestrian_detection/picodet_s_320_pedestrian.yml) | [Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.pdparams) | [Deployment Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.tar) | [Lite Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.nb) | [Lite Model(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian_fp16.nb) | +| PicoDet-S-Pedestrian | 192*192 | 29.0 | 4.30ms | 2.37ms | 
[Config](../../picodet/application/pedestrian_detection/picodet_s_192_pedestrian.yml) |[Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.pdparams) | [Deployment Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.tar) | [Lite Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian.tar) | [Lite Model(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_192_pedestrian_fp16.tar) | +| PicoDet-S-Pedestrian | 320*320 | 38.5 | 10.26ms | 6.30ms | [Config](../../picodet/application/pedestrian_detection/picodet_s_320_pedestrian.yml) | [Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.pdparams) | [Deployment Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.tar) | [Lite Model](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian.tar) | [Lite Model(FP16)](https://bj.bcebos.com/v1/paddledet/models/keypoint/picodet_s_320_pedestrian_fp16.tar) | **Tips** diff --git a/deploy/lite/README.md b/deploy/lite/README.md index e775b134a9923ffdbbbccfe80e8bef4d4c2c904a..e8b58e35309a225f189ca6f05b684195b48c0b75 100644 --- a/deploy/lite/README.md +++ b/deploy/lite/README.md @@ -12,7 +12,12 @@ Paddle Lite是飞桨轻量化推理引擎,为手机、IOT端提供高效推理 ### 1.1 准备交叉编译环境 交叉编译环境用于编译 Paddle Lite 和 PaddleDetection 的C++ demo。 -支持多种开发环境,不同开发环境的编译流程请参考对应文档,请确保安装完成Java jdk、Android NDK(R17以上)。 +支持多种开发环境,不同开发环境的编译流程请参考对应文档,请确保安装完成Java jdk、Android NDK(R17 < version < R21,其他版本以上未做测试)。 +设置NDK_ROOT命令: +```shell +export NDK_ROOT=[YOUR_NDK_PATH]/android-ndk-r17c +``` + 1. [Docker](https://paddle-lite.readthedocs.io/zh/latest/source_compile/compile_env.html#docker) 2. [Linux](https://paddle-lite.readthedocs.io/zh/latest/source_compile/compile_env.html#linux) @@ -21,7 +26,7 @@ Paddle Lite是飞桨轻量化推理引擎,为手机、IOT端提供高效推理 ### 1.2 准备预测库 预测库有两种获取方式: -1. [**建议**]直接下载,预测库下载链接如下: +1. 
[**建议**]直接下载,预测库下载链接如下:(请注意使用模型FP32/FP16版本需要与库相对应) |平台| 架构 | 预测库下载链接| |-|-|-| |Android| arm7 | [inference_lite_lib](https://github.com/PaddlePaddle/Paddle-Lite/releases/download/v2.10-rc/inference_lite_lib.android.armv7.clang.c++_static.with_extra.with_cv.tar.gz) | @@ -31,7 +36,7 @@ Paddle Lite是飞桨轻量化推理引擎,为手机、IOT端提供高效推理 **注意**:1. 如果是从 Paddle-Lite [官方文档](https://paddle-lite.readthedocs.io/zh/latest/quick_start/release_lib.html#android-toolchain-gcc)下载的预测库,注意选择`with_extra=ON,with_cv=ON`的下载链接。2. 目前只提供Android端demo,IOS端demo可以参考[Paddle-Lite IOS demo](https://github.com/PaddlePaddle/Paddle-Lite-Demo/tree/master/PaddleLite-ios-demo) -2. 编译Paddle-Lite得到预测库,Paddle-Lite的编译方式如下: +2. 编译Paddle-Lite得到预测库,Paddle-Lite的编译方式如下(Lite库在不断更新,如若下列命令无效,请以Lite官方repo为主): ```shell git clone https://github.com/PaddlePaddle/Paddle-Lite.git cd Paddle-Lite diff --git a/deploy/lite/include/config_parser.h b/deploy/lite/include/config_parser.h index 67f662e7221fa71325b47995489af8902de090c0..5171885ca954f50a44d511d24b3ca23845462d45 100644 --- a/deploy/lite/include/config_parser.h +++ b/deploy/lite/include/config_parser.h @@ -29,7 +29,7 @@ namespace PaddleDetection { -void load_jsonf(std::string jsonfile, Json::Value& jsondata); +void load_jsonf(std::string jsonfile, Json::Value& jsondata); // Inference model configuration parser class ConfigPaser { @@ -43,13 +43,14 @@ class ConfigPaser { Json::Value config; load_jsonf(model_dir + OS_PATH_SEP + cfg + ".json", config); - // Get model arch : YOLO, SSD, RetinaNet, RCNN, Face + // Get model arch : YOLO, SSD, RetinaNet, RCNN, Face, PicoDet, HRNet if (config.isMember("arch")) { arch_ = config["arch"].as(); } else { - std::cerr << "Please set model arch," - << "support value : YOLO, SSD, RetinaNet, RCNN, Face." - << std::endl; + std::cerr + << "Please set model arch," + << "support value : YOLO, SSD, RetinaNet, RCNN, Face, PicoDet, HRNet." 
+ << std::endl; return false; } diff --git a/deploy/lite/src/config_parser.cc b/deploy/lite/src/config_parser.cc index 70c43e76c2c85d2917eb1c3384304260c591b85c..ed139a17dc8b2535877f3981849fdca8ce16993c 100644 --- a/deploy/lite/src/config_parser.cc +++ b/deploy/lite/src/config_parser.cc @@ -16,7 +16,7 @@ namespace PaddleDetection { -void load_jsonf(std::string jsonfile, Json::Value &jsondata) { +void load_jsonf(std::string jsonfile, Json::Value &jsondata) { std::ifstream ifs; ifs.open(jsonfile); diff --git a/deploy/lite/src/main.cc b/deploy/lite/src/main.cc index 32f2979a0e3dc834eb5bc2812e47754c9a49e294..51f3b338064a90e7b7fd411f964d08ce72f4441e 100644 --- a/deploy/lite/src/main.cc +++ b/deploy/lite/src/main.cc @@ -43,10 +43,8 @@ void PrintBenchmarkLog(std::vector det_time, int img_num) { << std::endl; RT_Config["model_dir_det"].as().erase( RT_Config["model_dir_det"].as().find_last_not_of("/") + 1); - std::cout - << "detection model_name: " - << RT_Config["model_dir_det"].as() - << std::endl; + std::cout << "detection model_name: " + << RT_Config["model_dir_det"].as() << std::endl; std::cout << "----------------------- Perf info ------------------------" << std::endl; std::cout << "Total number of predicted data: " << img_num @@ -59,7 +57,7 @@ void PrintBenchmarkLog(std::vector det_time, int img_num) { << ", postprocess_time(ms): " << det_time[2] / img_num << std::endl; } -void PrintKptsBenchmarkLog(std::vector det_time, int img_num){ +void PrintKptsBenchmarkLog(std::vector det_time, int img_num) { std::cout << "----------------------- Data info -----------------------" << std::endl; std::cout << "batch_size_keypoint: " @@ -69,16 +67,16 @@ void PrintKptsBenchmarkLog(std::vector det_time, int img_num){ RT_Config["model_dir_keypoint"].as().erase( RT_Config["model_dir_keypoint"].as().find_last_not_of("/") + 1); - std::cout - << "keypoint model_name: " - << RT_Config["model_dir_keypoint"].as() << std::endl; + std::cout << "keypoint model_name: " + << 
RT_Config["model_dir_keypoint"].as() << std::endl; std::cout << "----------------------- Perf info ------------------------" << std::endl; std::cout << "Total number of predicted data: " << img_num << " and total time spent(ms): " - << std::accumulate(det_time.begin(), det_time.end(), 0.) << std::endl; + << std::accumulate(det_time.begin(), det_time.end(), 0.) + << std::endl; img_num = std::max(1, img_num); - std::cout << "Average time cost per person:" << std::endl + std::cout << "Average time cost per person:" << std::endl << "preproce_time(ms): " << det_time[0] / img_num << ", inference_time(ms): " << det_time[1] / img_num << ", postprocess_time(ms): " << det_time[2] / img_num << std::endl; @@ -136,7 +134,7 @@ void PredictImage(const std::vector all_img_paths, PaddleDetection::KeyPointDetector* keypoint, const std::string& output_dir = "output") { std::vector det_t = {0, 0, 0}; - int steps = ceil(float(all_img_paths.size()) / batch_size_det); + int steps = ceil(static_cast(all_img_paths.size()) / batch_size_det); int kpts_imgs = 0; std::vector keypoint_t = {0, 0, 0}; double midtimecost = 0; @@ -243,7 +241,7 @@ void PredictImage(const std::vector all_img_paths, std::chrono::duration midtimediff = keypoint_crop_time - keypoint_start_time; - midtimecost += double(midtimediff.count() * 1000); + midtimecost += static_cast(midtimediff.count() * 1000); if (imgs_kpts.size() == RT_Config["batch_size_keypoint"].as() || ((i == imsize - 1) && !imgs_kpts.empty())) { @@ -275,8 +273,8 @@ void PredictImage(const std::vector all_img_paths, std::string kpts_savepath = output_path + "keypoint_" + image_file_path.substr(image_file_path.find_last_of('/') + 1); - cv::Mat kpts_vis_img = - VisualizeKptsResult(im, result_kpts, colormap_kpts, keypoint->get_threshold()); + cv::Mat kpts_vis_img = VisualizeKptsResult( + im, result_kpts, colormap_kpts, keypoint->get_threshold()); cv::imwrite(kpts_savepath, kpts_vis_img, compression_params); printf("Visualized output saved as %s\n", 
kpts_savepath.c_str()); } else { @@ -298,23 +296,22 @@ void PredictImage(const std::vector all_img_paths, PrintBenchmarkLog(det_t, all_img_paths.size()); if (keypoint) { PrintKptsBenchmarkLog(keypoint_t, kpts_imgs); - PrintTotalIimeLog((det_t[0] + det_t[1] + det_t[2]) / all_img_paths.size(), - (keypoint_t[0] + keypoint_t[1] + keypoint_t[2]) / all_img_paths.size(), - midtimecost / all_img_paths.size()); + PrintTotalIimeLog( + (det_t[0] + det_t[1] + det_t[2]) / all_img_paths.size(), + (keypoint_t[0] + keypoint_t[1] + keypoint_t[2]) / all_img_paths.size(), + midtimecost / all_img_paths.size()); } - } int main(int argc, char** argv) { - std::cout << "Usage: " << argv[0] - << " [config_path](option) [image_dir](option)\n"; + std::cout << "Usage: " << argv[0] << " [config_path] [image_dir](option)\n"; if (argc < 2) { std::cout << "Usage: ./main det_runtime_config.json" << std::endl; return -1; } std::string config_path = argv[1]; std::string img_path = ""; - + if (argc >= 3) { img_path = argv[2]; }