From eb3fe8139d8eead8515b62e2129825e4fa5c392a Mon Sep 17 00:00:00 2001 From: Feng Ni Date: Sat, 28 Jan 2023 20:09:24 +0800 Subject: [PATCH] [MOT] update mot docs for BoT-SORT and CenterTrack (#7641) --- README_cn.md | 4 +++- README_en.md | 1 + configs/mot/README.md | 15 ++++++++++--- configs/mot/README_en.md | 10 ++++++++- configs/mot/bytetrack/README_cn.md | 29 +++++++++++++++++++++++-- configs/mot/headtracking21/README_cn.md | 12 ++++++++-- docs/MODEL_ZOO_cn.md | 4 ++++ docs/MODEL_ZOO_en.md | 4 ++++ 8 files changed, 70 insertions(+), 9 deletions(-) diff --git a/README_cn.md b/README_cn.md index bb0d6a900..db1fe7f44 100644 --- a/README_cn.md +++ b/README_cn.md @@ -111,7 +111,7 @@ PaddleDetection整理工业、农业、林业、交通、医疗、金融、能 - **🎈社区近期活动** - + - **👀YOLO系列专题** - `文章传送门`:[YOLOv8来啦!YOLO内卷期模型怎么选?9+款AI硬件如何快速部署?深度解析](https://mp.weixin.qq.com/s/rPwprZeHEpmGOe5wxrmO5g) - `代码传送门`:[PaddleYOLO全系列](https://github.com/PaddlePaddle/PaddleDetection/blob/release/2.5/docs/feature_models/PaddleYOLO_MODEL.md) @@ -396,6 +396,8 @@ PaddleDetection整理工业、农业、林业、交通、医疗、金融、能
  • DeepSORT
  • ByteTrack
  • OC-SORT
+  • BoT-SORT
+  • CenterTrack
diff --git a/README_en.md b/README_en.md index 26454eb5f..c45b100e1 100644 --- a/README_en.md +++ b/README_en.md @@ -163,6 +163,7 @@
  • DeepSORT
  • ByteTrack
  • OC-SORT
+  • BoT-SORT
  • CenterTrack
  • KeyPoint-Detection diff --git a/configs/mot/README.md b/configs/mot/README.md index 533c3fcba..73bf75fdf 100644 --- a/configs/mot/README.md +++ b/configs/mot/README.md @@ -23,6 +23,7 @@ PaddleDetection中提供了SDE和JDE两个系列的多种算法实现: - SDE - [ByteTrack](./bytetrack) - [OC-SORT](./ocsort) + - [BoT-SORT](./botsort) - [DeepSORT](./deepsort) - [CenterTrack](./centertrack) - JDE @@ -32,7 +33,7 @@ PaddleDetection中提供了SDE和JDE两个系列的多种算法实现: **注意:** - 以上算法原论文均为单类别的多目标跟踪,PaddleDetection团队同时也支持了[ByteTrack](./bytetrack)和FairMOT([MCFairMOT](./mcfairmot))的多类别的多目标跟踪; - - [DeepSORT](./deepsort)、[JDE](./jde)和[CenterTrack](./centertrack)均只支持单类别的多目标跟踪; + - [DeepSORT](./deepsort)、[JDE](./jde)、[OC-SORT](./ocsort)、[BoT-SORT](./botsort)和[CenterTrack](./centertrack)均只支持单类别的多目标跟踪; - [DeepSORT](./deepsort)需要额外添加ReID权重一起执行,[ByteTrack](./bytetrack)可加可不加ReID权重,默认不加; @@ -83,7 +84,7 @@ PP-Human赋能社区智能精细化管理教程[链接](https://aistudio.baidu.c ``` pip install -r requirements.txt # 或手动pip安装MOT相关的库 -pip install lap motmetrics sklearn filterpy +pip install lap motmetrics sklearn ``` **注意:** - 预测需确保已安装[ffmpeg](https://ffmpeg.org/ffmpeg.html), Linux(Ubuntu)平台可以直接用以下命令安装:`apt-get update && apt-get install -y ffmpeg`。 @@ -94,6 +95,7 @@ pip install lap motmetrics sklearn filterpy - 基础模型 - [ByteTrack](bytetrack/README_cn.md) - [OC-SORT](ocsort/README_cn.md) + - [BoT-SORT](botsort/README_cn.md) - [DeepSORT](deepsort/README_cn.md) - [JDE](jde/README_cn.md) - [FairMOT](fairmot/README_cn.md) @@ -113,7 +115,7 @@ pip install lap motmetrics sklearn filterpy | MOT方式 | 经典算法 | 算法流程 | 数据集要求 | 其他特点 | | :--------------| :--------------| :------- | :----: | :----: | -| SDE系列 | DeepSORT,ByteTrack,OC-SORT,CenterTrack | 分离式,两个独立模型权重先检测后ReID,也可不加ReID | 检测和ReID数据相对独立,不加ReID时即纯检测数据集 |检测和ReID可分别调优,鲁棒性较高,AI竞赛常用| +| SDE系列 | DeepSORT,ByteTrack,OC-SORT,BoT-SORT,CenterTrack | 分离式,两个独立模型权重先检测后ReID,也可不加ReID | 检测和ReID数据相对独立,不加ReID时即纯检测数据集 |检测和ReID可分别调优,鲁棒性较高,AI竞赛常用| | JDE系列 | FairMOT,JDE | 联合式,一个模型权重端到端同时检测和ReID | 必须同时具有检测和ReID标注 | 检测和ReID联合训练,不易调优,泛化性不强| **注意:** @@ -283,6 +285,13 @@ MOT17 year={2022} } +@article{aharon2022bot, + title={BoT-SORT: Robust Associations Multi-Pedestrian Tracking}, + author={Aharon, Nir and Orfaig, Roy and Bobrovsky, Ben-Zion}, + journal={arXiv preprint arXiv:2206.14651}, + year={2022} +} + @article{zhou2020tracking, title={Tracking Objects as Points}, author={Zhou, Xingyi and Koltun, Vladlen and Kr{\"a}henb{\"u}hl, Philipp}, diff --git a/configs/mot/README_en.md b/configs/mot/README_en.md index ec78a85d0..3ae5444eb 100644 --- a/configs/mot/README_en.md +++ b/configs/mot/README_en.md @@ -49,7 +49,7 @@ PP-Tracking supports GUI predict and deployment. 
Please refer to this [doc](http ## Installation Install all the related dependencies for MOT: ``` -pip install lap motmetrics sklearn filterpy +pip install lap motmetrics sklearn or pip install -r requirements.txt ``` @@ -61,6 +61,7 @@ pip install -r requirements.txt - Base models - [ByteTrack](bytetrack/README.md) - [OC-SORT](ocsort/README.md) + - [BoT-SORT](botsort/README.md) - [DeepSORT](deepsort/README.md) - [JDE](jde/README.md) - [FairMOT](fairmot/README.md) @@ -200,6 +201,13 @@ In the annotation text, each line is describing a bounding box and has the follo year={2022} } +@article{aharon2022bot, + title={BoT-SORT: Robust Associations Multi-Pedestrian Tracking}, + author={Aharon, Nir and Orfaig, Roy and Bobrovsky, Ben-Zion}, + journal={arXiv preprint arXiv:2206.14651}, + year={2022} +} + @article{zhou2020tracking, title={Tracking Objects as Points}, author={Zhou, Xingyi and Koltun, Vladlen and Kr{\"a}henb{\"u}hl, Philipp}, diff --git a/configs/mot/bytetrack/README_cn.md b/configs/mot/bytetrack/README_cn.md index 2a3f8c50d..3e896ec04 100644 --- a/configs/mot/bytetrack/README_cn.md +++ b/configs/mot/bytetrack/README_cn.md @@ -5,15 +5,22 @@ ## 内容 - [简介](#简介) - [模型库](#模型库) + - [行人跟踪](#行人跟踪) + - [人头跟踪](#人头跟踪) +- [多类别适配](#多类别适配) - [快速开始](#快速开始) - [引用](#引用) + ## 简介 [ByteTrack](https://arxiv.org/abs/2110.06864)(ByteTrack: Multi-Object Tracking by Associating Every Detection Box) 通过关联每个检测框来跟踪,而不仅是关联高分的检测框。对于低分数检测框会利用它们与轨迹片段的相似性来恢复真实对象并过滤掉背景检测框。此处提供了几个常用检测器的配置作为参考。由于训练数据集、输入尺度、训练epoch数、NMS阈值设置等的不同均会导致模型精度和性能的差异,请自行根据需求进行适配。 + ## 模型库 -### 基于不同检测器的ByteTrack在MOT-17 half Val Set上结果 +### 行人跟踪 + +#### 基于不同检测器的ByteTrack在 MOT-17 half Val Set 上的结果 | 检测训练数据集 | 检测器 | 输入尺度 | ReID | 检测mAP(0.5:0.95) | MOTA | IDF1 | FPS | 配置文件 | | :-------- | :----- | :----: | :----:|:------: | :----: |:-----: |:----:|:----: | @@ -31,7 +38,7 @@ - **mix_mot_ch**数据集,是MOT17、CrowdHuman组成的联合数据集,**mix_det**数据集是MOT17、CrowdHuman、Cityscapes、ETHZ组成的联合数据集,数据集整理的格式和目录可以参考[此链接](https://github.com/ifzhang/ByteTrack#data-preparation),最终放置于`dataset/mot/`目录下。为了验证精度可以都用**MOT17-half val**数据集去评估。 -### YOLOX-x ByteTrack(mix_det) +#### YOLOX-x ByteTrack(mix_det)在 MOT-16/MOT-17 上的结果 [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/pp-yoloe-an-evolved-version-of-yolo/multi-object-tracking-on-mot16)](https://paperswithcode.com/sota/multi-object-tracking-on-mot16?p=pp-yoloe-an-evolved-version-of-yolo) @@ -51,6 +58,24 @@ - ByteTrack的导出部署,是单独导出检测模型,再组装跟踪器运行的,参照[PP-Tracking](../../../deploy/pptracking/python/README.md)。 +### 人头跟踪 + +#### YOLOX-x ByteTrack 在 HT-21 Test Set上的结果 + +| 模型 | 输入尺寸 | MOTA | IDF1 | IDS | FP | FN | FPS | 下载链接 | 配置文件 | +| :--------------| :------- | :----: | :----: | :---: | :----: | :---: | :------: | :----: |:----: | +| ByteTrack-x | 1440x800 | 64.1 | 63.4 | 4191 | 185162 | 210240 | - | [下载链接](https://paddledet.bj.bcebos.com/models/mot/bytetrack_yolox_ht21.pdparams) | [配置文件](./bytetrack_yolox_ht21.yml) | + +#### YOLOX-x ByteTrack 在 HT-21 Test Set上的结果 + +| 骨干网络 | 输入尺寸 | MOTA | IDF1 | IDS | FP | FN | FPS | 下载链接 | 配置文件 | +| :--------------| :------- | :----: | :----: | :----: | :----: | :----: |:-------: | :----: | :----: | +| ByteTrack-x | 1440x800 | 72.6 | 61.8 | 5163 | 71235 | 154139 | - | [下载链接](https://paddledet.bj.bcebos.com/models/mot/bytetrack_yolox_ht21.pdparams) | [配置文件](./bytetrack_yolox_ht21.yml) | + +**注意:** + - 更多人头跟踪模型可以参考[headtracking21](../headtracking21)。 + + ## 多类别适配 多类别ByteTrack,可以参考 [bytetrack_ppyoloe_ppvehicle9cls.yml](./bytetrack_ppyoloe_ppvehicle9cls.yml),表示使用 
[PP-Vehicle](../../ppvehicle/) 中的PPVehicle9cls数据集训好的模型权重去做多类别车辆跟踪。由于没有跟踪的ground truth标签无法做评估,故只做跟踪预测,只需修改`TestMOTDataset`确保路径存在,且其中的`anno_path`表示指定在一个`label_list.txt`中记录具体类别,需要自己手写,一行表示一个种类,注意路径`anno_path`如果写错或找不到则将默认使用COCO数据集80类的类别。 diff --git a/configs/mot/headtracking21/README_cn.md b/configs/mot/headtracking21/README_cn.md index 58c2169c1..b7f9274ee 100644 --- a/configs/mot/headtracking21/README_cn.md +++ b/configs/mot/headtracking21/README_cn.md @@ -10,13 +10,13 @@ ## 模型库 -### FairMOT在HT-21 Training Set上结果 +### FairMOT 和 ByteTrack 在 HT-21 Training Set上的结果 | 模型 | 输入尺寸 | MOTA | IDF1 | IDS | FP | FN | FPS | 下载链接 | 配置文件 | | :--------------| :------- | :----: | :----: | :---: | :----: | :---: | :------: | :----: |:----: | | FairMOT DLA-34 | 1088x608 | 64.7 | 69.0 | 8533 | 148817 | 234970 | - | [下载链接](https://paddledet.bj.bcebos.com/models/mot/fairmot_dla34_30e_1088x608_headtracking21.pdparams) | [配置文件](./fairmot_dla34_30e_1088x608_headtracking21.yml) | | ByteTrack-x | 1440x800 | 64.1 | 63.4 | 4191 | 185162 | 210240 | - | [下载链接](https://paddledet.bj.bcebos.com/models/mot/bytetrack_yolox_ht21.pdparams) | [配置文件](../bytetrack/bytetrack_yolox_ht21.yml) | -### FairMOT在HT-21 Test Set上结果 +### FairMOT 和 ByteTrack 在 HT-21 Test Set上的结果 | 骨干网络 | 输入尺寸 | MOTA | IDF1 | IDS | FP | FN | FPS | 下载链接 | 配置文件 | | :--------------| :------- | :----: | :----: | :----: | :----: | :----: |:-------: | :----: | :----: | | FairMOT DLA-34 | 1088x608 | 60.8 | 62.8 | 12781 | 118109 | 198896 | - | [下载链接](https://paddledet.bj.bcebos.com/models/mot/fairmot_dla34_30e_1088x608_headtracking21.pdparams) | [配置文件](./fairmot_dla34_30e_1088x608_headtracking21.yml) | @@ -76,6 +76,7 @@ python deploy/pptracking/python/mot_jde_infer.py --model_dir=output_inference/fa journal={arXiv preprint arXiv:2004.01888}, year={2020} } + @InProceedings{Sundararaman_2021_CVPR, author = {Sundararaman, Ramana and De Almeida Braga, Cedric and Marchand, Eric and Pettre, Julien}, title = {Tracking Pedestrian Heads in Dense Crowd}, @@ -84,4 +85,11 @@ python deploy/pptracking/python/mot_jde_infer.py --model_dir=output_inference/fa year = {2021}, pages = {3865-3875} } + +@article{zhang2021bytetrack, + title={ByteTrack: Multi-Object Tracking by Associating Every Detection Box}, + author={Zhang, Yifu and Sun, Peize and Jiang, Yi and Yu, Dongdong and Yuan, Zehuan and Luo, Ping and Liu, Wenyu and Wang, Xinggang}, + journal={arXiv preprint arXiv:2110.06864}, + year={2021} +} ``` diff --git a/docs/MODEL_ZOO_cn.md b/docs/MODEL_ZOO_cn.md index 44bd25bce..24973af0c 100644 --- a/docs/MODEL_ZOO_cn.md +++ b/docs/MODEL_ZOO_cn.md @@ -251,6 +251,10 @@ Paddle提供基于ImageNet的骨架网络预训练模型。所有预训练模型 请参考[OC-SORT](https://github.com/PaddlePaddle/PaddleDetection/tree/develop/configs/mot/ocsort) +### BoT-SORT + +请参考[BoT-SORT](https://github.com/PaddlePaddle/PaddleDetection/tree/develop/configs/mot/botsort) + ### CenterTrack 请参考[CenterTrack](https://github.com/PaddlePaddle/PaddleDetection/tree/develop/configs/mot/centertrack) diff --git a/docs/MODEL_ZOO_en.md b/docs/MODEL_ZOO_en.md index 057558f57..80638d76c 100644 --- a/docs/MODEL_ZOO_en.md +++ b/docs/MODEL_ZOO_en.md @@ -250,6 +250,10 @@ Please refer to [ByteTrack](https://github.com/PaddlePaddle/PaddleDetection/tree Please refer to [OC-SORT](https://github.com/PaddlePaddle/PaddleDetection/tree/develop/configs/mot/ocsort) +### BoT-SORT + +Please refer to [BoT-SORT](https://github.com/PaddlePaddle/PaddleDetection/tree/develop/configs/mot/botsort) + ### CenterTrack Please refer to 
[CenterTrack](https://github.com/PaddlePaddle/PaddleDetection/tree/develop/configs/mot/centertrack)

--
GitLab
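
A companion sketch for the multi-class ByteTrack note in configs/mot/bytetrack/README_cn.md above: the `label_list.txt` that `TestMOTDataset`'s `anno_path` points to is written by hand, one category per line. The file location and the vehicle class names below are illustrative placeholders, not the actual PPVehicle9cls labels:

```
# Hand-write the category list, one class per line (names below are placeholders;
# use the classes the detector was actually trained on, in training-label order).
cat > dataset/mot/ppvehicle/label_list.txt << 'EOF'
car
truck
bus
van
motorbike
EOF
# Point TestMOTDataset's anno_path at this file in bytetrack_ppyoloe_ppvehicle9cls.yml;
# if anno_path is wrong or missing, the 80 COCO classes are used by default.
```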