From f2b20cff3f6a41191dcebeed0e9d529fb9061551 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Thu, 26 May 2022 14:24:06 +0800 Subject: [PATCH] add paddle2onnx tipc chain --- test_tipc/README.md | 8 +- ...V3_large_x1_0_paddle2onnx_infer_python.txt | 15 ++ ...u_general_rec_paddle2onnx_infer_python.txt | 15 ++ ..._mainbody_det_paddle2onnx_infer_python.txt | 15 ++ ...PPHGNet_small_paddle2onnx_infer_python.txt | 15 ++ .../PPHGNet_tiny_paddle2onnx_infer_python.txt | 15 ++ ...PPLCNet_x0_25_paddle2onnx_infer_python.txt | 15 ++ ...PPLCNet_x0_35_paddle2onnx_infer_python.txt | 15 ++ .../PPLCNet_x0_5_paddle2onnx_infer_python.txt | 15 ++ ...PPLCNet_x0_75_paddle2onnx_infer_python.txt | 15 ++ .../PPLCNet_x1_0_paddle2onnx_infer_python.txt | 15 ++ .../PPLCNet_x1_5_paddle2onnx_infer_python.txt | 15 ++ .../PPLCNet_x2_0_paddle2onnx_infer_python.txt | 15 ++ .../PPLCNet_x2_5_paddle2onnx_infer_python.txt | 15 ++ .../ResNet50_paddle2onnx_infer_python.txt | 15 ++ .../ResNet50_vd_paddle2onnx_infer_python.txt | 15 ++ ...4_window7_224_paddle2onnx_infer_python.txt | 15 ++ test_tipc/docs/test_paddle2onnx.md | 49 ++++++ test_tipc/prepare.sh | 157 +++++++++++++++++- test_tipc/test_paddle2onnx.sh | 13 +- 20 files changed, 456 insertions(+), 11 deletions(-) create mode 100644 test_tipc/config/MobileNetV3/MobileNetV3_large_x1_0_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PP-ShiTu/PPShiTu_general_rec_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PP-ShiTu/PPShiTu_mainbody_det_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PPHGNet/PPHGNet_tiny_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PPLCNet/PPLCNet_x0_25_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PPLCNet/PPLCNet_x0_35_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PPLCNet/PPLCNet_x0_5_paddle2onnx_infer_python.txt create 
mode 100644 test_tipc/config/PPLCNet/PPLCNet_x0_75_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PPLCNet/PPLCNet_x1_0_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PPLCNet/PPLCNet_x1_5_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PPLCNet/PPLCNet_x2_0_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PPLCNet/PPLCNet_x2_5_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/ResNet/ResNet50_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/ResNet/ResNet50_vd_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/SwinTransformer/SwinTransformer_tiny_patch4_window7_224_paddle2onnx_infer_python.txt create mode 100644 test_tipc/docs/test_paddle2onnx.md diff --git a/test_tipc/README.md b/test_tipc/README.md index 4869f6e1..0949c402 100644 --- a/test_tipc/README.md +++ b/test_tipc/README.md @@ -35,11 +35,14 @@ │ ├── MobileNetV3 # MobileNetV3系列模型测试配置文件目录 │ │ ├── MobileNetV3_large_x1_0_train_infer_python.txt #基础训练预测配置文件 │ │ ├── MobileNetV3_large_x1_0_train_linux_gpu_fleet_amp_infer_python_linux_gpu_cpu.txt #多机多卡训练预测配置文件 -│ │ └── MobileNetV3_large_x1_0_train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt #混合精度训练预测配置文件 +│ │ ├── MobileNetV3_large_x1_0_train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt #混合精度训练预测配置文件 +│ │ └── MobileNetV3_large_x1_0_paddle2onnx_infer_python.txt #paddle2onnx推理测试配置文件 │ └── ResNet # ResNet系列模型测试配置文件目录 │ ├── ResNet50_vd_train_infer_python.txt #基础训练预测配置文件 │ ├── ResNet50_vd_train_linux_gpu_fleet_amp_infer_python_linux_gpu_cpu.txt #多机多卡训练预测配置文件 -│ └── ResNet50_vd_train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt #混合精度训练预测配置文件 +│ ├── ResNet50_vd_train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt #混合精度训练预测配置文件 +│ ├── ResNet50_paddle2onnx_infer_python.txt #paddle2onnx推理测试配置文件 +│ └── ResNet50_vd_paddle2onnx_infer_python.txt #paddle2onnx推理测试配置文件 | ...... 
├── docs │ ├── guide.png @@ -47,6 +50,7 @@ ├── prepare.sh # 完成test_*.sh运行所需要的数据和模型下载 ├── README.md # 使用文档 ├── results # 预先保存的预测结果,用于和实际预测结果进行精读比对 +├── test_paddle2onnx.sh # 测试paddle2onnx推理预测的主程序 └── test_train_inference_python.sh # 测试python训练预测的主程序 ``` diff --git a/test_tipc/config/MobileNetV3/MobileNetV3_large_x1_0_paddle2onnx_infer_python.txt b/test_tipc/config/MobileNetV3/MobileNetV3_large_x1_0_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..57f98f82 --- /dev/null +++ b/test_tipc/config/MobileNetV3/MobileNetV3_large_x1_0_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:MobileNetV3_large_x1_0 +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/MobileNetV3_large_x1_0_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/MobileNetV3_large_x1_0_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/MobileNetV3_large_x1_0_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PP-ShiTu/PPShiTu_general_rec_paddle2onnx_infer_python.txt b/test_tipc/config/PP-ShiTu/PPShiTu_general_rec_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..1a8d9f7f --- /dev/null +++ b/test_tipc/config/PP-ShiTu/PPShiTu_general_rec_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PP-ShiTu_general_rec +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/general_PPLCNet_x2_5_lite_v1.0_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/general_PPLCNet_x2_5_lite_v1.0_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True 
+Global.inference_model_dir:./models/general_PPLCNet_x2_5_lite_v1.0_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PP-ShiTu/PPShiTu_mainbody_det_paddle2onnx_infer_python.txt b/test_tipc/config/PP-ShiTu/PPShiTu_mainbody_det_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..b87cf70f --- /dev/null +++ b/test_tipc/config/PP-ShiTu/PPShiTu_mainbody_det_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PP-ShiTu_mainbody_det +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt b/test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..86e2658a --- /dev/null +++ b/test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PPHGNet_small +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/PPHGNet_small_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPHGNet_small_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPHGNet_small_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at 
end of file diff --git a/test_tipc/config/PPHGNet/PPHGNet_tiny_paddle2onnx_infer_python.txt b/test_tipc/config/PPHGNet/PPHGNet_tiny_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..c566fcb7 --- /dev/null +++ b/test_tipc/config/PPHGNet/PPHGNet_tiny_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PPHGNet_tiny +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/PPHGNet_tiny_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPHGNet_tiny_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPHGNet_tiny_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PPLCNet/PPLCNet_x0_25_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x0_25_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..6a89762e --- /dev/null +++ b/test_tipc/config/PPLCNet/PPLCNet_x0_25_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PPLCNet_x0_25 +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/PPLCNet_x0_25_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPLCNet_x0_25_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPLCNet_x0_25_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PPLCNet/PPLCNet_x0_35_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x0_35_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..f01063a8 --- /dev/null +++ 
b/test_tipc/config/PPLCNet/PPLCNet_x0_35_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PPLCNet_x0_35 +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/PPLCNet_x0_35_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPLCNet_x0_35_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPLCNet_x0_35_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PPLCNet/PPLCNet_x0_5_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x0_5_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..4306ff12 --- /dev/null +++ b/test_tipc/config/PPLCNet/PPLCNet_x0_5_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PPLCNet_x0_5 +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/PPLCNet_x0_5_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPLCNet_x0_5_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPLCNet_x0_5_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PPLCNet/PPLCNet_x0_75_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x0_75_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..cd6a79b0 --- /dev/null +++ b/test_tipc/config/PPLCNet/PPLCNet_x0_75_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PPLCNet_x0_75 +python:python3.7 +2onnx: paddle2onnx 
+--model_dir:./deploy/models/PPLCNet_x0_75_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPLCNet_x0_75_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPLCNet_x0_75_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PPLCNet/PPLCNet_x1_0_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x1_0_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..519c7558 --- /dev/null +++ b/test_tipc/config/PPLCNet/PPLCNet_x1_0_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PPLCNet_x1_0 +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/PPLCNet_x1_0_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPLCNet_x1_0_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPLCNet_x1_0_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PPLCNet/PPLCNet_x1_5_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x1_5_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..93a7f652 --- /dev/null +++ b/test_tipc/config/PPLCNet/PPLCNet_x1_5_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PPLCNet_x1_5 +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/PPLCNet_x1_5_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPLCNet_x1_5_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True 
+inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPLCNet_x1_5_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PPLCNet/PPLCNet_x2_0_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x2_0_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..7b85765b --- /dev/null +++ b/test_tipc/config/PPLCNet/PPLCNet_x2_0_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PPLCNet_x2_0 +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/PPLCNet_x2_0_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPLCNet_x2_0_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPLCNet_x2_0_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PPLCNet/PPLCNet_x2_5_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x2_5_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..55819c0b --- /dev/null +++ b/test_tipc/config/PPLCNet/PPLCNet_x2_5_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:PPLCNet_x2_5 +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/PPLCNet_x2_5_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPLCNet_x2_5_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPLCNet_x2_5_infer +Global.use_gpu:False 
+-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/ResNet/ResNet50_paddle2onnx_infer_python.txt b/test_tipc/config/ResNet/ResNet50_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..d0c81d51 --- /dev/null +++ b/test_tipc/config/ResNet/ResNet50_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:ResNet50 +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/ResNet50_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/ResNet50_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/ResNet50_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/ResNet/ResNet50_vd_paddle2onnx_infer_python.txt b/test_tipc/config/ResNet/ResNet50_vd_paddle2onnx_infer_python.txt new file mode 100644 index 00000000..f4df8f09 --- /dev/null +++ b/test_tipc/config/ResNet/ResNet50_vd_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:ResNet50_vd +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/ResNet50_vd_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/ResNet50_vd_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/ResNet50_vd_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/SwinTransformer/SwinTransformer_tiny_patch4_window7_224_paddle2onnx_infer_python.txt b/test_tipc/config/SwinTransformer/SwinTransformer_tiny_patch4_window7_224_paddle2onnx_infer_python.txt new file mode 100644 index 
00000000..7b85765b --- /dev/null +++ b/test_tipc/config/SwinTransformer/SwinTransformer_tiny_patch4_window7_224_paddle2onnx_infer_python.txt @@ -0,0 +1,15 @@ +===========================paddle2onnx_params=========================== +model_name:SwinTransformer_tiny_patch4_window7_224 +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/SwinTransformer_tiny_patch4_window7_224_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/SwinTransformer_tiny_patch4_window7_224_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/SwinTransformer_tiny_patch4_window7_224_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/docs/test_paddle2onnx.md b/test_tipc/docs/test_paddle2onnx.md new file mode 100644 index 00000000..ba055434 --- /dev/null +++ b/test_tipc/docs/test_paddle2onnx.md @@ -0,0 +1,49 @@ +# Paddle2onnx预测功能测试 + +Paddle2ONNX预测功能测试的主程序为`test_paddle2onnx.sh`,可以测试Paddle2ONNX的模型转化功能,并验证正确性。 + +## 1. 测试结论汇总 + +基于训练是否使用量化,进行本测试的模型可以分为`正常模型`和`量化模型`,这两类模型对应的Paddle2ONNX预测功能汇总如下: + +| 模型类型 |device | +| ---- | ---- | +| 正常模型 | GPU | +| 正常模型 | CPU | +| 量化模型 | GPU | +| 量化模型 | CPU | +
+## 2. 
测试流程 +### 2.1 功能测试 +先运行`prepare.sh`准备数据和模型,然后运行`test_paddle2onnx.sh`进行测试,最终在`test_tipc/output`目录下生成`paddle2onnx_infer_*.log`后缀的日志文件 +下方展示以PPHGNet_small为例的测试命令与结果。 + +```shell +bash test_tipc/prepare.sh ./test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt paddle2onnx_infer + +# 用法: +bash test_tipc/test_paddle2onnx.sh ./test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt +``` + +#### 运行结果 + +各测试的运行情况会打印在 `test_tipc/output/results_paddle2onnx.log` 中: +运行成功时会输出: + +``` +Run successfully with command - paddle2onnx --model_dir=./deploy/models/PPHGNet_tiny_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/PPHGNet_tiny_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True! +Run successfully with command - cd deploy && python3.7 ./python/predict_cls.py -o Global.inference_model_dir=./models/PPHGNet_tiny_infer -o Global.use_onnx=True -o Global.use_gpu=False -c=configs/inference_cls.yaml > ../test_tipc/output/paddle2onnx_infer_cpu.log 2>&1 && cd ../! + +``` + +运行失败时会输出: + +``` +Run failed with command - paddle2onnx --model_dir=./deploy/models/PPHGNet_tiny_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/PPHGNet_tiny_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True! +... +``` + + +## 3. 
更多教程 + +本文档为功能测试用,更详细的Paddle2onnx预测使用教程请参考:[Paddle2ONNX](https://github.com/PaddlePaddle/Paddle2ONNX) diff --git a/test_tipc/prepare.sh b/test_tipc/prepare.sh index 70040dc8..cf64c8c6 100644 --- a/test_tipc/prepare.sh +++ b/test_tipc/prepare.sh @@ -176,11 +176,160 @@ if [ ${MODE} = "paddle2onnx_infer" ];then python_name=$(func_parser_value "${lines[2]}") ${python_name} -m pip install install paddle2onnx ${python_name} -m pip install onnxruntime + if [ ${model_name} == "ResNet50" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_infer.tar + tar xf ResNet50_infer.tar + cd ../../ + fi + if [ ${model_name} == "ResNet50_vd" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar + tar xf ResNet50_vd_infer.tar + cd ../../ + fi + if [ ${model_name} == "MobileNetV3_large_x1_0" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/MobileNetV3_large_x1_0_infer.tar + tar xf MobileNetV3_large_x1_0_infer.tar + cd ../../ + fi + if [ ${model_name} == "SwinTransformer_tiny_patch4_window7_224" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/SwinTransformer_tiny_patch4_window7_224_infer.tar + tar xf SwinTransformer_tiny_patch4_window7_224_infer.tar + cd ../../ + fi + if [ ${model_name} == "PPLCNet_x0_25" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_25_infer.tar + tar xf PPLCNet_x0_25_infer.tar + cd ../../ + fi + if [ ${model_name} == "PPLCNet_x0_35" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc 
https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_35_infer.tar + tar xf PPLCNet_x0_35_infer.tar + cd ../../ + fi + if [ ${model_name} == "PPLCNet_x0_5" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_5_infer.tar + tar xf PPLCNet_x0_5_infer.tar + cd ../../ + fi + if [ ${model_name} == "PPLCNet_x0_75" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_75_infer.tar + tar xf PPLCNet_x0_75_infer.tar + cd ../../ + fi + if [ ${model_name} == "PPLCNet_x1_0" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_0_infer.tar + tar xf PPLCNet_x1_0_infer.tar + cd ../../ + fi + if [ ${model_name} == "PPLCNet_x1_5" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_5_infer.tar + tar xf PPLCNet_x1_5_infer.tar + cd ../../ + fi + if [ ${model_name} == "PPLCNet_x2_0" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_0_infer.tar + tar xf PPLCNet_x2_0_infer.tar + cd ../../ + fi + if [ ${model_name} == "PPLCNet_x2_5" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_5_infer.tar + tar xf PPLCNet_x2_5_infer.tar + cd ../../ + fi + if [ ${model_name} == "PP-ShiTu_general_rec" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/general_PPLCNet_x2_5_lite_v1.0_infer.tar + tar xf general_PPLCNet_x2_5_lite_v1.0_infer.tar + cd ../../ + fi + if [ ${model_name} == 
"PP-ShiTu_mainbody_det" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer.tar + tar xf picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer.tar + cd ../../ + fi + if [ ${model_name} == "PPLCNetV2_base" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNetV2_base_infer.tar + tar xf PPLCNetV2_base_infer.tar + cd ../../ + fi + if [ ${model_name} == "PPHGNet_tiny" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPHGNet_tiny_infer.tar + tar xf PPHGNet_tiny_infer.tar + cd ../../ + fi + if [ ${model_name} == "PPHGNet_small" ]; then + # wget model + cd deploy + mkdir models + cd models + wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPHGNet_small_infer.tar + tar xf PPHGNet_small_infer.tar + cd ../../ + fi - # wget model - cd deploy && mkdir models && cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar && tar xf ResNet50_vd_infer.tar - cd ../../ fi if [ ${MODE} = "benchmark_train" ];then diff --git a/test_tipc/test_paddle2onnx.sh b/test_tipc/test_paddle2onnx.sh index 850fc904..cc31e3b1 100644 --- a/test_tipc/test_paddle2onnx.sh +++ b/test_tipc/test_paddle2onnx.sh @@ -1,5 +1,5 @@ #!/bin/bash -source test_tipc/common_func.sh +source test_tipc/common_func.sh FILENAME=$1 @@ -11,7 +11,7 @@ python=$(func_parser_value "${lines[2]}") # parser params -dataline=$(awk 'NR==1, NR==14{print}' $FILENAME) +dataline=$(awk 'NR==1, NR==15{print}' $FILENAME) IFS=$'\n' lines=(${dataline}) @@ -31,7 +31,7 @@ opset_version_key=$(func_parser_key "${lines[8]}") opset_version_value=$(func_parser_value "${lines[8]}") enable_onnx_checker_key=$(func_parser_key "${lines[9]}") 
enable_onnx_checker_value=$(func_parser_value "${lines[9]}") -# parser onnx inference +# parser onnx inference inference_py=$(func_parser_value "${lines[10]}") use_onnx_key=$(func_parser_key "${lines[11]}") use_onnx_value=$(func_parser_value "${lines[11]}") @@ -39,6 +39,8 @@ inference_model_dir_key=$(func_parser_key "${lines[12]}") inference_model_dir_value=$(func_parser_value "${lines[12]}") inference_hardware_key=$(func_parser_key "${lines[13]}") inference_hardware_value=$(func_parser_value "${lines[13]}") +inference_config_key=$(func_parser_key "${lines[14]}") +inference_config_value=$(func_parser_value "${lines[14]}") LOG_PATH="./test_tipc/output" mkdir -p ./test_tipc/output @@ -65,7 +67,8 @@ function func_paddle2onnx(){ set_model_dir=$(func_set_params "${inference_model_dir_key}" "${inference_model_dir_value}") set_use_onnx=$(func_set_params "${use_onnx_key}" "${use_onnx_value}") set_hardware=$(func_set_params "${inference_hardware_key}" "${inference_hardware_value}") - infer_model_cmd="cd deploy && ${python} ${inference_py} -o ${set_model_dir} -o ${set_use_onnx} -o ${set_hardware} >${_save_log_path} 2>&1 && cd ../" + set_inference_config=$(func_set_params "${inference_config_key}" "${inference_config_value}") + infer_model_cmd="cd deploy && ${python} ${inference_py} -o ${set_model_dir} -o ${set_use_onnx} -o ${set_hardware} ${set_inference_config} > ${_save_log_path} 2>&1 && cd ../" eval $infer_model_cmd status_check $last_status "${infer_model_cmd}" "${status_log}" } @@ -75,4 +78,4 @@ echo "################### run test ###################" export Count=0 IFS="|" -func_paddle2onnx \ No newline at end of file +func_paddle2onnx \ No newline at end of file -- GitLab