From 17401161833ff88e87df2e0b88b5a752e6e6b951 Mon Sep 17 00:00:00 2001 From: HydrogenSulfate <490868991@qq.com> Date: Wed, 1 Jun 2022 00:21:03 +0800 Subject: [PATCH] move model url to config --- ...l_normal_paddle2onnx_python_linux_cpu.txt} | 1 + ...l_normal_paddle2onnx_python_linux_cpu.txt} | 1 + ...l_normal_paddle2onnx_python_linux_cpu.txt} | 1 + ...l_normal_paddle2onnx_python_linux_cpu.txt} | 0 ...l_normal_paddle2onnx_python_linux_cpu.txt} | 0 ...l_normal_paddle2onnx_python_linux_cpu.txt} | 0 ...al_normal_paddle2onnx_python_linux_cpu.txt | 16 ++ ...PPLCNet_x0_35_paddle2onnx_infer_python.txt | 15 -- ...al_normal_paddle2onnx_python_linux_cpu.txt | 16 ++ .../PPLCNet_x0_5_paddle2onnx_infer_python.txt | 15 -- ...l_normal_paddle2onnx_python_linux_cpu.txt} | 1 + ...l_normal_paddle2onnx_python_linux_cpu.txt} | 1 + ...l_normal_paddle2onnx_python_linux_cpu.txt} | 1 + ...al_normal_paddle2onnx_python_linux_cpu.txt | 16 ++ ...l_normal_paddle2onnx_python_linux_cpu.txt} | 1 + ...al_normal_paddle2onnx_python_linux_cpu.txt | 16 ++ ...l_normal_paddle2onnx_python_linux_cpu.txt} | 1 + ...al_normal_paddle2onnx_python_linux_cpu.txt | 2 + .../ResNet50_vd_paddle2onnx_infer_python.txt | 15 -- ...l_normal_paddle2onnx_python_linux_cpu.txt} | 1 + ...4_window7_224_paddle2onnx_infer_python.txt | 15 -- test_tipc/docs/test_paddle2onnx.md | 21 ++- test_tipc/prepare.sh | 163 ++---------------- test_tipc/test_paddle2onnx.sh | 22 +-- 24 files changed, 108 insertions(+), 233 deletions(-) rename test_tipc/config/MobileNetV3/{MobileNetV3_large_x1_0_paddle2onnx_infer_python.txt => MobileNetV3_large_x1_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (78%) rename test_tipc/config/PP-ShiTu/{PPShiTu_general_rec_paddle2onnx_infer_python.txt => PPShiTu_general_rec_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (76%) rename test_tipc/config/PP-ShiTu/{PPShiTu_mainbody_det_paddle2onnx_infer_python.txt => PPShiTu_mainbody_det_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (76%) rename test_tipc/config/PPHGNet/{PPHGNet_small_paddle2onnx_infer_python.txt => PPHGNet_small_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (100%) rename test_tipc/config/PPHGNet/{PPHGNet_tiny_paddle2onnx_infer_python.txt => PPHGNet_tiny_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (100%) rename test_tipc/config/PPLCNet/{PPLCNet_x0_25_paddle2onnx_infer_python.txt => PPLCNet_x0_25_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (100%) create mode 100644 test_tipc/config/PPLCNet/PPLCNet_x0_35_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt delete mode 100644 test_tipc/config/PPLCNet/PPLCNet_x0_35_paddle2onnx_infer_python.txt create mode 100644 test_tipc/config/PPLCNet/PPLCNet_x0_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt delete mode 100644 test_tipc/config/PPLCNet/PPLCNet_x0_5_paddle2onnx_infer_python.txt rename test_tipc/config/PPLCNet/{PPLCNet_x0_75_paddle2onnx_infer_python.txt => PPLCNet_x0_75_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (77%) rename test_tipc/config/PPLCNet/{PPLCNet_x1_0_paddle2onnx_infer_python.txt => PPLCNet_x1_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (77%) rename test_tipc/config/PPLCNet/{PPLCNet_x1_5_paddle2onnx_infer_python.txt => PPLCNet_x1_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (77%) create mode 100644 test_tipc/config/PPLCNet/PPLCNet_x2_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt rename test_tipc/config/PPLCNet/{PPLCNet_x2_5_paddle2onnx_infer_python.txt => 
PPLCNet_x2_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (77%) create mode 100644 test_tipc/config/PPLCNetV2/PPLCNetV2_base_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt rename test_tipc/config/ResNet/{ResNet50_paddle2onnx_infer_python.txt => ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (77%) delete mode 100644 test_tipc/config/ResNet/ResNet50_vd_paddle2onnx_infer_python.txt rename test_tipc/config/{PPLCNet/PPLCNet_x2_0_paddle2onnx_infer_python.txt => SwinTransformer/SwinTransformer_tiny_patch4_window7_224_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt} (78%) delete mode 100644 test_tipc/config/SwinTransformer/SwinTransformer_tiny_patch4_window7_224_paddle2onnx_infer_python.txt diff --git a/test_tipc/config/MobileNetV3/MobileNetV3_large_x1_0_paddle2onnx_infer_python.txt b/test_tipc/config/MobileNetV3/MobileNetV3_large_x1_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt similarity index 78% rename from test_tipc/config/MobileNetV3/MobileNetV3_large_x1_0_paddle2onnx_infer_python.txt rename to test_tipc/config/MobileNetV3/MobileNetV3_large_x1_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt index 57f98f82..0212d0c4 100644 --- a/test_tipc/config/MobileNetV3/MobileNetV3_large_x1_0_paddle2onnx_infer_python.txt +++ b/test_tipc/config/MobileNetV3/MobileNetV3_large_x1_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt @@ -8,6 +8,7 @@ python:python3.7 --save_file:./deploy/models/MobileNetV3_large_x1_0_infer/inference.onnx --opset_version:10 --enable_onnx_checker:True +inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/MobileNetV3_large_x1_0_infer.tar inference:./python/predict_cls.py Global.use_onnx:True Global.inference_model_dir:./models/MobileNetV3_large_x1_0_infer diff --git a/test_tipc/config/PP-ShiTu/PPShiTu_general_rec_paddle2onnx_infer_python.txt b/test_tipc/config/PP-ShiTu/PPShiTu_general_rec_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt similarity index 76% rename from test_tipc/config/PP-ShiTu/PPShiTu_general_rec_paddle2onnx_infer_python.txt rename to test_tipc/config/PP-ShiTu/PPShiTu_general_rec_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt index 1a8d9f7f..56dcff84 100644 --- a/test_tipc/config/PP-ShiTu/PPShiTu_general_rec_paddle2onnx_infer_python.txt +++ b/test_tipc/config/PP-ShiTu/PPShiTu_general_rec_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt @@ -8,6 +8,7 @@ python:python3.7 --save_file:./deploy/models/general_PPLCNet_x2_5_lite_v1.0_infer/inference.onnx --opset_version:10 --enable_onnx_checker:True +inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/general_PPLCNet_x2_5_lite_v1.0_infer.tar inference:./python/predict_cls.py Global.use_onnx:True Global.inference_model_dir:./models/general_PPLCNet_x2_5_lite_v1.0_infer diff --git a/test_tipc/config/PP-ShiTu/PPShiTu_mainbody_det_paddle2onnx_infer_python.txt b/test_tipc/config/PP-ShiTu/PPShiTu_mainbody_det_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt similarity index 76% rename from test_tipc/config/PP-ShiTu/PPShiTu_mainbody_det_paddle2onnx_infer_python.txt rename to test_tipc/config/PP-ShiTu/PPShiTu_mainbody_det_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt index b87cf70f..b6606beb 100644 --- a/test_tipc/config/PP-ShiTu/PPShiTu_mainbody_det_paddle2onnx_infer_python.txt +++ b/test_tipc/config/PP-ShiTu/PPShiTu_mainbody_det_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt @@ -8,6 +8,7 @@ python:python3.7 
 --save_file:./deploy/models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer
diff --git a/test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt b/test_tipc/config/PPHGNet/PPHGNet_small_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
similarity index 100%
rename from test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt
rename to test_tipc/config/PPHGNet/PPHGNet_small_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
diff --git a/test_tipc/config/PPHGNet/PPHGNet_tiny_paddle2onnx_infer_python.txt b/test_tipc/config/PPHGNet/PPHGNet_tiny_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
similarity index 100%
rename from test_tipc/config/PPHGNet/PPHGNet_tiny_paddle2onnx_infer_python.txt
rename to test_tipc/config/PPHGNet/PPHGNet_tiny_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
diff --git a/test_tipc/config/PPLCNet/PPLCNet_x0_25_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x0_25_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
similarity index 100%
rename from test_tipc/config/PPLCNet/PPLCNet_x0_25_paddle2onnx_infer_python.txt
rename to test_tipc/config/PPLCNet/PPLCNet_x0_25_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
diff --git a/test_tipc/config/PPLCNet/PPLCNet_x0_35_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/config/PPLCNet/PPLCNet_x0_35_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
new file mode 100644
index 00000000..62dcb630
--- /dev/null
+++ b/test_tipc/config/PPLCNet/PPLCNet_x0_35_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -0,0 +1,16 @@
+===========================paddle2onnx_params===========================
+model_name:PPLCNet_x0_35
+python:python3.7
+2onnx: paddle2onnx
+--model_dir:./deploy/models/PPLCNet_x0_35_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--save_file:./deploy/models/PPLCNet_x0_35_infer/inference.onnx
+--opset_version:10
+--enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_35_infer.tar
+inference:./python/predict_cls.py
+Global.use_onnx:True
+Global.inference_model_dir:./models/PPLCNet_x0_35_infer
+Global.use_gpu:False
+-c:configs/inference_cls.yaml
\ No newline at end of file
diff --git a/test_tipc/config/PPLCNet/PPLCNet_x0_35_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x0_35_paddle2onnx_infer_python.txt
deleted file mode 100644
index f01063a8..00000000
--- a/test_tipc/config/PPLCNet/PPLCNet_x0_35_paddle2onnx_infer_python.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-===========================paddle2onnx_params===========================
-model_name:PPLCNet_x0_35
-python:python3.7
-2onnx: paddle2onnx
---model_dir:./deploy/models/PPLCNet_x0_35_infer/
---model_filename:inference.pdmodel
---params_filename:inference.pdiparams
---save_file:./deploy/models/PPLCNet_x0_35_infer/inference.onnx
---opset_version:10
---enable_onnx_checker:True
-inference:./python/predict_cls.py
-Global.use_onnx:True
-Global.inference_model_dir:./models/PPLCNet_x0_35_infer
-Global.use_gpu:False
--c:configs/inference_cls.yaml
\ No newline at end of file
diff --git a/test_tipc/config/PPLCNet/PPLCNet_x0_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/config/PPLCNet/PPLCNet_x0_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
new file mode 100644
index 00000000..b6606beb
--- /dev/null
+++ b/test_tipc/config/PPLCNet/PPLCNet_x0_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -0,0 +1,16 @@
+===========================paddle2onnx_params===========================
+model_name:PPLCNet_x0_5
+python:python3.7
+2onnx: paddle2onnx
+--model_dir:./deploy/models/PPLCNet_x0_5_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--save_file:./deploy/models/PPLCNet_x0_5_infer/inference.onnx
+--opset_version:10
+--enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_5_infer.tar
+inference:./python/predict_cls.py
+Global.use_onnx:True
+Global.inference_model_dir:./models/PPLCNet_x0_5_infer
+Global.use_gpu:False
+-c:configs/inference_cls.yaml
\ No newline at end of file
diff --git a/test_tipc/config/PPLCNet/PPLCNet_x0_5_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x0_5_paddle2onnx_infer_python.txt
deleted file mode 100644
index 4306ff12..00000000
--- a/test_tipc/config/PPLCNet/PPLCNet_x0_5_paddle2onnx_infer_python.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-===========================paddle2onnx_params===========================
-model_name:PPLCNet_x0_5
-python:python3.7
-2onnx: paddle2onnx
---model_dir:./deploy/models/PPLCNet_x0_5_infer/
---model_filename:inference.pdmodel
---params_filename:inference.pdiparams
---save_file:./deploy/models/PPLCNet_x0_5_infer/inference.onnx
---opset_version:10
---enable_onnx_checker:True
-inference:./python/predict_cls.py
-Global.use_onnx:True
-Global.inference_model_dir:./models/PPLCNet_x0_5_infer
-Global.use_gpu:False
--c:configs/inference_cls.yaml
\ No newline at end of file
diff --git a/test_tipc/config/PPLCNet/PPLCNet_x0_75_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x0_75_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
similarity index 77%
rename from test_tipc/config/PPLCNet/PPLCNet_x0_75_paddle2onnx_infer_python.txt
rename to test_tipc/config/PPLCNet/PPLCNet_x0_75_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index cd6a79b0..12464850 100644
--- a/test_tipc/config/PPLCNet/PPLCNet_x0_75_paddle2onnx_infer_python.txt
+++ b/test_tipc/config/PPLCNet/PPLCNet_x0_75_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -8,6 +8,7 @@ python:python3.7
 --save_file:./deploy/models/PPLCNet_x0_75_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_75_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/PPLCNet_x0_75_infer
diff --git a/test_tipc/config/PPLCNet/PPLCNet_x1_0_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x1_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
similarity index 77%
rename from test_tipc/config/PPLCNet/PPLCNet_x1_0_paddle2onnx_infer_python.txt
rename to test_tipc/config/PPLCNet/PPLCNet_x1_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 519c7558..d994671b 100644
--- a/test_tipc/config/PPLCNet/PPLCNet_x1_0_paddle2onnx_infer_python.txt
+++
b/test_tipc/config/PPLCNet/PPLCNet_x1_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt @@ -8,6 +8,7 @@ python:python3.7 --save_file:./deploy/models/PPLCNet_x1_0_infer/inference.onnx --opset_version:10 --enable_onnx_checker:True +inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_0_infer.tar inference:./python/predict_cls.py Global.use_onnx:True Global.inference_model_dir:./models/PPLCNet_x1_0_infer diff --git a/test_tipc/config/PPLCNet/PPLCNet_x1_5_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x1_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt similarity index 77% rename from test_tipc/config/PPLCNet/PPLCNet_x1_5_paddle2onnx_infer_python.txt rename to test_tipc/config/PPLCNet/PPLCNet_x1_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt index 93a7f652..8f8646f8 100644 --- a/test_tipc/config/PPLCNet/PPLCNet_x1_5_paddle2onnx_infer_python.txt +++ b/test_tipc/config/PPLCNet/PPLCNet_x1_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt @@ -8,6 +8,7 @@ python:python3.7 --save_file:./deploy/models/PPLCNet_x1_5_infer/inference.onnx --opset_version:10 --enable_onnx_checker:True +inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_5_infer.tar inference:./python/predict_cls.py Global.use_onnx:True Global.inference_model_dir:./models/PPLCNet_x1_5_infer diff --git a/test_tipc/config/PPLCNet/PPLCNet_x2_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/config/PPLCNet/PPLCNet_x2_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt new file mode 100644 index 00000000..19336133 --- /dev/null +++ b/test_tipc/config/PPLCNet/PPLCNet_x2_0_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt @@ -0,0 +1,16 @@ +===========================paddle2onnx_params=========================== +model_name:PPLCNet_x2_0 +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/PPLCNet_x2_0_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPLCNet_x2_0_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_0_infer.tar +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPLCNet_x2_0_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/PPLCNet/PPLCNet_x2_5_paddle2onnx_infer_python.txt b/test_tipc/config/PPLCNet/PPLCNet_x2_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt similarity index 77% rename from test_tipc/config/PPLCNet/PPLCNet_x2_5_paddle2onnx_infer_python.txt rename to test_tipc/config/PPLCNet/PPLCNet_x2_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt index 55819c0b..1e08ad41 100644 --- a/test_tipc/config/PPLCNet/PPLCNet_x2_5_paddle2onnx_infer_python.txt +++ b/test_tipc/config/PPLCNet/PPLCNet_x2_5_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt @@ -8,6 +8,7 @@ python:python3.7 --save_file:./deploy/models/PPLCNet_x2_5_infer/inference.onnx --opset_version:10 --enable_onnx_checker:True +inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_5_infer.tar inference:./python/predict_cls.py Global.use_onnx:True Global.inference_model_dir:./models/PPLCNet_x2_5_infer diff --git a/test_tipc/config/PPLCNetV2/PPLCNetV2_base_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt 
b/test_tipc/config/PPLCNetV2/PPLCNetV2_base_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt new file mode 100644 index 00000000..b5047248 --- /dev/null +++ b/test_tipc/config/PPLCNetV2/PPLCNetV2_base_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt @@ -0,0 +1,16 @@ +===========================paddle2onnx_params=========================== +model_name:PPLCNetV2_base +python:python3.7 +2onnx: paddle2onnx +--model_dir:./deploy/models/PPLCNetV2_base_infer/ +--model_filename:inference.pdmodel +--params_filename:inference.pdiparams +--save_file:./deploy/models/PPLCNetV2_base_infer/inference.onnx +--opset_version:10 +--enable_onnx_checker:True +inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNetV2_base_infer.tar +inference:./python/predict_cls.py +Global.use_onnx:True +Global.inference_model_dir:./models/PPLCNetV2_base_infer +Global.use_gpu:False +-c:configs/inference_cls.yaml \ No newline at end of file diff --git a/test_tipc/config/ResNet/ResNet50_paddle2onnx_infer_python.txt b/test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt similarity index 77% rename from test_tipc/config/ResNet/ResNet50_paddle2onnx_infer_python.txt rename to test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt index d0c81d51..784f1eb2 100644 --- a/test_tipc/config/ResNet/ResNet50_paddle2onnx_infer_python.txt +++ b/test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt @@ -8,6 +8,7 @@ python:python3.7 --save_file:./deploy/models/ResNet50_infer/inference.onnx --opset_version:10 --enable_onnx_checker:True +inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_infer.tar inference:./python/predict_cls.py Global.use_onnx:True Global.inference_model_dir:./models/ResNet50_infer diff --git a/test_tipc/config/ResNet/ResNet50_vd_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/config/ResNet/ResNet50_vd_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt index 163bb484..64ca6449 100644 --- a/test_tipc/config/ResNet/ResNet50_vd_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt +++ b/test_tipc/config/ResNet/ResNet50_vd_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt @@ -8,7 +8,9 @@ python:python3.7 --save_file:./deploy/models/ResNet50_vd_infer/inference.onnx --opset_version:10 --enable_onnx_checker:True +inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar inference: python/predict_cls.py -c configs/inference_cls.yaml Global.use_onnx:True Global.inference_model_dir:models/ResNet50_vd_infer/ Global.use_gpu:False +-c:configs/inference_cls.yaml diff --git a/test_tipc/config/ResNet/ResNet50_vd_paddle2onnx_infer_python.txt b/test_tipc/config/ResNet/ResNet50_vd_paddle2onnx_infer_python.txt deleted file mode 100644 index f4df8f09..00000000 --- a/test_tipc/config/ResNet/ResNet50_vd_paddle2onnx_infer_python.txt +++ /dev/null @@ -1,15 +0,0 @@ -===========================paddle2onnx_params=========================== -model_name:ResNet50_vd -python:python3.7 -2onnx: paddle2onnx ---model_dir:./deploy/models/ResNet50_vd_infer/ ---model_filename:inference.pdmodel ---params_filename:inference.pdiparams ---save_file:./deploy/models/ResNet50_vd_infer/inference.onnx ---opset_version:10 ---enable_onnx_checker:True -inference:./python/predict_cls.py -Global.use_onnx:True -Global.inference_model_dir:./models/ResNet50_vd_infer -Global.use_gpu:False 
--c:configs/inference_cls.yaml
\ No newline at end of file
diff --git a/test_tipc/config/PPLCNet/PPLCNet_x2_0_paddle2onnx_infer_python.txt b/test_tipc/config/SwinTransformer/SwinTransformer_tiny_patch4_window7_224_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
similarity index 78%
rename from test_tipc/config/PPLCNet/PPLCNet_x2_0_paddle2onnx_infer_python.txt
rename to test_tipc/config/SwinTransformer/SwinTransformer_tiny_patch4_window7_224_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 7b85765b..82ea03ce 100644
--- a/test_tipc/config/PPLCNet/PPLCNet_x2_0_paddle2onnx_infer_python.txt
+++ b/test_tipc/config/SwinTransformer/SwinTransformer_tiny_patch4_window7_224_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -8,6 +8,7 @@ python:python3.7
 --save_file:./deploy/models/SwinTransformer_tiny_patch4_window7_224_infer/inference.onnx
 --opset_version:10
 --enable_onnx_checker:True
+inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/SwinTransformer_tiny_patch4_window7_224_infer.tar
 inference:./python/predict_cls.py
 Global.use_onnx:True
 Global.inference_model_dir:./models/SwinTransformer_tiny_patch4_window7_224_infer
diff --git a/test_tipc/config/SwinTransformer/SwinTransformer_tiny_patch4_window7_224_paddle2onnx_infer_python.txt b/test_tipc/config/SwinTransformer/SwinTransformer_tiny_patch4_window7_224_paddle2onnx_infer_python.txt
deleted file mode 100644
index 7b85765b..00000000
--- a/test_tipc/config/SwinTransformer/SwinTransformer_tiny_patch4_window7_224_paddle2onnx_infer_python.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-===========================paddle2onnx_params===========================
-model_name:SwinTransformer_tiny_patch4_window7_224
-python:python3.7
-2onnx: paddle2onnx
---model_dir:./deploy/models/SwinTransformer_tiny_patch4_window7_224_infer/
---model_filename:inference.pdmodel
---params_filename:inference.pdiparams
---save_file:./deploy/models/SwinTransformer_tiny_patch4_window7_224_infer/inference.onnx
---opset_version:10
---enable_onnx_checker:True
-inference:./python/predict_cls.py
-Global.use_onnx:True
-Global.inference_model_dir:./models/SwinTransformer_tiny_patch4_window7_224_infer
-Global.use_gpu:False
--c:configs/inference_cls.yaml
\ No newline at end of file
diff --git a/test_tipc/docs/test_paddle2onnx.md b/test_tipc/docs/test_paddle2onnx.md
index ba055434..f3c292cf 100644
--- a/test_tipc/docs/test_paddle2onnx.md
+++ b/test_tipc/docs/test_paddle2onnx.md
@@ -10,36 +10,39 @@ The main program for the Paddle2ONNX functional test is `test_paddle2onnx.sh`, which can test
 | ---- | ---- |
 | normal model | GPU |
 | normal model | CPU |
-| quantized model | GPU |
-| quantized model | CPU |
+
 ## 2. Test procedure
+
+The following takes the paddle2onnx test of the `ResNet50` model as an example.
+
 ### 2.1 Functional test
-First run `prepare.sh` to prepare the data and model, then run `test_paddle2onnx.sh` to run the test; log files with the suffix `paddle2onnx_infer_*.log` are finally generated under the `test_tipc/output` directory.
+First run `prepare.sh` to prepare the data and model, then run `test_paddle2onnx.sh` to run the test; log files with the suffix `paddle2onnx_infer_*.log` are finally generated under the `test_tipc/output/ResNet50` directory.
 The test commands and results below take ResNet50 as an example.
 ```shell
-bash test_tipc/prepare.sh ./test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt paddle2onnx_infer
+bash test_tipc/prepare.sh ./test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt paddle2onnx_infer
 # usage:
-bash test_tipc/test_paddle2onnx.sh ./test_tipc/config/PPHGNet/PPHGNet_small_paddle2onnx_infer_python.txt
+bash test_tipc/test_paddle2onnx.sh ./test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
 ```
 #### Results
-The run status of each test is printed in `test_tipc/output/results_paddle2onnx.log`:
+The run status of each test is printed in `./test_tipc/output/ResNet50/results_paddle2onnx.log`:
 On success it prints:
 ```
-Run successfully with command - paddle2onnx --model_dir=./deploy/models/PPHGNet_tiny_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/PPHGNet_tiny_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True!
-Run successfully with command - cd deploy && python3.7 ./python/predict_cls.py -o Global.inference_model_dir=./models/PPHGNet_tiny_infer -o Global.use_onnx=True -o Global.use_gpu=False -c=configs/inference_cls.yaml > ../test_tipc/output/paddle2onnx_infer_cpu.log 2>&1 && cd ../!
+Run successfully with command - paddle2onnx --model_dir=./deploy/models/ResNet50_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/ResNet50_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True!
+Run successfully with command - cd deploy && python3.7 ./python/predict_cls.py -o Global.inference_model_dir=./models/ResNet50_infer -o Global.use_onnx=True -o Global.use_gpu=False -c=configs/inference_cls.yaml > ../test_tipc/output/ResNet50/paddle2onnx_infer_cpu.log 2>&1 && cd ../!
 ```
 On failure it prints:
 ```
-Run failed with command - paddle2onnx --model_dir=./deploy/models/PPHGNet_tiny_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/PPHGNet_tiny_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True!
+Run failed with command - paddle2onnx --model_dir=./deploy/models/ResNet50_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/ResNet50_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True!
+Run failed with command - cd deploy && python3.7 ./python/predict_cls.py -o Global.inference_model_dir=./models/ResNet50_infer -o Global.use_onnx=True -o Global.use_gpu=False -c=configs/inference_cls.yaml > ../test_tipc/output/ResNet50/paddle2onnx_infer_cpu.log 2>&1 && cd ../!
 ...
``` diff --git a/test_tipc/prepare.sh b/test_tipc/prepare.sh index cf64c8c6..27af4e17 100644 --- a/test_tipc/prepare.sh +++ b/test_tipc/prepare.sh @@ -174,161 +174,18 @@ fi if [ ${MODE} = "paddle2onnx_infer" ];then # prepare paddle2onnx env python_name=$(func_parser_value "${lines[2]}") + inference_model_url=$(func_parser_value "${lines[10]}") + tar_name=${inference_model_url##*/} + ${python_name} -m pip install install paddle2onnx ${python_name} -m pip install onnxruntime - if [ ${model_name} == "ResNet50" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_infer.tar - tar xf ResNet50_infer.tar - cd ../../ - fi - if [ ${model_name} == "ResNet50_vd" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar - tar xf ResNet50_vd_infer.tar - cd ../../ - fi - if [ ${model_name} == "MobileNetV3_large_x1_0" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/MobileNetV3_large_x1_0_infer.tar - tar xf MobileNetV3_large_x1_0_infer.tar - cd ../../ - fi - if [ ${model_name} == "SwinTransformer_tiny_patch4_window7_224" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/SwinTransformer_tiny_patch4_window7_224_infer.tar - tar xf SwinTransformer_tiny_patch4_window7_224_infer.tar - cd ../../ - fi - if [ ${model_name} == "PPLCNet_x0_25" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_25_infer.tar - tar xf PPLCNet_x0_25_infer.tar - cd ../../ - fi - if [ ${model_name} == "PPLCNet_x0_35" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_35_infer.tar - tar xf PPLCNet_x0_35_infer.tar - cd ../../ - fi - if [ ${model_name} == "PPLCNet_x0_5" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_5_infer.tar - tar xf PPLCNet_x0_5_infer.tar - cd ../../ - fi - if [ ${model_name} == "PPLCNet_x0_75" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_75_infer.tar - tar xf PPLCNet_x0_75_infer.tar - cd ../../ - fi - if [ ${model_name} == "PPLCNet_x1_0" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_0_infer.tar - tar xf PPLCNet_x1_0_infer.tar - cd ../../ - fi - if [ ${model_name} == "PPLCNet_x1_5" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_5_infer.tar - tar xf PPLCNet_x1_5_infer.tar - cd ../../ - fi - if [ ${model_name} == "PPLCNet_x2_0" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_0_infer.tar - tar xf PPLCNet_x2_0_infer.tar - cd ../../ - fi - if [ ${model_name} == "PPLCNet_x2_5" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc 
https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_5_infer.tar - tar xf PPLCNet_x2_5_infer.tar - cd ../../ - fi - if [ ${model_name} == "PP-ShiTu_general_rec" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/general_PPLCNet_x2_5_lite_v1.0_infer.tar - tar xf general_PPLCNet_x2_5_lite_v1.0_infer.tar - cd ../../ - fi - if [ ${model_name} == "PP-ShiTu_mainbody_det" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer.tar - tar xf picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer.tar - cd ../../ - fi - if [ ${model_name} == "PPLCNetV2_base" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNetV2_base_infer.tar - tar xf PPLCNetV2_base_infer.tar - cd ../../ - fi - if [ ${model_name} == "PPHGNet_tiny" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPHGNet_tiny_infer.tar - tar xf PPHGNet_tiny_infer.tar - cd ../../ - fi - if [ ${model_name} == "PPHGNet_small" ]; then - # wget model - cd deploy - mkdir models - cd models - wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPHGNet_small_infer.tar - tar xf PPHGNet_small_infer.tar - cd ../../ - fi + cd deploy + mkdir models + cd models + wget -nc ${inference_model_url} + tar xf ${tar_name} + cd ../../ + fi diff --git a/test_tipc/test_paddle2onnx.sh b/test_tipc/test_paddle2onnx.sh index cc31e3b1..fd2f1d90 100644 --- a/test_tipc/test_paddle2onnx.sh +++ b/test_tipc/test_paddle2onnx.sh @@ -11,7 +11,7 @@ python=$(func_parser_value "${lines[2]}") # parser params -dataline=$(awk 'NR==1, NR==15{print}' $FILENAME) +dataline=$(awk 'NR==1, NR==16{print}' $FILENAME) IFS=$'\n' lines=(${dataline}) @@ -32,17 +32,17 @@ opset_version_value=$(func_parser_value "${lines[8]}") enable_onnx_checker_key=$(func_parser_key "${lines[9]}") enable_onnx_checker_value=$(func_parser_value "${lines[9]}") # parser onnx inference -inference_py=$(func_parser_value "${lines[10]}") -use_onnx_key=$(func_parser_key "${lines[11]}") -use_onnx_value=$(func_parser_value "${lines[11]}") -inference_model_dir_key=$(func_parser_key "${lines[12]}") -inference_model_dir_value=$(func_parser_value "${lines[12]}") -inference_hardware_key=$(func_parser_key "${lines[13]}") -inference_hardware_value=$(func_parser_value "${lines[13]}") -inference_config_key=$(func_parser_key "${lines[14]}") -inference_config_value=$(func_parser_value "${lines[14]}") +inference_py=$(func_parser_value "${lines[11]}") +use_onnx_key=$(func_parser_key "${lines[12]}") +use_onnx_value=$(func_parser_value "${lines[12]}") +inference_model_dir_key=$(func_parser_key "${lines[13]}") +inference_model_dir_value=$(func_parser_value "${lines[13]}") +inference_hardware_key=$(func_parser_key "${lines[14]}") +inference_hardware_value=$(func_parser_value "${lines[14]}") +inference_config_key=$(func_parser_key "${lines[15]}") +inference_config_value=$(func_parser_value "${lines[15]}") -LOG_PATH="./test_tipc/output" +LOG_PATH="./test_tipc/output/${model_name}" mkdir -p ./test_tipc/output status_log="${LOG_PATH}/results_paddle2onnx.log" -- GitLab
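The refactor above boils down to one config-driven flow: read `inference_model_url` from line 11 of the per-model config, derive the tar name, download and unpack it under `deploy/models`, and write logs to a per-model directory. The sketch below illustrates that flow under stated assumptions: the `func_parser_value` reimplementation (returning everything after the first `:`) stands in for the helper sourced from `test_tipc/common_func.sh`, the `mkdir -p` calls and the `LOG_PATH` creation are added here for robustness rather than copied from the scripts, and the script name is hypothetical.

```bash
#!/bin/bash
# Minimal, illustrative sketch of the config-driven model preparation
# introduced by this patch (not the literal prepare.sh code).

# Assumed helper, equivalent in spirit to func_parser_value in
# test_tipc/common_func.sh: return everything after the first ':'.
func_parser_value() {
    echo "${1#*:}"
}

FILENAME=$1   # e.g. test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt

# Read the first 16 lines of the config, one array element per line.
dataline=$(awk 'NR==1, NR==16{print}' "$FILENAME")
IFS=$'\n'
lines=(${dataline})

model_name=$(func_parser_value "${lines[1]}")            # line 2:  model_name:<name>
inference_model_url=$(func_parser_value "${lines[10]}")  # line 11: inference_model_url:<tar url>
tar_name=${inference_model_url##*/}                      # keep only the tar file name

# Download and unpack the inference model, as prepare.sh now does for every model.
cd deploy && mkdir -p models && cd models
wget -nc "${inference_model_url}"
tar xf "${tar_name}"
cd ../../

# Per-model log directory used by test_paddle2onnx.sh; created here for safety.
LOG_PATH="./test_tipc/output/${model_name}"
mkdir -p "${LOG_PATH}"
echo "prepared ${tar_name} for ${model_name}; logs go to ${LOG_PATH}"
```

Invoked as, say, `bash prepare_sketch.sh test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt`, this mirrors the setup that `prepare.sh ... paddle2onnx_infer` and `test_paddle2onnx.sh` perform before the paddle2onnx conversion and ONNX inference steps run.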