From 597ee656bd2f8ab4da5e5d0f7b1dbd1d3dd37ff0 Mon Sep 17 00:00:00 2001
From: duanyanhui <45005871+YanhuiDua@users.noreply.github.com>
Date: Tue, 9 May 2023 10:43:18 +0800
Subject: [PATCH] fix npu tipc script (#8213)

---
 deploy/pptracking/python/mot_jde_infer.py    | 6 +++---
 deploy/pptracking/python/mot_sde_infer.py    | 6 +++---
 deploy/pptracking/python/mot_utils.py        | 2 +-
 test_tipc/test_train_inference_python_npu.sh | 2 ++
 4 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/deploy/pptracking/python/mot_jde_infer.py b/deploy/pptracking/python/mot_jde_infer.py
index e3a9958f7..d381a211f 100644
--- a/deploy/pptracking/python/mot_jde_infer.py
+++ b/deploy/pptracking/python/mot_jde_infer.py
@@ -45,7 +45,7 @@ class JDE_Detector(Detector):
     """
     Args:
         model_dir (str): root path of model.pdiparams, model.pdmodel and infer_cfg.yml
-        device (str): Choose the device you want to run, it can be: CPU/GPU/XPU, default is CPU
+        device (str): Choose the device you want to run, it can be: CPU/GPU/XPU/NPU, default is CPU
         run_mode (str): mode of running(paddle/trt_fp32/trt_fp16)
         batch_size (int): size of pre batch in inference
         trt_min_shape (int): min shape for dynamic shape in trt
@@ -502,7 +502,7 @@ if __name__ == '__main__':
     FLAGS = parser.parse_args()
     print_arguments(FLAGS)
     FLAGS.device = FLAGS.device.upper()
-    assert FLAGS.device in ['CPU', 'GPU', 'XPU'
-                            ], "device should be CPU, GPU or XPU"
+    assert FLAGS.device in ['CPU', 'GPU', 'XPU', 'NPU'
+                            ], "device should be CPU, GPU, NPU or XPU"
 
     main()
diff --git a/deploy/pptracking/python/mot_sde_infer.py b/deploy/pptracking/python/mot_sde_infer.py
index 499ee2c2d..5a2693eec 100644
--- a/deploy/pptracking/python/mot_sde_infer.py
+++ b/deploy/pptracking/python/mot_sde_infer.py
@@ -46,7 +46,7 @@ class SDE_Detector(Detector):
     Args:
         model_dir (str): root path of model.pdiparams, model.pdmodel and infer_cfg.yml
         tracker_config (str): tracker config path
-        device (str): Choose the device you want to run, it can be: CPU/GPU/XPU, default is CPU
+        device (str): Choose the device you want to run, it can be: CPU/GPU/XPU/NPU, default is CPU
         run_mode (str): mode of running(paddle/trt_fp32/trt_fp16)
         batch_size (int): size of pre batch in inference
         trt_min_shape (int): min shape for dynamic shape in trt
@@ -946,7 +946,7 @@ if __name__ == '__main__':
     FLAGS = parser.parse_args()
     print_arguments(FLAGS)
     FLAGS.device = FLAGS.device.upper()
-    assert FLAGS.device in ['CPU', 'GPU', 'XPU'
-                            ], "device should be CPU, GPU or XPU"
+    assert FLAGS.device in ['CPU', 'GPU', 'XPU', 'NPU'
+                            ], "device should be CPU, GPU, NPU or XPU"
 
     main()
diff --git a/deploy/pptracking/python/mot_utils.py b/deploy/pptracking/python/mot_utils.py
index 9d7b18f92..055d3817a 100644
--- a/deploy/pptracking/python/mot_utils.py
+++ b/deploy/pptracking/python/mot_utils.py
@@ -64,7 +64,7 @@ def argsparser():
         "--device",
         type=str,
         default='cpu',
-        help="Choose the device you want to run, it can be: CPU/GPU/XPU, default is CPU."
+        help="Choose the device you want to run, it can be: CPU/GPU/XPU/NPU, default is CPU."
     )
     parser.add_argument(
         "--use_gpu",
diff --git a/test_tipc/test_train_inference_python_npu.sh b/test_tipc/test_train_inference_python_npu.sh
index 9ca6ee3a7..e759738d3 100644
--- a/test_tipc/test_train_inference_python_npu.sh
+++ b/test_tipc/test_train_inference_python_npu.sh
@@ -33,6 +33,8 @@ FILENAME=$1
 # change gpu to npu in tipc txt configs
 sed -i "s/use_gpu:True/use_npu:True/g" $FILENAME
 sed -i "s/--device:gpu|cpu/--device:npu|cpu/g" $FILENAME
+sed -i "s/--device:gpu/--device:npu/g" $FILENAME
+sed -i "s/--device:cpu|gpu/--device:cpu|npu/g" $FILENAME
 sed -i "s/trainer:pact_train/trainer:norm_train/g" $FILENAME
 sed -i "s/trainer:fpgm_train/trainer:norm_train/g" $FILENAME
 sed -i "s/--slim_config _template_pact/ /g" $FILENAME
--
GitLab
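
A minimal shell sketch of what the two newly added sed rules in test_train_inference_python_npu.sh are expected to do; the echoed device entries below are hypothetical sample values from a TIPC config (the real script rewrites $FILENAME in place rather than echoing lines):

# hypothetical config entry with a bare gpu device -> rewritten to npu
echo '--device:gpu'     | sed "s/--device:gpu/--device:npu/g"          # prints: --device:npu
# hypothetical cpu|gpu entry -> gpu half rewritten to npu
echo '--device:cpu|gpu' | sed "s/--device:cpu|gpu/--device:cpu|npu/g"  # prints: --device:cpu|npu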