diff --git a/tools/infer/utility.py b/tools/infer/utility.py
index b9793123163185d823cd0bd3fe53b99719f66669..5bca1193b15a5e206b50cf885b13c389757b8e3c 100644
--- a/tools/infer/utility.py
+++ b/tools/infer/utility.py
@@ -38,7 +38,6 @@ def init_args():
     parser.add_argument("--ir_optim", type=str2bool, default=True)
     parser.add_argument("--use_tensorrt", type=str2bool, default=False)
     parser.add_argument("--min_subgraph_size", type=int, default=15)
-    parser.add_argument("--shape_info_filename", type=str, default=None)
     parser.add_argument("--precision", type=str, default="fp32")
     parser.add_argument("--gpu_mem", type=int, default=500)
 
@@ -226,22 +225,25 @@ def create_predictor(args, mode, logger):
                     use_calib_mode=False)
 
                 # collect shape
-                trt_shape_f = f"{os.path.dirname(args.shape_info_filename)}/{mode}_{os.path.basename(args.shape_info_filename)}"
-                if trt_shape_f is not None:
-                    if not os.path.exists(trt_shape_f):
-                        config.collect_shape_range_info(trt_shape_f)
-                        logger.info(
-                            f"collect dynamic shape info into : {trt_shape_f}"
-                        )
-                    else:
-                        logger.info(
-                            f"dynamic shape info file( {trt_shape_f} ) already exists, not need to generate again."
-                        )
-                    config.enable_tuned_tensorrt_dynamic_shape(trt_shape_f, True)
+                model_name = os.path.basename(
+                    model_dir[:-1]) if model_dir.endswith(
+                        "/") else os.path.basename(model_dir)
+                trt_shape_f = f"{mode}_{model_name}.txt"
+
+                if not os.path.exists(trt_shape_f):
+                    config.collect_shape_range_info(trt_shape_f)
+                    logger.info(
+                        f"collect dynamic shape info into : {trt_shape_f}")
                 else:
                     logger.info(
-                        f"when using tensorrt, dynamic shape is a suggested option, you can use '--shape_info_filename=shape.txt' for offline dygnamic shape tuning"
+                        f"dynamic shape info file( {trt_shape_f} ) already exists, not need to generate again."
                     )
+                try:
+                    config.enable_tuned_tensorrt_dynamic_shape(trt_shape_f,
+                                                               True)
+                except Exception as E:
+                    logger.info(E)
+                    logger.info("Please keep your paddlepaddle-gpu >= 2.3.0!")
 
             elif args.use_xpu:
                 config.enable_xpu(10 * 1024 * 1024)