Commit 98811562 authored by LDOUBLEV

delete shape_info_filename

Parent 1b07e429
@@ -40,7 +40,6 @@ def init_args():
     parser.add_argument("--ir_optim", type=str2bool, default=True)
     parser.add_argument("--use_tensorrt", type=str2bool, default=False)
     parser.add_argument("--min_subgraph_size", type=int, default=15)
-    parser.add_argument("--shape_info_filename", type=str, default=None)
     parser.add_argument("--precision", type=str, default="fp32")
     parser.add_argument("--gpu_mem", type=int, default=500)
@@ -228,7 +227,8 @@ def create_predictor(args, mode, logger):
                     use_calib_mode=False)
                 # collect shape
-                trt_shape_f = f"{os.path.dirname(args.shape_info_filename)}/{mode}_{os.path.basename(args.shape_info_filename)}"
+                model_name = os.path.basename(model_dir[:-1]) if model_dir.endswith("/") else os.path.basename(model_dir)
+                trt_shape_f = f"{mode}_{model_name}"
                 if trt_shape_f is not None:
                     if not os.path.exists(trt_shape_f):
                         config.collect_shape_range_info(trt_shape_f)
@@ -240,10 +240,6 @@ def create_predictor(args, mode, logger):
                             f"dynamic shape info file( {trt_shape_f} ) already exists, not need to generate again."
                         )
                     config.enable_tuned_tensorrt_dynamic_shape(trt_shape_f, True)
-                else:
-                    logger.info(
-                        f"when using tensorrt, dynamic shape is a suggested option, you can use '--shape_info_filename=shape.txt' for offline dygnamic shape tuning"
-                    )
         elif args.use_npu:
             config.enable_npu()
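The net effect of the diff is that the user-supplied `--shape_info_filename` flag is removed and the TensorRT dynamic-shape cache file is instead derived automatically from the inference mode and the model directory name. The following is a minimal sketch of how the new logic fits together; the helper names `derive_trt_shape_file` and `setup_dynamic_shape` are illustrative and not part of the repository, while `collect_shape_range_info` and `enable_tuned_tensorrt_dynamic_shape` are the Paddle Inference `Config` calls already used in the diff.

```python
import os

from paddle.inference import Config


def derive_trt_shape_file(model_dir: str, mode: str) -> str:
    # Mirror the naming introduced in the diff: the shape file is named after
    # the inference mode (e.g. "det", "rec") and the basename of the model
    # directory, with a trailing "/" stripped first.
    model_name = os.path.basename(
        model_dir[:-1]) if model_dir.endswith("/") else os.path.basename(model_dir)
    return f"{mode}_{model_name}"


def setup_dynamic_shape(config: Config, model_dir: str, mode: str, logger) -> None:
    # Hypothetical helper wrapping the collect/reuse flow from the diff.
    trt_shape_f = derive_trt_shape_file(model_dir, mode)
    if not os.path.exists(trt_shape_f):
        # First run: record the tensor shape ranges observed during inference.
        config.collect_shape_range_info(trt_shape_f)
        logger.info(f"collect dynamic shape info into : {trt_shape_f}")
    else:
        logger.info(
            f"dynamic shape info file( {trt_shape_f} ) already exists, "
            "not need to generate again.")
    # Later runs reuse the recorded ranges to tune TensorRT dynamic shapes.
    config.enable_tuned_tensorrt_dynamic_shape(trt_shape_f, True)
```

In practice this means each model/mode pair gets its own shape cache file in the working directory: the first TensorRT run collects the shape ranges, and subsequent runs reuse them without any command-line flag.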