From 1e8976e85441d59ffc31167375c148fdfd66d377 Mon Sep 17 00:00:00 2001
From: Yuanle Liu
Date: Fri, 6 Jan 2023 13:46:11 +0800
Subject: [PATCH] fix trt engine memory sharing (#49584)

---
 paddle/fluid/inference/api/analysis_config.cc   | 1 +
 paddle/fluid/inference/api/analysis_predictor.h | 9 +++++----
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/paddle/fluid/inference/api/analysis_config.cc b/paddle/fluid/inference/api/analysis_config.cc
index 5d71c7cee1d..2975322dece 100644
--- a/paddle/fluid/inference/api/analysis_config.cc
+++ b/paddle/fluid/inference/api/analysis_config.cc
@@ -434,6 +434,7 @@ AnalysisConfig::AnalysisConfig(const AnalysisConfig &other) {
   CP_MEMBER(shape_range_info_path_);
   CP_MEMBER(trt_use_inspector_);
   CP_MEMBER(trt_engine_memory_sharing_);
+  CP_MEMBER(trt_engine_memory_sharing_identifier_);
   // Dlnne related
   CP_MEMBER(use_dlnne_);
   CP_MEMBER(dlnne_min_subgraph_size_);
diff --git a/paddle/fluid/inference/api/analysis_predictor.h b/paddle/fluid/inference/api/analysis_predictor.h
index c13b7624a3b..95a58d856f3 100644
--- a/paddle/fluid/inference/api/analysis_predictor.h
+++ b/paddle/fluid/inference/api/analysis_predictor.h
@@ -103,16 +103,17 @@ class AnalysisPredictor : public PaddlePredictor {
     if (config_.shape_range_info_collected()) {
       config_.SwitchIrOptim(false);
     }
-    auto trt_identifier = config_.trt_engine_memory_sharing_identifier_;
+    int trt_identifier = config_.trt_engine_memory_sharing_identifier_;
     if (trt_identifier > 0) {
       // NOTE(liuyuanle): For convenience, we set the id of the predictor to
       // negative sharing_identifier directly. In the future, this may affect
       // the meaning of negative predictor id.
       predictor_id_ = -trt_identifier;
-      LOG_FIRST_N(WARNING, 1)
+      LOG(WARNING)
           << "Since the engine context memory of multiple predictors "
-             "is enabled in Paddle-TRT, we set the id of current predictor to "
-             "negative sharing_identifier you specified.";
+             "is enabled in Paddle-TRT, we set the id of these predictors to "
+             "negative sharing_identifier you specified : "
+          << predictor_id_;
     } else {
       predictor_id_ = inference::GetUniqueId();
     }
--
GitLab
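
Context for the patch: TensorRT engine context memory sharing is keyed entirely by the identifier stored on the AnalysisConfig, so a copy constructor that drops trt_engine_memory_sharing_identifier_ makes predictors built from a copied config silently fall back to unique ids and stop sharing. The sketch below shows how a user-side setup would exercise this path. It is a minimal, assumed example, not code from the patch: the paths are placeholders, and the EnableTensorRTMemoryOptim call is assumed to be the config entry point for engine memory sharing; verify names and signatures against your Paddle release.

// Minimal sketch (assumed API): two predictors created from the same config
// (one via an explicit copy) are intended to share a single TensorRT engine
// context memory pool identified by a positive sharing_identifier.
#include <memory>
#include "paddle_inference_api.h"  // include path depends on install layout

int main() {
  paddle_infer::Config config;
  config.SetModel("model.pdmodel", "model.pdiparams");  // placeholder paths
  config.EnableUseGpu(/*memory_pool_init_size_mb=*/500, /*device_id=*/0);
  config.EnableTensorRtEngine(/*workspace_size=*/1 << 30,
                              /*max_batch_size=*/1,
                              /*min_subgraph_size=*/3,
                              paddle_infer::PrecisionType::kFloat32,
                              /*use_static=*/false,
                              /*use_calib_mode=*/false);

  // Assumed helper for engine context memory sharing; the positive identifier
  // is what ends up in trt_engine_memory_sharing_identifier_.
  config.EnableTensorRTMemoryOptim(/*engine_memory_sharing=*/true,
                                   /*sharing_identifier=*/1);

  // Before this patch, copying the config (explicitly as below, or internally
  // during predictor creation) lost the identifier, so the second predictor
  // received a unique id and no memory sharing took place.
  paddle_infer::Config config_copy(config);
  auto predictor_a = paddle_infer::CreatePredictor(config);
  auto predictor_b = paddle_infer::CreatePredictor(config_copy);
  return 0;
}

With the CP_MEMBER line added, both predictors see the same positive identifier, take the trt_identifier > 0 branch in the constructor above, and are assigned the same negative predictor id, which is what the revised warning message now reports per predictor.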