diff --git a/cmake/external/lite.cmake b/cmake/external/lite.cmake index ffadf7bea93768090ee8824a2b6df7e7fc3a2592..1a5c796f21082e2f32bfeea65d08cabe449861b9 100644 --- a/cmake/external/lite.cmake +++ b/cmake/external/lite.cmake @@ -50,7 +50,7 @@ if (NOT LITE_SOURCE_DIR OR NOT LITE_BINARY_DIR) set(LITE_INSTALL_DIR ${THIRD_PARTY_PATH}/install/lite) if(NOT LITE_GIT_TAG) - set(LITE_GIT_TAG d3a3a6931b6d22d504d21ba32b3ae972770e9204) + set(LITE_GIT_TAG 4ab64daecc11fbf74fffdc6a4733f388472e7d5d) endif() if(NOT CUDA_ARCH_NAME) diff --git a/paddle/fluid/inference/api/analysis_predictor.cc b/paddle/fluid/inference/api/analysis_predictor.cc index 6ebb2193e21187af8e439ad735ab864355808966..804f035a2e2cacb51be12a8be275bd1efccb9323 100644 --- a/paddle/fluid/inference/api/analysis_predictor.cc +++ b/paddle/fluid/inference/api/analysis_predictor.cc @@ -686,9 +686,24 @@ void AnalysisPredictor::OptimizeInferenceProgram() { // Note, please do NOT use any member variables, because member variables may // have been destructed in multiple threads. 
#if PADDLE_WITH_TENSORRT - paddle::inference::Singleton< - inference::tensorrt::TRTEngineManager>::Global() - .DeleteAll(); + auto &block = prog->Block(0); + for (auto &op_desc : block.AllOps()) { + if (op_desc->Type() == "tensorrt_engine") { + std::string engine_key = + BOOST_GET_CONST(std::string, op_desc->GetAttr("engine_key")); + int engine_predictor_id = + BOOST_GET_CONST(int, op_desc->GetAttr("predictor_id")); + std::string engine_name = + engine_key + std::to_string(engine_predictor_id); + if (paddle::inference::Singleton< + inference::tensorrt::TRTEngineManager>::Global() + .Has(engine_name)) { + paddle::inference::Singleton< + inference::tensorrt::TRTEngineManager>::Global() + .DeleteKey(engine_name); + } + } + } #endif delete prog; }); diff --git a/paddle/fluid/inference/tensorrt/engine.h b/paddle/fluid/inference/tensorrt/engine.h index 29324f290064cb2f22dcdebe8fb99e7e99748719..e22c2488d3b8b63746ad9fd19eaa724ce2efa8f7 100644 --- a/paddle/fluid/inference/tensorrt/engine.h +++ b/paddle/fluid/inference/tensorrt/engine.h @@ -631,6 +631,14 @@ class TRTEngineManager { } } + void DeleteKey(const std::string& key) { + auto iter = engines_.find(key); + if (iter != engines_.end()) { + iter->second.reset(nullptr); + engines_.erase(iter); + } + } + private: std::unordered_map<std::string, std::unique_ptr<TensorRTEngine>> engines_; };