diff --git a/cmake/external/lite.cmake b/cmake/external/lite.cmake index 389ca9a799424e2cac250fdc2271c11773507838..1a5c796f21082e2f32bfeea65d08cabe449861b9 100644 --- a/cmake/external/lite.cmake +++ b/cmake/external/lite.cmake @@ -50,7 +50,7 @@ if (NOT LITE_SOURCE_DIR OR NOT LITE_BINARY_DIR) set(LITE_INSTALL_DIR ${THIRD_PARTY_PATH}/install/lite) if(NOT LITE_GIT_TAG) - set(LITE_GIT_TAG 1c4698c6efd9a5f57a4f8369bd5b6374166f5ba4) + set(LITE_GIT_TAG 4ab64daecc11fbf74fffdc6a4733f388472e7d5d) endif() if(NOT CUDA_ARCH_NAME) diff --git a/cmake/external/xpu.cmake b/cmake/external/xpu.cmake index 02abf08a99ce8fcb1b3ca7d8e38c0b3103a6bb46..70bdc67980c038e61f2d9821b837cad54046c806 100644 --- a/cmake/external/xpu.cmake +++ b/cmake/external/xpu.cmake @@ -35,7 +35,7 @@ ELSE () ENDIF() SET(XPU_BASE_URL_WITHOUT_DATE "https://baidu-kunlun-product.cdn.bcebos.com/KL-SDK/klsdk-dev") -SET(XPU_BASE_URL "${XPU_BASE_URL_WITHOUT_DATE}/20210909") +SET(XPU_BASE_URL "${XPU_BASE_URL_WITHOUT_DATE}/20210921") SET(XPU_XRE_URL "${XPU_BASE_URL}/${XPU_XRE_DIR_NAME}.tar.gz" CACHE STRING "" FORCE) SET(XPU_XDNN_URL "${XPU_BASE_URL}/${XPU_XDNN_DIR_NAME}.tar.gz" CACHE STRING "" FORCE) SET(XPU_XCCL_URL "${XPU_BASE_URL_WITHOUT_DATE}/20210623/${XPU_XCCL_DIR_NAME}.tar.gz" CACHE STRING "" FORCE) diff --git a/paddle/fluid/inference/api/analysis_predictor.cc b/paddle/fluid/inference/api/analysis_predictor.cc index 6ebb2193e21187af8e439ad735ab864355808966..804f035a2e2cacb51be12a8be275bd1efccb9323 100644 --- a/paddle/fluid/inference/api/analysis_predictor.cc +++ b/paddle/fluid/inference/api/analysis_predictor.cc @@ -686,9 +686,24 @@ void AnalysisPredictor::OptimizeInferenceProgram() { // Note, please do NOT use any member variables, because member variables may // have been destructed in multiple threads. 
#if PADDLE_WITH_TENSORRT - paddle::inference::Singleton< - inference::tensorrt::TRTEngineManager>::Global() - .DeleteAll(); + auto &block = prog->Block(0); + for (auto &op_desc : block.AllOps()) { + if (op_desc->Type() == "tensorrt_engine") { + std::string engine_key = + BOOST_GET_CONST(std::string, op_desc->GetAttr("engine_key")); + int engine_predictor_id = + BOOST_GET_CONST(int, op_desc->GetAttr("predictor_id")); + std::string engine_name = + engine_key + std::to_string(engine_predictor_id); + if (paddle::inference::Singleton< + inference::tensorrt::TRTEngineManager>::Global() + .Has(engine_name)) { + paddle::inference::Singleton< + inference::tensorrt::TRTEngineManager>::Global() + .DeleteKey(engine_name); + } + } + } #endif delete prog; }); diff --git a/paddle/fluid/inference/tensorrt/engine.h b/paddle/fluid/inference/tensorrt/engine.h index 29324f290064cb2f22dcdebe8fb99e7e99748719..e22c2488d3b8b63746ad9fd19eaa724ce2efa8f7 100644 --- a/paddle/fluid/inference/tensorrt/engine.h +++ b/paddle/fluid/inference/tensorrt/engine.h @@ -631,6 +631,14 @@ class TRTEngineManager { } } + void DeleteKey(const std::string& key) { + auto iter = engines_.find(key); + if (iter != engines_.end()) { + iter->second.reset(nullptr); + engines_.erase(iter); + } + } + private: std::unordered_map<std::string, std::unique_ptr<TRTEngine>> engines_; };