diff --git a/CMakeLists.txt b/CMakeLists.txt
index 6b02477e4b454b684bcdcc55ca2e53995accc557..7d61ce76fd9ddd2c70fced654efd08b02ce94b52 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -107,7 +107,6 @@ if (SERVER OR CLIENT)
     include(external/gflags)
     include(external/glog)
     include(external/utf8proc)
-    include(external/jemalloc)
     if (WITH_PYTHON)
         include(external/pybind11)
         include(external/python)
diff --git a/cmake/external/jemalloc.cmake b/cmake/external/jemalloc.cmake
deleted file mode 100644
index deac8175b07cc1f2312a5420c49e1ed069eb3a7f..0000000000000000000000000000000000000000
--- a/cmake/external/jemalloc.cmake
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-INCLUDE(ExternalProject)
-
-SET(JEMALLOC_SOURCES_DIR ${THIRD_PARTY_PATH}/jemalloc)
-SET(JEMALLOC_INSTALL_DIR ${THIRD_PARTY_PATH}/install/jemalloc)
-SET(JEMALLOC_INCLUDE_DIR "${JEMALLOC_INSTALL_DIR}/include" CACHE PATH "jemalloc include directory." FORCE)
-
-ExternalProject_Add(
-    extern_jemalloc
-    ${EXTERNAL_PROJECT_LOG_ARGS}
-    GIT_REPOSITORY "https://github.com/jemalloc/jemalloc.git"
-    GIT_TAG "5.2.1"
-    PREFIX ${JEMALLOC_SOURCES_DIR}
-    CONFIGURE_COMMAND ""
-    BUILD_COMMAND ""
-    INSTALL_COMMAND cd ${JEMALLOC_SOURCES_DIR}/src/extern_jemalloc/ && sh autogen.sh
-      && make
-      && mkdir -p ${JEMALLOC_INSTALL_DIR}/lib/
-      && cp ${JEMALLOC_SOURCES_DIR}/src/extern_jemalloc/lib/libjemalloc.a ${JEMALLOC_INSTALL_DIR}/lib
-      && cp ${JEMALLOC_SOURCES_DIR}/src/extern_jemalloc/lib/libjemalloc_pic.a ${JEMALLOC_INSTALL_DIR}/lib
-      && cp -r ${JEMALLOC_SOURCES_DIR}/src/extern_jemalloc/include/jemalloc ${JEMALLOC_INCLUDE_DIR}
-    TEST_COMMAND ""
-)
-
diff --git a/paddle_inference/paddle/include/paddle_engine.h b/paddle_inference/paddle/include/paddle_engine.h
index 14fa447c6fd18b1e6a36b5106ed2851125a00823..bf8c98ede60bc4266965d1aa12e2627dd0d0647a 100644
--- a/paddle_inference/paddle/include/paddle_engine.h
+++ b/paddle_inference/paddle/include/paddle_engine.h
@@ -84,9 +84,12 @@ const std::string getFileBySuffix(
   while ((dirp = readdir(dp)) != nullptr) {
     if (dirp->d_type == DT_REG) {
       for (int idx = 0; idx < suffixVector.size(); ++idx) {
-        if (std::string(dirp->d_name).find(suffixVector[idx]) !=
-            std::string::npos) {
-          fileName = static_cast<std::string>(dirp->d_name);
+        std::string fileName_in_Dir = static_cast<std::string>(dirp->d_name);
+        if (fileName_in_Dir.length() >= suffixVector[idx].length() &&
+            fileName_in_Dir.substr(
+                fileName_in_Dir.length() - suffixVector[idx].length(),
+                suffixVector[idx].length()) == suffixVector[idx]) {
+          fileName = fileName_in_Dir;
           break;
         }
       }
@@ -166,8 +169,10 @@ class PaddleInferenceEngine : public EngineCore {
   }
   Config config;
-  std::vector<std::string> suffixParaVector = {".pdiparams", "__params__", "params"};
-  std::vector<std::string> suffixModelVector = {".pdmodel", "__model__", "model"};
+  std::vector<std::string> suffixParaVector = {
+      ".pdiparams", "__params__", "params"};
+  std::vector<std::string> suffixModelVector = {
+      ".pdmodel", "__model__", "model"};
   std::string paraFileName =
       getFileBySuffix(model_path, suffixParaVector);
   std::string modelFileName =
       getFileBySuffix(model_path, suffixModelVector);
@@ -273,23 +278,20 @@ class PaddleInferenceEngine : public EngineCore {
       config.SetXpuDeviceId(gpu_id);
     }

-    if (engine_conf.has_use_ascend_cl() &&
-        engine_conf.use_ascend_cl()) {
+    if (engine_conf.has_use_ascend_cl() && engine_conf.use_ascend_cl()) {
       if (engine_conf.has_use_lite() && engine_conf.use_lite()) {
-        // for ascend 310
+        // for ascend 310
        FLAGS_nnadapter_device_names = "huawei_ascend_npu";
        FLAGS_nnadapter_context_properties =
-            "HUAWEI_ASCEND_NPU_SELECTED_DEVICE_IDS=" +
-            std::to_string(gpu_id);
+            "HUAWEI_ASCEND_NPU_SELECTED_DEVICE_IDS=" + std::to_string(gpu_id);
        FLAGS_nnadapter_model_cache_dir = "";
        config.NNAdapter()
-          .Enable()
-          .SetDeviceNames({FLAGS_nnadapter_device_names})
-          .SetContextProperties(FLAGS_nnadapter_context_properties)
-          .SetModelCacheDir(FLAGS_nnadapter_model_cache_dir);
+            .Enable()
+            .SetDeviceNames({FLAGS_nnadapter_device_names})
+            .SetContextProperties(FLAGS_nnadapter_context_properties)
+            .SetModelCacheDir(FLAGS_nnadapter_model_cache_dir);
        LOG(INFO) << "Enable Lite NNAdapter for Ascend,"
-                  << "nnadapter_device_names="
-                  << FLAGS_nnadapter_device_names
+                  << "nnadapter_device_names=" << FLAGS_nnadapter_device_names
                   << ",nnadapter_context_properties="
                   << FLAGS_nnadapter_context_properties
                   << ",nnadapter_model_cache_dir="
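
Note on the `getFileBySuffix` change in this diff: the old `std::string::find()` test accepted a suffix anywhere in the filename, so an unrelated file whose name merely *contains* e.g. `model` could be picked up. The patch replaces it with a true ends-with check. Below is a minimal standalone sketch of that rule; the helper name `ends_with` and the sample filenames are illustrative assumptions, not part of this patch:

```cpp
#include <iostream>
#include <string>
#include <vector>

// Ends-with rule as introduced in getFileBySuffix: a name matches only when
// the candidate suffix appears at the very end of the string, unlike the old
// std::string::find(), which matched the suffix at any position.
bool ends_with(const std::string& name, const std::string& suffix) {
  return name.size() >= suffix.size() &&
         name.compare(name.size() - suffix.size(), suffix.size(), suffix) == 0;
}

int main() {
  // Suffix priority list mirrors suffixModelVector in the patch.
  const std::vector<std::string> suffixes = {".pdmodel", "__model__", "model"};
  // "serving.pdmodel" is a genuine match; "model.pdiparams.info" is the kind
  // of name the old find()-based check would wrongly accept (hypothetical).
  for (const std::string& name : {std::string("serving.pdmodel"),
                                  std::string("model.pdiparams.info")}) {
    bool matched = false;
    for (const std::string& suffix : suffixes) {
      if (ends_with(name, suffix)) {
        matched = true;
        break;
      }
    }
    std::cout << name << (matched ? " matches" : " does not match") << "\n";
  }
  return 0;
}
```

Printing `serving.pdmodel matches` and `model.pdiparams.info does not match`, this reproduces the intent of the patched loop: only the real model/params files are selected even when sibling files share those strings as infixes.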