diff --git a/paddle/fluid/inference/api/demo_ci/CMakeLists.txt b/paddle/fluid/inference/api/demo_ci/CMakeLists.txt index 7edb6270e2bf5ad190c9f05696e91816a642a270..08a1a5428193c2d506f511112e4a26d73c382ff1 100644 --- a/paddle/fluid/inference/api/demo_ci/CMakeLists.txt +++ b/paddle/fluid/inference/api/demo_ci/CMakeLists.txt @@ -72,7 +72,10 @@ if(WITH_GPU) endif() if (USE_TENSORRT AND WITH_GPU) - set(TENSORRT_ROOT ${PADDLE_LIB_THIRD_PARTY_PATH}tensorrt) + set(TENSORRT_ROOT "" CACHE STRING "The root directory of the TensorRT library") + if("${TENSORRT_ROOT}" STREQUAL "") + message(FATAL_ERROR "TENSORRT_ROOT is empty. Please set it on the CMake command line, e.g. -DTENSORRT_ROOT=/path/to/TensorRT") + endif() set(TENSORRT_INCLUDE_DIR ${TENSORRT_ROOT}/include) set(TENSORRT_LIB_DIR ${TENSORRT_ROOT}/lib) endif() diff --git a/paddle/fluid/inference/api/demo_ci/run_windows_demo.bat b/paddle/fluid/inference/api/demo_ci/run_windows_demo.bat index fe0a3dc3f26241e3520f1068c583d7f1a4560c1f..5199b83413af87eacba6f26f4fc0a9acb6a39808 100644 --- a/paddle/fluid/inference/api/demo_ci/run_windows_demo.bat +++ b/paddle/fluid/inference/api/demo_ci/run_windows_demo.bat @@ -87,14 +87,7 @@ IF NOT EXIST "%source_path%\%demo_name%.cc" ( if "%demo_name%"=="windows_mobilenet" set model_name=mobilenet if "%demo_name%"=="vis_demo" set model_name=mobilenet if "%demo_name%"=="simple_on_word2vec" set model_name=word2vec.inference.model -if "%demo_name%"=="trt_mobilenet_demo" ( - echo "The trt_mobilenet_demo need tensorRT inference library" - if NOT exist "%paddle_infernece_lib%\third_party\install\tensorrt" ( - echo "------------It's not a tensorRT inference library------------" - goto:eof - ) - set model_name=mobilenet -) +if "%demo_name%"=="trt_mobilenet_demo" set model_name=mobilenet rem download model if NOT EXIST "%source_path%\%model_name%.tar.gz" ( diff --git a/python/setup.py.in b/python/setup.py.in index 
56de5ad86c6e61bebb5f273f58f16800e48a582d..f1e9457c19db5c085eb24138c1450f468baa1ec4 100644 --- a/python/setup.py.in +++ b/python/setup.py.in @@ -252,11 +252,6 @@ package_data['paddle.libs']= [] package_data['paddle.libs']=[('libwarpctc' if os.name != 'nt' else 'warpctc') + ext_name] shutil.copy('${WARPCTC_LIBRARIES}', libs_path) -if '${TENSORRT_FOUND}' == 'ON' and os.name == 'nt': - shutil.copy(os.path.join('${TENSORRT_LIBRARY_DIR}', '${TR_INFER_RT}'), libs_path) - shutil.copy(os.path.join('${TENSORRT_LIBRARY_DIR}', '${TR_INFER_PLUGIN_RT}'), libs_path) - package_data['paddle.libs'] += ['${TR_INFER_RT}', '${TR_INFER_PLUGIN_RT}'] - if '${WITH_MKL}' == 'ON': shutil.copy('${MKLML_SHARED_LIB}', libs_path) shutil.copy('${MKLML_SHARED_IOMP_LIB}', libs_path)