diff --git a/deploy/cpp/scripts/build.sh b/deploy/cpp/scripts/build.sh
index ffc9a4ff2d4fcf44a0d40cfa611a2d811f3165a8..fb6ca625e9bcc1a84dbf6728e809b08e6432d075 100644
--- a/deploy/cpp/scripts/build.sh
+++ b/deploy/cpp/scripts/build.sh
@@ -7,6 +7,9 @@ WITH_MKL=ON
 # Whether to integrate TensorRT (effective only when WITH_GPU=ON)
 WITH_TENSORRT=OFF
+# Whether to use the 2.0rc1 inference library
+USE_PADDLE_20RC1=OFF
+
 # TensorRT include path
 TENSORRT_INC_DIR=/path/to/tensorrt/lib
diff --git a/dygraph/deploy/cpp/CMakeLists.txt b/dygraph/deploy/cpp/CMakeLists.txt
index 96457158f6b38b0429e3f138adb2e3c203216709..0acb361546b535d209755d3274064fde4261be61 100644
--- a/dygraph/deploy/cpp/CMakeLists.txt
+++ b/dygraph/deploy/cpp/CMakeLists.txt
@@ -3,8 +3,10 @@ project(PaddleObjectDetector CXX C)
 option(WITH_MKL "Compile demo with MKL/OpenBlas support, default use MKL." ON)
 option(WITH_GPU "Compile demo with GPU/CPU, default use CPU." ON)
-option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." ON)
-option(WITH_TENSORRT "Compile demo with TensorRT." OFF)
+option(WITH_STATIC_LIB "Compile demo with static/shared library, default use shared." OFF)
+option(WITH_TENSORRT "Compile demo with TensorRT." OFF)
+option(USE_PADDLE_20RC1 "Compile demo with paddle_inference_lib 2.0rc1" ON)
+
 SET(PADDLE_DIR "" CACHE PATH "Location of libraries")
 SET(OPENCV_DIR "" CACHE PATH "Location of libraries")
@@ -36,6 +38,7 @@ endif()
 if (NOT DEFINED PADDLE_DIR OR ${PADDLE_DIR} STREQUAL "")
     message(FATAL_ERROR "please set PADDLE_DIR with -DPADDLE_DIR=/path/paddle_inference_dir")
 endif()
+message(STATUS "PADDLE_DIR is: ${PADDLE_DIR}")
 
 if (NOT DEFINED OPENCV_DIR OR ${OPENCV_DIR} STREQUAL "")
     message(FATAL_ERROR "please set OPENCV_DIR with -DOPENCV_DIR=/path/opencv")
@@ -70,6 +73,8 @@ link_directories("${PADDLE_DIR}/third_party/install/xxhash/lib")
 link_directories("${PADDLE_DIR}/paddle/lib/")
 link_directories("${CMAKE_CURRENT_BINARY_DIR}")
 
+
+
 if (WIN32)
   include_directories("${PADDLE_DIR}/paddle/fluid/inference")
   include_directories("${PADDLE_DIR}/paddle/include")
@@ -151,24 +156,62 @@ else()
     set(MATH_LIB ${PADDLE_DIR}/third_party/install/openblas/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX})
 endif()
 
+
 if (WIN32)
-  if(EXISTS "${PADDLE_DIR}/paddle/fluid/inference/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX}")
+  if (USE_PADDLE_20RC1)
+    # On 2.0rc1 the win32 shared library is named paddle_fluid.dll / paddle_fluid.lib
+    if(EXISTS "${PADDLE_DIR}/paddle/fluid/inference/paddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX}")
       set(DEPS
-          ${PADDLE_DIR}/paddle/fluid/inference/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+          ${PADDLE_DIR}/paddle/fluid/inference/paddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+    else()
+      set(DEPS
+          ${PADDLE_DIR}/paddle/lib/paddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+    endif()
   else()
-    set(DEPS
-        ${PADDLE_DIR}/paddle/lib/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+    # Before 2.0rc1 the win32 shared library is named libpaddle_fluid.dll / libpaddle_fluid.lib
+    if(EXISTS "${PADDLE_DIR}/paddle/fluid/inference/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX}")
+      set(DEPS
+          ${PADDLE_DIR}/paddle/fluid/inference/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+    else()
+      set(DEPS
+          ${PADDLE_DIR}/paddle/lib/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+    endif()
+  endif()
+endif()
+
+
+if (WITH_STATIC_LIB)
+  if (WIN32 AND USE_PADDLE_20RC1)
+    message("This combination actually builds against the shared library")
+    set(DEPS ${PADDLE_DIR}/paddle/lib/paddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+  else()
+    set(DEPS ${PADDLE_DIR}/paddle/lib/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+  endif()
+else()
+  if (WIN32)
+    if (USE_PADDLE_20RC1)
+      # On 2.0rc1 the win32 shared library is named paddle_fluid.dll / paddle_fluid.lib
+      set(DEPS ${PADDLE_DIR}/paddle/lib/paddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+    else()
+      # Before 2.0rc1 the win32 shared library is named libpaddle_fluid.dll / libpaddle_fluid.lib
+      set(DEPS ${PADDLE_DIR}/paddle/lib/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+    endif()
+  else()
+    # The Linux shared library is named libpaddle_fluid.so
+    set(DEPS ${PADDLE_DIR}/paddle/lib/libpaddle_fluid${CMAKE_SHARED_LIBRARY_SUFFIX})
   endif()
 endif()
 
+
 if(WITH_STATIC_LIB)
   set(DEPS
       ${PADDLE_DIR}/paddle/lib/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
 else()
   set(DEPS
-      ${PADDLE_DIR}/paddle/lib/libpaddle_fluid${CMAKE_SHARED_LIBRARY_SUFFIX})
+      ${PADDLE_DIR}/paddle/lib/${WIN32_PADDLE_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
 endif()
 
+
 if (NOT WIN32)
   set(DEPS ${DEPS}
     ${MATH_LIB} ${MKLDNN_LIB}
@@ -228,3 +271,10 @@ if (WIN32 AND WITH_MKL)
         COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./release/mkldnn.dll
   )
 endif()
+
+
+if (WIN32 AND USE_PADDLE_20RC1)
+  add_custom_command(TARGET main POST_BUILD
+    COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/paddle/lib/paddle_fluid.dll ./release/paddle_fluid.dll
+  )
+endif()
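The DEPS logic added above now overlaps: the pre-existing `if(WITH_STATIC_LIB)` block near the end of the hunk re-assigns `DEPS` after the new version-aware blocks have run, and `WIN32_PADDLE_LIB_NAME` is not defined in any hunk shown (presumably it is set elsewhere in the file). A minimal sketch of how the naming rule could be expressed once; `PADDLE_LIB_NAME` is a hypothetical helper variable, not part of this patch:

```cmake
# Sketch only: derive the inference library base name once and reuse it.
# Naming rule encoded by the hunks above:
#   Windows, 2.0rc1:        paddle_fluid.lib / paddle_fluid.dll  (no "lib" prefix)
#   Windows, before 2.0rc1: libpaddle_fluid.lib / libpaddle_fluid.dll
#   Linux:                  libpaddle_fluid.so
if (WIN32 AND USE_PADDLE_20RC1)
  set(PADDLE_LIB_NAME paddle_fluid)
else()
  set(PADDLE_LIB_NAME libpaddle_fluid)
endif()

if (WITH_STATIC_LIB OR WIN32)
  # On Windows the shared build still links the import .lib.
  set(DEPS ${PADDLE_DIR}/paddle/lib/${PADDLE_LIB_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX})
else()
  set(DEPS ${PADDLE_DIR}/paddle/lib/${PADDLE_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
endif()
```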
diff --git a/dygraph/deploy/cpp/docs/linux_build.md b/dygraph/deploy/cpp/docs/linux_build.md
index 7ea27d1e255c8d0467c9aa5e086e386c16b8d198..f22739638d760f62fda6d6c5938891978fdc77b6 100644
--- a/dygraph/deploy/cpp/docs/linux_build.md
+++ b/dygraph/deploy/cpp/docs/linux_build.md
@@ -19,7 +19,7 @@
 ### Step2: Download the PaddlePaddle C++ inference library fluid_inference
 
-The PaddlePaddle C++ inference library ships prebuilt packages for different `CPU` and `CUDA` versions; download the one matching your environment: [C++ inference library download list](https://www.paddlepaddle.org.cn/documentation/docs/zh/develop/advanced_guide/inference_deployment/inference/build_and_install_lib_cn.html)
+The PaddlePaddle C++ inference library ships prebuilt packages for different `CPU` and `CUDA` versions; download the one matching your environment: [C++ inference library download list](https://www.paddlepaddle.org.cn/documentation/docs/zh/2.0-rc1/guides/05_inference_deployment/inference/build_and_install_lib_cn.html)
 
 After downloading and extracting, the `/root/projects/fluid_inference` directory contains:
diff --git a/dygraph/deploy/cpp/docs/windows_vs2019_build.md b/dygraph/deploy/cpp/docs/windows_vs2019_build.md
index a4f4b1973ea2bba3a2c9ca1c71f93e3936ce3159..108dd71efa061ce355c1de4ac5eaa21e69c59de1 100644
--- a/dygraph/deploy/cpp/docs/windows_vs2019_build.md
+++ b/dygraph/deploy/cpp/docs/windows_vs2019_build.md
@@ -24,7 +24,7 @@ git clone https://github.com/PaddlePaddle/PaddleDetection.git
 ### Step2: Download the PaddlePaddle C++ inference library fluid_inference
 
-The PaddlePaddle C++ inference library ships prebuilt packages for different `CPU` and `CUDA` versions; download the one matching your environment: [C++ inference library download list](https://www.paddlepaddle.org.cn/documentation/docs/zh/develop/advanced_guide/inference_deployment/inference/windows_cpp_inference.html)
+The PaddlePaddle C++ inference library ships prebuilt packages for different `CPU` and `CUDA` versions; download the one matching your environment: [C++ inference library download list](https://www.paddlepaddle.org.cn/documentation/docs/zh/2.0-rc1/guides/05_inference_deployment/inference/windows_cpp_inference.html)
 
 After extracting, the `D:\projects\fluid_inference` directory contains:
 ```
@@ -72,6 +72,7 @@ fluid_inference
 | *CUDNN_LIB | Path to the cuDNN library |
 | OPENCV_DIR | Path to the OpenCV installation |
 | PADDLE_DIR | Path to the Paddle inference library |
+| USE_PADDLE_20RC1 | Whether to use the 2.0rc1 inference library. With 2.0rc1 the library name changes on Windows, and only shared-library builds are supported |
 
 **Note:**
 1. When using the `CPU` version of the inference library, uncheck `WITH_GPU`
 2. When using the `openblas` version, uncheck `WITH_MKL`
 ![step4](https://paddleseg.bj.bcebos.com/inference/vs2019_step5.png)
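Because `USE_PADDLE_20RC1` changes the file names CMake looks for on Windows, a flag that does not match the downloaded library only fails at link time. A hypothetical configure-time guard (not part of this patch) could fail fast instead:

```cmake
# Sketch: check that the library implied by USE_PADDLE_20RC1 exists in PADDLE_DIR.
if (WIN32)
  if (USE_PADDLE_20RC1)
    set(EXPECTED_PADDLE_LIB "${PADDLE_DIR}/paddle/lib/paddle_fluid.lib")
  else()
    set(EXPECTED_PADDLE_LIB "${PADDLE_DIR}/paddle/lib/libpaddle_fluid.lib")
  endif()
  if (NOT EXISTS "${EXPECTED_PADDLE_LIB}")
    message(FATAL_ERROR "USE_PADDLE_20RC1=${USE_PADDLE_20RC1}, but ${EXPECTED_PADDLE_LIB} "
                        "was not found; check that the downloaded inference library matches the flag.")
  endif()
endif()
```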
diff --git a/dygraph/deploy/cpp/scripts/build.sh b/dygraph/deploy/cpp/scripts/build.sh
index ffc9a4ff2d4fcf44a0d40cfa611a2d811f3165a8..73ba6527c2c15fd7eab715ddc9aaf704bd0e3ee6 100644
--- a/dygraph/deploy/cpp/scripts/build.sh
+++ b/dygraph/deploy/cpp/scripts/build.sh
@@ -7,6 +7,9 @@ WITH_MKL=ON
 # Whether to integrate TensorRT (effective only when WITH_GPU=ON)
 WITH_TENSORRT=OFF
+# Whether to use the 2.0rc1 inference library
+USE_PADDLE_20RC1=ON
+
 # TensorRT include path
 TENSORRT_INC_DIR=/path/to/tensorrt/lib
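Note that the defaults now disagree: the CMakeLists option defaults `USE_PADDLE_20RC1` to ON, `deploy/cpp/scripts/build.sh` sets it to OFF, and the dygraph script sets it to ON; the cmake invocation that forwards these shell variables lies outside the hunks shown. Whatever `-DUSE_PADDLE_20RC1=...` value the script passes ends up in the CMake cache and takes precedence over the `option()` default, as in this sketch:

```cmake
# Sketch: option() only supplies a default; an explicit -DUSE_PADDLE_20RC1=...
# from build.sh creates a cache entry that option() will not overwrite.
option(USE_PADDLE_20RC1 "Compile demo with paddle_inference_lib 2.0rc1" ON)
message(STATUS "USE_PADDLE_20RC1 resolved to: ${USE_PADDLE_20RC1}")
```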