diff --git a/deploy/cpp_infer/CMakeLists.txt b/deploy/cpp_infer/CMakeLists.txt
index 1188336730ea9fbaac11a84250a3be3e418ec5f5..90f62345de8524143bd7a6f2631b64f3f8fb0d02 100644
--- a/deploy/cpp_infer/CMakeLists.txt
+++ b/deploy/cpp_infer/CMakeLists.txt
@@ -40,6 +40,7 @@ endif()
 if (WIN32)
     include_directories("${PADDLE_LIB}/paddle/fluid/inference")
     include_directories("${PADDLE_LIB}/paddle/include")
+    link_directories("${PADDLE_LIB}/paddle/lib")
     link_directories("${PADDLE_LIB}/paddle/fluid/inference")
     find_package(OpenCV REQUIRED PATHS ${OPENCV_DIR}/build/ NO_DEFAULT_PATH)
 
@@ -136,22 +137,22 @@ else()
     set(MATH_LIB ${PADDLE_LIB}/third_party/install/openblas/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX})
 endif()
 
-# Note: libpaddle_inference_api.so/a must put before libpaddle_fluid.so/a
+# Note: libpaddle_inference_api.so/a must be put before libpaddle_inference.so/a
 if(WITH_STATIC_LIB)
     if(WIN32)
         set(DEPS
-            ${PADDLE_LIB}/paddle/lib/paddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+            ${PADDLE_LIB}/paddle/lib/paddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
     else()
         set(DEPS
-            ${PADDLE_LIB}/paddle/lib/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+            ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
     endif()
 else()
     if(WIN32)
         set(DEPS
-            ${PADDLE_LIB}/paddle/lib/paddle_fluid${CMAKE_SHARED_LIBRARY_SUFFIX})
+            ${PADDLE_LIB}/paddle/lib/paddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX})
     else()
         set(DEPS
-            ${PADDLE_LIB}/paddle/lib/libpaddle_fluid${CMAKE_SHARED_LIBRARY_SUFFIX})
+            ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX})
     endif()
 endif(WITH_STATIC_LIB)
 
diff --git a/deploy/cpp_infer/readme.md b/deploy/cpp_infer/readme.md
index f81d9c75e93808b81f0659ccf46b629091b2c9fb..3e5c12867d2845d46972b435a8ec85eed226f0ba 100644
--- a/deploy/cpp_infer/readme.md
+++ b/deploy/cpp_infer/readme.md
@@ -119,7 +119,8 @@ build/paddle_inference_install_dir/
 
 #### 1.2.2 直接下载安装
 
-* [Paddle预测库官网](https://www.paddlepaddle.org.cn/documentation/docs/zh/advanced_guide/inference_deployment/inference/build_and_install_lib_cn.html)上提供了不同cuda版本的Linux预测库,可以在官网查看并选择合适的预测库版本。
+* [Paddle预测库官网](https://www.paddlepaddle.org.cn/documentation/docs/zh/advanced_guide/inference_deployment/inference/build_and_install_lib_cn.html)上提供了不同cuda版本的Linux预测库,可以在官网查看并选择合适的预测库版本(*建议选择paddle版本>=2.0.1版本的预测库* )。
+
 
 * 下载之后使用下面的方法解压。
 
diff --git a/deploy/cpp_infer/readme_en.md b/deploy/cpp_infer/readme_en.md
index 8a0bd62ecc6bf617c6e2954d1080bf97a6582acd..a51977326c2ee848773be34b17e396b6a166f80b 100644
--- a/deploy/cpp_infer/readme_en.md
+++ b/deploy/cpp_infer/readme_en.md
@@ -78,7 +78,7 @@ opencv3/
 
 #### 1.2.1 Compile from the source code
 
-* If you want to get the latest Paddle inference library features, you can download the latest code from Paddle github repository and compile the inference library from the source code.
+* If you want to get the latest Paddle inference library features, you can download the latest code from Paddle github repository and compile the inference library from the source code. It is recommended to use an inference library built with paddle version 2.0.1 or later.
 
 * You can refer to [Paddle inference library] (https://www.paddlepaddle.org.cn/documentation/docs/en/advanced_guide/inference_deployment/inference/build_and_install_lib_en.html) to get the Paddle source code from github, and then compile To generate the latest inference library. The method of using git to access the code is as follows.
 
diff --git a/deploy/cpp_infer/tools/config.txt b/deploy/cpp_infer/tools/config.txt
index 24e4ef0de7d844ba4bd6c11f2cba08766c0e5ddf..28085ca408d279fc61a1bce1abf1df9c05115c78 100644
--- a/deploy/cpp_infer/tools/config.txt
+++ b/deploy/cpp_infer/tools/config.txt
@@ -3,7 +3,7 @@ use_gpu 0
 gpu_id 0
 gpu_mem 4000
 cpu_math_library_num_threads 10
-use_mkldnn 0
+use_mkldnn 1
 
 # det config
 max_side_len 960
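
For context on how the config.txt switches (use_mkldnn, cpu_math_library_num_threads) reach the renamed paddle_inference library at runtime, the sketch below shows a typical CPU predictor setup against the Paddle Inference 2.x C++ API. It is an illustrative sketch only, not PaddleOCR's ocr_det/ocr_rec sources; the helper name CreateCpuPredictor and the inference.pdmodel/inference.pdiparams file names are assumptions.

// Minimal sketch (assumed helper, not PaddleOCR's actual sources): how the
// tools/config.txt switches map onto the Paddle Inference 2.x C++ API that
// the renamed paddle_inference library exposes.
#include <memory>
#include <string>

#include "paddle_inference_api.h"  // shipped in ${PADDLE_LIB}/paddle/include

std::shared_ptr<paddle_infer::Predictor> CreateCpuPredictor(
    const std::string &model_dir, bool use_mkldnn, int cpu_threads) {
  paddle_infer::Config config;
  // Assumed Paddle 2.0-format model file names inside model_dir.
  config.SetModel(model_dir + "/inference.pdmodel",
                  model_dir + "/inference.pdiparams");
  config.DisableGpu();                              // use_gpu 0
  if (use_mkldnn) {
    config.EnableMKLDNN();                          // use_mkldnn 1
  }
  config.SetCpuMathLibraryNumThreads(cpu_threads);  // cpu_math_library_num_threads 10
  return paddle_infer::CreatePredictor(config);
}

Note that enabling use_mkldnn only takes effect when the inference library itself was built with MKL-DNN support, which is why the CMake configuration selects the MKL/MKL-DNN libraries (falling back to openblas otherwise).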