diff --git a/deploy/cpp_infer/CMakeLists.txt b/deploy/cpp_infer/CMakeLists.txt
index 120cf06a04c8f1769bdcd153a74cd6ff0665cf93..bdcfefee2eb4c3fdd12efd021fdbdcfce648fd39 100644
--- a/deploy/cpp_infer/CMakeLists.txt
+++ b/deploy/cpp_infer/CMakeLists.txt
@@ -40,6 +40,7 @@ endif()
 if (WIN32)
     include_directories("${PADDLE_LIB}/paddle/fluid/inference")
     include_directories("${PADDLE_LIB}/paddle/include")
+    link_directories("${PADDLE_LIB}/paddle/lib")
     link_directories("${PADDLE_LIB}/paddle/fluid/inference")
     find_package(OpenCV REQUIRED PATHS ${OPENCV_DIR}/build/ NO_DEFAULT_PATH)
 
@@ -140,22 +141,22 @@ else()
   endif ()
 endif()
 
-# Note: libpaddle_inference_api.so/a must put before libpaddle_fluid.so/a
+# Note: libpaddle_inference_api.so/a must be put before libpaddle_inference.so/a
 if(WITH_STATIC_LIB)
   if(WIN32)
     set(DEPS
-      ${PADDLE_LIB}/paddle/lib/paddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+      ${PADDLE_LIB}/paddle/lib/paddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
   else()
     set(DEPS
-      ${PADDLE_LIB}/paddle/lib/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX})
+      ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
   endif()
 else()
   if(WIN32)
     set(DEPS
-      ${PADDLE_LIB}/paddle/lib/paddle_fluid${CMAKE_SHARED_LIBRARY_SUFFIX})
+      ${PADDLE_LIB}/paddle/lib/paddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX})
   else()
     set(DEPS
-      ${PADDLE_LIB}/paddle/lib/libpaddle_fluid${CMAKE_SHARED_LIBRARY_SUFFIX})
+      ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX})
   endif()
 endif(WITH_STATIC_LIB)
 
diff --git a/deploy/cpp_infer/readme.md b/deploy/cpp_infer/readme.md
index eaa841dc670af1799943ad920811a05816b4b41a..b62b1a4cf10b5abd345528ebf48e6a8de4387469 100644
--- a/deploy/cpp_infer/readme.md
+++ b/deploy/cpp_infer/readme.md
@@ -74,9 +74,10 @@ opencv3/
 
 * There are two ways to obtain the Paddle inference library; both are described in detail below.
 
+
 #### 1.2.1 Direct download and installation
 
-* The [official Paddle inference library site](https://www.paddlepaddle.org.cn/documentation/docs/zh/develop/guides/05_inference_deployment/inference/build_and_install_lib_cn.html) provides Linux inference libraries for different CUDA versions; you can check the site and choose a suitable inference library version.
+* The [official Paddle inference library site](https://www.paddlepaddle.org.cn/documentation/docs/zh/advanced_guide/inference_deployment/inference/build_and_install_lib_cn.html) provides Linux inference libraries for different CUDA versions; you can check the site and choose a suitable inference library version (*an inference library with paddle version >= 2.0.1 is recommended*).
 
 * After downloading, decompress it as follows.
 
@@ -130,8 +131,6 @@ build/paddle_inference_install_dir/
 
 Here, `paddle` is the Paddle library required for C++ inference, and `version.txt` contains the version information of the current inference library.
 
-
-
 ## 2 Getting started
 
 ### 2.1 Export the model as an inference model
diff --git a/deploy/cpp_infer/readme_en.md b/deploy/cpp_infer/readme_en.md
index bdfcb0e6b26dfafbfcee93a947d08d92bfc36619..cfda7ca05d49b9ba2534aa6bfc797425bb0dc6c5 100644
--- a/deploy/cpp_infer/readme_en.md
+++ b/deploy/cpp_infer/readme_en.md
@@ -91,8 +91,8 @@ tar -xf paddle_inference.tgz
 
 Finally you can see the following files in the folder of `paddle_inference/`.
 
 #### 1.2.2 Compile from the source code
-* If you want to get the latest Paddle inference library features, you can download the latest code from Paddle github repository and compile the inference library from the source code.
-* You can refer to [Paddle inference library] (https://www.paddlepaddle.org.cn/documentation/docs/en/develop/guides/05_inference_deployment/inference/build_and_install_lib_en.html) to get the Paddle source code from github, and then compile To generate the latest inference library. The method of using git to access the code is as follows.
+* If you want to get the latest Paddle inference library features, you can download the latest code from the Paddle github repository and compile the inference library from the source code. It is recommended to use an inference library with paddle version greater than or equal to 2.0.1.
+* You can refer to the [Paddle inference library](https://www.paddlepaddle.org.cn/documentation/docs/en/advanced_guide/inference_deployment/inference/build_and_install_lib_en.html) documentation to get the Paddle source code from github and then compile it to generate the latest inference library. The method of obtaining the code with git is as follows.
 
 ```shell
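A minimal sketch of how the library rename above might be checked and exercised locally, assuming the inference library has been extracted to `./paddle_inference` and OpenCV to `./opencv3` (both placeholder paths, not paths from this patch): it lists the shipped Paddle libraries and then passes the `PADDLE_LIB`, `OPENCV_DIR`, and `WITH_STATIC_LIB` variables read by this CMakeLists.txt to a plain cmake configure.

```shell
# Placeholder paths -- adjust to wherever the inference library and OpenCV actually live.
PADDLE_LIB=$(pwd)/paddle_inference
OPENCV_DIR=$(pwd)/opencv3

# Paddle 2.0+ packages ship libpaddle_inference.*; older packages only ship libpaddle_fluid.*
ls "${PADDLE_LIB}/paddle/lib"

# Configure with the variables the CMakeLists.txt reads; WITH_STATIC_LIB=OFF links the
# shared libpaddle_inference instead of the static archive. Additional options (e.g.
# GPU/cuDNN paths) may be required depending on how the package was built.
mkdir -p build && cd build
cmake .. \
  -DPADDLE_LIB="${PADDLE_LIB}" \
  -DOPENCV_DIR="${OPENCV_DIR}" \
  -DWITH_STATIC_LIB=OFF
make -j"$(nproc)"
```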