diff --git a/deploy/cpp/CMakeLists.txt b/deploy/cpp/CMakeLists.txt
index e27cbf8ccb0c2d4ee2a4ae08764d56d4c9aaff4d..a5e892a802bde82a0720ab094071238dc7bbe845 100644
--- a/deploy/cpp/CMakeLists.txt
+++ b/deploy/cpp/CMakeLists.txt
@@ -9,6 +9,7 @@ option(WITH_TENSORRT "Compile demo with TensorRT." OFF)
 SET(PADDLE_DIR "" CACHE PATH "Location of libraries")
 SET(OPENCV_DIR "" CACHE PATH "Location of libraries")
 SET(CUDA_LIB "" CACHE PATH "Location of libraries")
+SET(CUDNN_LIB "" CACHE PATH "Location of libraries")
 SET(TENSORRT_DIR "" CACHE PATH "Compile demo with TensorRT")
 
 include(cmake/yaml-cpp.cmake)
@@ -51,7 +52,6 @@ endif()
 if(EXISTS "${PADDLE_DIR}/third_party/install/snappystream/include")
     include_directories("${PADDLE_DIR}/third_party/install/snappystream/include")
 endif()
-include_directories("${PADDLE_DIR}/third_party/install/zlib/include")
 
 include_directories("${PADDLE_DIR}/third_party/boost")
 include_directories("${PADDLE_DIR}/third_party/eigen3")
@@ -62,7 +62,6 @@ if(EXISTS "${PADDLE_DIR}/third_party/install/snappystream/lib")
     link_directories("${PADDLE_DIR}/third_party/install/snappystream/lib")
 endif()
 
-link_directories("${PADDLE_DIR}/third_party/install/zlib/lib")
 link_directories("${PADDLE_DIR}/third_party/install/protobuf/lib")
 link_directories("${PADDLE_DIR}/third_party/install/glog/lib")
 link_directories("${PADDLE_DIR}/third_party/install/gflags/lib")
@@ -183,7 +182,7 @@ if (NOT WIN32)
 else()
   set(DEPS ${DEPS}
       ${MATH_LIB} ${MKLDNN_LIB}
-      glog gflags_static libprotobuf zlibstatic xxhash libyaml-cppmt)
+      glog gflags_static libprotobuf xxhash libyaml-cppmt)
   set(DEPS ${DEPS} libcmt shlwapi)
   if (EXISTS "${PADDLE_DIR}/third_party/install/snappy/lib")
     set(DEPS ${DEPS} snappy)
diff --git a/deploy/cpp/docs/linux_build.md b/deploy/cpp/docs/linux_build.md
index d0667db5c95e19e1063a6315bda2cae59a2a2c23..70788adfd1aa1fdfe8ef0da85794b8fe5773276e 100644
--- a/deploy/cpp/docs/linux_build.md
+++ b/deploy/cpp/docs/linux_build.md
@@ -106,7 +106,7 @@ make
 ./build/main 
 --model_dir=/root/projects/models/yolov3_darknet --image_path=/root/projects/images/test.jpeg
 ```
 
-图片文件`可视化预测结果`会保存在当前目录下`result.jpeg`文件中。
+图片文件`可视化预测结果`会保存在当前目录下`output.jpeg`文件中。
 
 `样例二`:
@@ -114,4 +114,4 @@ make
 #使用 `GPU`预测视频`/root/projects/videos/test.avi`
 ./build/main --model_dir=/root/projects/models/yolov3_darknet --video_path=/root/projects/images/test.avi --use_gpu=1
 ```
-视频文件`可视化预测结果`会保存在当前目录下`result.avi`文件中。
+视频文件`可视化预测结果`会保存在当前目录下`output.avi`文件中。
diff --git a/deploy/cpp/docs/windows_vs2019_build.md b/deploy/cpp/docs/windows_vs2019_build.md
index 0f5b8691717213f15080448017bb90d8d940f920..7f23a983ce0f7f50545d5b4dd7b63746955aa209 100644
--- a/deploy/cpp/docs/windows_vs2019_build.md
+++ b/deploy/cpp/docs/windows_vs2019_build.md
@@ -67,6 +67,7 @@ fluid_inference
 | 参数名 | 含义 |
 | ---- | ---- |
 | *CUDA_LIB | CUDA的库路径 |
+| CUDNN_LIB | CUDNN的库路径 |
 | OPENCV_DIR | OpenCV的安装路径, |
 | PADDLE_DIR | Paddle预测库的路径 |
 
@@ -85,7 +86,7 @@ fluid_inference
 上述`Visual Studio 2019`编译产出的可执行文件在`out\build\x64-Release`目录下，打开`cmd`，并切换到该目录：
 
 ```
-cd D:\projects\PaddleDetection\inference\out\build\x64-Release
+cd D:\projects\PaddleDetection\deploy\cpp\out\build\x64-Release
 ```
 
 可执行文件`main`即为样例的预测程序，其主要的命令行参数如下：
@@ -105,7 +106,7 @@ cd D:\projects\PaddleDetection\inference\out\build\x64-Release
 .\main --model_dir=D:\\models\\yolov3_darknet --image_path=D:\\images\\test.jpeg
 ```
 
-图片文件`可视化预测结果`会保存在当前目录下`result.jpeg`文件中。
+图片文件`可视化预测结果`会保存在当前目录下`output.jpeg`文件中。
 
 `样例二`:
 
@@ -114,4 +115,4 @@ cd D:\projects\PaddleDetection\inference\out\build\x64-Release
 .\main --model_dir=D:\\models\\yolov3_darknet --video_path=D:\\videos\\test.jpeg --use_gpu=1
 ```
 
-视频文件`可视化预测结果`会保存在当前目录下`result.avi`文件中。
+视频文件`可视化预测结果`会保存在当前目录下`output.avi`文件中。