From b4f078aa9b83e7f95f6f21d0cbdae06237ee4a93 Mon Sep 17 00:00:00 2001
From: Thomas Young
Date: Fri, 11 Dec 2020 18:03:43 +0800
Subject: [PATCH] fix the compile and the dockerfile document

---
 doc/COMPILE.md          | 16 ++++++++++++++--
 doc/COMPILE_CN.md       | 13 ++++++++++++-
 doc/DOCKER_IMAGES.md    |  1 +
 doc/DOCKER_IMAGES_CN.md |  1 +
 4 files changed, 28 insertions(+), 3 deletions(-)

diff --git a/doc/COMPILE.md b/doc/COMPILE.md
index cf0bfdf2..fe8325a7 100644
--- a/doc/COMPILE.md
+++ b/doc/COMPILE.md
@@ -100,14 +100,21 @@ make -j10
 you can execute `make install` to put targets under directory `./output`, you need to add`-DCMAKE_INSTALL_PREFIX=./output`to specify output path to cmake command shown above.
 
 ### Integrated GPU version paddle inference library
-
+CUDA_PATH is the CUDA installation path; check it with `whereis cuda`. It is usually `/usr/local/cuda`.
+CUDNN_LIBRARY and CUDA_CUDART_LIBRARY are the CUDA library directories, usually `/usr/local/cuda/lib64/`.
+
 ``` shell
+export CUDA_PATH='/usr/local/cuda'
+export CUDNN_LIBRARY='/usr/local/cuda/lib64/'
+export CUDA_CUDART_LIBRARY="/usr/local/cuda/lib64/"
+
 mkdir server-build-gpu && cd server-build-gpu
 cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
     -DPYTHON_LIBRARIES=$PYTHONROOT/lib/libpython2.7.so \
     -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
     -DCUDA_TOOLKIT_ROOT_DIR=${CUDA_PATH} \
-    -DCUDNN_LIBRARY=${CUDNN_LIBRARY} \
+    -DCUDNN_LIBRARY=${CUDNN_LIBRARY} \
+    -DCUDA_CUDART_LIBRARY=${CUDA_CUDART_LIBRARY} \
     -DSERVER=ON \
     -DWITH_GPU=ON ..
 make -j10
@@ -116,6 +123,10 @@ make -j10
 ### Integrated TRT version paddle inference library
 
 ```
+export CUDA_PATH='/usr/local/cuda'
+export CUDNN_LIBRARY='/usr/local/cuda/lib64/'
+export CUDA_CUDART_LIBRARY="/usr/local/cuda/lib64/"
+
 mkdir server-build-trt && cd server-build-trt
 cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
     -DPYTHON_LIBRARIES=$PYTHONROOT/lib/libpython2.7.so \
@@ -123,6 +134,7 @@ cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
     -DTENSORRT_ROOT=${TENSORRT_LIBRARY_PATH} \
     -DCUDA_TOOLKIT_ROOT_DIR=${CUDA_PATH} \
     -DCUDNN_LIBRARY=${CUDNN_LIBRARY} \
+    -DCUDA_CUDART_LIBRARY=${CUDA_CUDART_LIBRARY} \
     -DSERVER=ON \
     -DWITH_GPU=ON \
     -DWITH_TRT=ON ..
diff --git a/doc/COMPILE_CN.md b/doc/COMPILE_CN.md
index b3619d9a..3462191c 100644
--- a/doc/COMPILE_CN.md
+++ b/doc/COMPILE_CN.md
@@ -97,14 +97,20 @@ make -j10
 可以执行`make install`把目标产出放在`./output`目录下,cmake阶段需添加`-DCMAKE_INSTALL_PREFIX=./output`选项来指定存放路径。
 
 ### 集成GPU版本Paddle Inference Library
-
+CUDA_PATH 是 CUDA 的安装路径,可以使用 `whereis cuda` 命令确认,通常应该是 `/usr/local/cuda`。
+CUDNN_LIBRARY 和 CUDA_CUDART_LIBRARY 是 CUDA 库文件所在的路径,通常应该是 `/usr/local/cuda/lib64/`。
 ``` shell
+export CUDA_PATH='/usr/local/cuda'
+export CUDNN_LIBRARY='/usr/local/cuda/lib64/'
+export CUDA_CUDART_LIBRARY="/usr/local/cuda/lib64/"
+
 mkdir server-build-gpu && cd server-build-gpu
 cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
     -DPYTHON_LIBRARIES=$PYTHONROOT/lib/libpython2.7.so \
     -DPYTHON_EXECUTABLE=$PYTHONROOT/bin/python \
     -DCUDA_TOOLKIT_ROOT_DIR=${CUDA_PATH} \
     -DCUDNN_LIBRARY=${CUDNN_LIBRARY} \
+    -DCUDA_CUDART_LIBRARY=${CUDA_CUDART_LIBRARY} \
     -DSERVER=ON \
     -DWITH_GPU=ON ..
 make -j10
@@ -113,6 +119,10 @@ make -j10
 ### 集成TensorRT版本Paddle Inference Library
 
 ```
+export CUDA_PATH='/usr/local/cuda'
+export CUDNN_LIBRARY='/usr/local/cuda/lib64/'
+export CUDA_CUDART_LIBRARY="/usr/local/cuda/lib64/"
+
 mkdir server-build-trt && cd server-build-trt
 cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
     -DPYTHON_LIBRARIES=$PYTHONROOT/lib/libpython2.7.so \
@@ -120,6 +130,7 @@ cmake -DPYTHON_INCLUDE_DIR=$PYTHONROOT/include/python2.7/ \
     -DTENSORRT_ROOT=${TENSORRT_LIBRARY_PATH} \
     -DCUDA_TOOLKIT_ROOT_DIR=${CUDA_PATH} \
     -DCUDNN_LIBRARY=${CUDNN_LIBRARY} \
+    -DCUDA_CUDART_LIBRARY=${CUDA_CUDART_LIBRARY} \
     -DSERVER=ON \
     -DWITH_GPU=ON \
     -DWITH_TRT=ON ..
diff --git a/doc/DOCKER_IMAGES.md b/doc/DOCKER_IMAGES.md
index dcaa34b1..a66e91c1 100644
--- a/doc/DOCKER_IMAGES.md
+++ b/doc/DOCKER_IMAGES.md
@@ -28,6 +28,7 @@ You can get images in two ways:
 ## Image description
 
 Runtime images cannot be used for compilation.
+If you need to do secondary development based on the source code, use an image whose tag has the `-devel` suffix.
 
 | Description | OS | TAG | Dockerfile |
 | :----------------------------------------------------------: | :-----: | :--------------------------: | :----------------------------------------------------------: |
diff --git a/doc/DOCKER_IMAGES_CN.md b/doc/DOCKER_IMAGES_CN.md
index 6865eb77..30aca584 100644
--- a/doc/DOCKER_IMAGES_CN.md
+++ b/doc/DOCKER_IMAGES_CN.md
@@ -28,6 +28,7 @@
 ## 镜像说明
 
 运行时镜像不能用于开发编译。
+若需要基于源代码进行二次开发编译,请使用后缀为 `-devel` 的镜像版本。
 
 | 镜像说明 | 操作系统 | TAG | Dockerfile |
 | -------------------------------------------------- | -------- | ---------------------------- | ------------------------------------------------------------ |
-- 
GitLab
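
A quick way to sanity-check the CUDA-related variables introduced by this patch before running the cmake commands: the sketch below assumes the default `/usr/local/cuda` layout mentioned in the notes above. On some systems cuDNN and the CUDA runtime are installed elsewhere (for example under `/usr/lib/x86_64-linux-gnu`), in which case the variables should point there instead.

``` shell
# Sketch: confirm the paths before exporting them and invoking cmake.
# The values below are the common defaults assumed in the docs; adjust as needed.
whereis cuda                                         # locate the CUDA toolkit root, typically /usr/local/cuda

export CUDA_PATH='/usr/local/cuda'                   # passed to -DCUDA_TOOLKIT_ROOT_DIR
export CUDNN_LIBRARY='/usr/local/cuda/lib64/'        # passed to -DCUDNN_LIBRARY
export CUDA_CUDART_LIBRARY='/usr/local/cuda/lib64/'  # passed to -DCUDA_CUDART_LIBRARY

ls "${CUDA_PATH}/bin/nvcc"                           # the toolkit compiler should exist here
ls "${CUDNN_LIBRARY}"libcudnn*                       # cuDNN libraries should be visible here
ls "${CUDA_CUDART_LIBRARY}"libcudart*                # the CUDA runtime library should be visible here
```

If any of the `ls` checks fails, fix the corresponding variable before running the cmake commands from the patch; otherwise cmake will fall back to whatever it can auto-detect, which may not match the intended CUDA installation.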