diff --git a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst
index 4787e2c8ee8d71ead611efd2e33aed1ebca7f737..de43de186d5ccb4875bf570e5749ddbad8b19b7b 100644
--- a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst
+++ b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst
@@ -7,15 +7,16 @@
 -------------

 .. csv-table:: c++预测库列表
-   :header: "版本说明", "预测库(1.4版本)", "预测库(develop版本)"
+   :header: "版本说明", "预测库(1.5版本)", "预测库(develop版本)"
    :widths: 1, 3, 3

-   "cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-   "cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-   "cpu_noavx_openblas","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-   "cuda8.0_cudnn5_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-   "cuda8.0_cudnn7_avx_mkl","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-   "cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cpu_noavx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cuda8.0_cudnn7_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cuda8.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cuda10.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"


 从源码编译
@@ -99,4 +100,4 @@ version.txt 中记录了该预测库的版本信息,包括Git Commit ID、使
     WITH_MKLDNN: ON
     WITH_GPU: ON
     CUDA version: 8.0
-    CUDNN version: v5
+    CUDNN version: v7
diff --git a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst
index d64e5b0fa6d649e539916aecee76dbd284936616..21d8c0cbe0219ff19fe8cd3ebebcbe067b13e1f7 100644
--- a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst
+++ b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst
@@ -7,15 +7,16 @@ Direct Download and Installation
 ---------------------------------

 .. csv-table:: c++ inference library list
-   :header: "version description", "inference library(1.4 version)", "inference library(develop version)"
+   :header: "version description", "inference library(1.5 version)", "inference library(develop version)"
    :widths: 1, 3, 3

-   "cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-   "cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-   "cpu_noavx_openblas","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-   "cuda8.0_cudnn5_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-   "cuda8.0_cudnn7_avx_mkl","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-   "cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cpu_noavx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cuda8.0_cudnn7_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cuda8.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+   "cuda10.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"


 Build from Source Code
@@ -102,4 +103,4 @@ The version information of the inference library is recorded in version.txt, inc
     WITH_MKLDNN: ON
     WITH_GPU: ON
     CUDA version: 8.0
-    CUDNN version: v5
+    CUDNN version: v7
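
For context, each row of the updated tables points at a prebuilt fluid_inference.tgz archive, and the version.txt touched by the second hunk of each patch ships inside that archive. Below is a minimal sketch of the usual fetch-and-inspect flow; the URL is a placeholder (the real links are the ones the table cells reference, which are elided here), and the extracted directory name may differ between releases:

    # Placeholder URL -- substitute the link for your configuration from the table above.
    wget https://example.com/path/to/fluid_inference.tgz
    tar -xzf fluid_inference.tgz
    # version.txt records the Git commit and the build switches shown in the patch,
    # e.g. WITH_MKLDNN / WITH_GPU plus "CUDA version: 8.0" and "CUDNN version: v7".
    cat fluid_inference/version.txt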