diff --git a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst
index 1603a2e97661c68b94fce525e0e20409b9b8f43d..1870cc5c661088800b10d22bd91f8c8cd9ac54c4 100644
--- a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst
+++ b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst
@@ -7,17 +7,18 @@
 -------------
 
 .. csv-table::
-    :header: "版本说明", "预测库(1.6.1版本)", "预测库(develop版本)"
+    :header: "版本说明", "预测库(1.6.2版本)", "预测库(develop版本)"
     :widths: 3, 2, 2
 
-    "ubuntu14.04_cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "ubuntu14.04_cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "ubuntu14.04_cpu_noavx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "ubuntu14.04_cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "ubuntu14.04_cuda10.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "ubuntu14.04_cuda8.0_cudnn7_avx_mkl_trt4", "`fluid_inference.tgz `_",
-    "ubuntu14.04_cuda9.0_cudnn7_avx_mkl_trt5", "`fluid_inference.tgz `_",
-    "ubuntu14.04_cuda10.0_cudnn7_avx_mkl_trt5", "`fluid_inference.tgz `_",
+    "ubuntu14.04_cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "ubuntu14.04_cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "ubuntu14.04_cpu_noavx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "ubuntu14.04_cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "ubuntu14.04_cuda10.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "ubuntu14.04_cuda8.0_cudnn7_avx_mkl_trt4", "`fluid_inference.tgz `_",
+    "ubuntu14.04_cuda9.0_cudnn7_avx_mkl_trt5", "`fluid_inference.tgz `_",
+    "ubuntu14.04_cuda10.0_cudnn7_avx_mkl_trt5", "`fluid_inference.tgz `_",
+    "nv-jetson-cuda10-cudnn7.5-trt5", "`fluid_inference.tar.gz `_",
 
 **Note:所提供的C++预测库均使用GCC 4.8编译。**
 
diff --git a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst
index 6f25f975db73299bc048768bae7a27861e9abcf6..474ebddd777265bbe3758570b7d56c00bdaa82fc 100644
--- a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst
+++ b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst
@@ -7,15 +7,15 @@ Direct Download and Installation
 ---------------------------------
 
 .. csv-table:: c++ inference library list
-    :header: "version description", "inference library(1.6.1 version)", "inference library(develop version)"
+    :header: "version description", "inference library(1.6.2 version)", "inference library(develop version)"
     :widths: 1, 3, 3
 
-    "cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cpu_noavx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cuda10.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-
+    "cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cpu_noavx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cuda10.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "nv-jetson-cuda10-cudnn7.5-trt5", "`fluid_inference.tar.gz `_",
 
 Build from Source Code
 -----------------------
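
For orientation after downloading one of the archives listed above, the following is a minimal sketch of calling the library through the C++ API header (``paddle_inference_api.h``, shipped under ``paddle/include`` in the extracted package), using ``paddle::AnalysisConfig`` and ``paddle::CreatePaddlePredictor``. The model directory, input name, and input shape are hypothetical placeholders, and the program is assumed to be linked against the library under ``paddle/lib`` from the same archive; this is not part of the patched documentation pages themselves.

.. code-block:: cpp

    #include <iostream>
    #include <vector>

    #include "paddle_inference_api.h"  // from paddle/include in the extracted archive

    int main() {
      // Point the config at a directory produced by save_inference_model;
      // "./my_model" is a hypothetical placeholder path.
      paddle::AnalysisConfig config;
      config.SetModel("./my_model");
      config.DisableGpu();  // CPU packages; a CUDA package could call EnableUseGpu instead

      auto predictor = paddle::CreatePaddlePredictor(config);

      // Build one FLOAT32 input tensor; name and shape are model specific.
      std::vector<float> buffer(1 * 3 * 224 * 224, 0.0f);
      paddle::PaddleTensor input;
      input.name = "image";  // hypothetical input name
      input.shape = {1, 3, 224, 224};
      input.dtype = paddle::PaddleDType::FLOAT32;
      input.data = paddle::PaddleBuf(buffer.data(), buffer.size() * sizeof(float));

      std::vector<paddle::PaddleTensor> outputs;
      if (predictor->Run({input}, &outputs)) {
        std::cout << "got " << outputs.size() << " output tensor(s)" << std::endl;
      }
      return 0;
    }

With one of the CUDA packages from the table, ``config.EnableUseGpu(100, 0)`` would select the GPU in place of ``DisableGpu()``; the rest of the call sequence stays the same.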