diff --git a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst
index 4787e2c8ee8d71ead611efd2e33aed1ebca7f737..2e3db18b1a6b59d93aba3cb9d7494432a9c978ad 100644
--- a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst
+++ b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_cn.rst
@@ -10,12 +10,12 @@
     :header: "版本说明", "预测库(1.4版本)", "预测库(develop版本)"
     :widths: 1, 3, 3
 
-    "cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cpu_noavx_openblas","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cuda8.0_cudnn5_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cuda8.0_cudnn7_avx_mkl","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cpu_noavx_openblas","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cuda8.0_cudnn5_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cuda8.0_cudnn7_avx_mkl","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
 
 
 从源码编译
diff --git a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst
index d64e5b0fa6d649e539916aecee76dbd284936616..883a96dfade606159616561d1a9f3a385917447f 100644
--- a/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst
+++ b/doc/fluid/advanced_usage/deploy/inference/build_and_install_lib_en.rst
@@ -10,12 +10,12 @@ Direct Download and Installation
     :header: "version description", "inference library(1.4 version)", "inference library(develop version)"
     :widths: 1, 3, 3
 
-    "cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cpu_noavx_openblas","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cuda8.0_cudnn5_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cuda8.0_cudnn7_avx_mkl","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
-    "cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cpu_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cpu_avx_openblas", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cpu_noavx_openblas","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cuda8.0_cudnn5_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cuda8.0_cudnn7_avx_mkl","`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
+    "cuda9.0_cudnn7_avx_mkl", "`fluid_inference.tgz `_", "`fluid_inference.tgz `_"
 
 
 Build from Source Code