From 1662e6630c66fb73a367abb3eaa36a37378850b8 Mon Sep 17 00:00:00 2001
From: zhouwei25 <52485244+zhouwei25@users.noreply.github.com>
Date: Sun, 27 Oct 2019 20:31:42 +0800
Subject: [PATCH] fix inference lib package on windows (#1555)

fix inference lib package on windows
---
 .../deploy/inference/windows_cpp_inference.md | 10 +++---
 .../inference/windows_cpp_inference_en.md     | 31 +++++++++----------
 2 files changed, 20 insertions(+), 21 deletions(-)

diff --git a/doc/fluid/advanced_usage/deploy/inference/windows_cpp_inference.md b/doc/fluid/advanced_usage/deploy/inference/windows_cpp_inference.md
index 5d9039611..4def40bf6 100755
--- a/doc/fluid/advanced_usage/deploy/inference/windows_cpp_inference.md
+++ b/doc/fluid/advanced_usage/deploy/inference/windows_cpp_inference.md
@@ -5,13 +5,13 @@
 直接下载安装
 -------------
 
-
 | 版本说明 | 预测库(1.6.0版本) |
 |:---------|:-------------------|
-| cpu_avx_mkl | [fluid_inference.zip](https://paddle-inference-lib.bj.bcebos.com/1.5.2-win/cpu_mkl_avx/fluid_inference_install_dir.zip) |
-| cuda8.0_cudnn7_avx_mkl | [fluid_inference.zip](https://paddle-inference-lib.bj.bcebos.com/1.5.2-win/gpu_mkl_avx_8.0/fluid_inference_install_dir.zip) |
-| cuda9.0_cudnn7_avx_mkl | [fluid_inference.zip](https://paddle-inference-lib.bj.bcebos.com/1.5.2-win/gpu_mkl_avx_9.0/fluid_inference_install_dir.zip) |
-| cuda10.0_cudnn7_avx_mkl | [fluid_inference.zip](https://paddle-inference-lib.bj.bcebos.com/1.5.2-win/gpu_mkl_avx_10.0/fluid_inference_install_dir.zip) |
+| cpu_avx_mkl | [fluid_inference.zip](https://paddle-wheel.bj.bcebos.com/1.6.0/win-infer/mkl/cpu/fluid_inference_install_dir.zip) |
+| cpu_avx_openblas | [fluid_inference.zip](https://paddle-wheel.bj.bcebos.com/1.6.0/win-infer/open/cpu/fluid_inference_install_dir.zip) |
+| cuda9.0_cudnn7_avx_openblas | [fluid_inference.zip](https://paddle-wheel.bj.bcebos.com/1.6.0/win-infer/open/post97/fluid_inference_install_dir.zip) |
+| cuda9.0_cudnn7_avx_mkl | [fluid_inference.zip](https://paddle-wheel.bj.bcebos.com/1.6.0/win-infer/mkl/post97/fluid_inference_install_dir.zip) |
+| cuda10.0_cudnn7_avx_mkl | [fluid_inference.zip](https://paddle-wheel.bj.bcebos.com/1.6.0/win-infer/mkl/post107/fluid_inference_install_dir.zip) |
 
 从源码编译预测库
 --------------
diff --git a/doc/fluid/advanced_usage/deploy/inference/windows_cpp_inference_en.md b/doc/fluid/advanced_usage/deploy/inference/windows_cpp_inference_en.md
index 6ec1d03ee..b47edf2b7 100755
--- a/doc/fluid/advanced_usage/deploy/inference/windows_cpp_inference_en.md
+++ b/doc/fluid/advanced_usage/deploy/inference/windows_cpp_inference_en.md
@@ -1,31 +1,30 @@
-Model Inference on Windows
+
+Install and Compile C++ Inference Library on Windows
 ===========================
 
-Pre-Built Inference Libraries
+Direct Download and Install
 -------------
 
-| Version | Inference Libraries(v1.6.0) |
+| Version | Inference Libraries(v1.6.0) |
 |:---------|:-------------------|
-| cpu_avx_mkl | [fluid_inference.zip](https://paddle-inference-lib.bj.bcebos.com/1.5.1-win/cpu_mkl_avx/fluid_inference_install_dir.zip) |
-| cpu_avx_openblas | [fluid_inference.zip](https://paddle-inference-lib.bj.bcebos.com/1.5.1-win/cpu_open_avx/fluid_inference_install_dir.zip) |
-| cuda8.0_cudnn7_avx_mkl | [fluid_inference.zip](https://paddle-inference-lib.bj.bcebos.com/1.5.1-win/gpu_mkl_avx_8.0/fluid_inference_install_dir.zip) |
-| cuda8.0_cudnn7_avx_openblas | [fluid_inference.zip](https://paddle-inference-lib.bj.bcebos.com/1.5.1-win/gpu_open_avx_8.0/fluid_inference_install_dir.zip)|
-| cuda9.0_cudnn7_avx_mkl | [fluid_inference.zip](https://paddle-inference-lib.bj.bcebos.com/1.5.1-win/gpu_mkl_avx_9.0/fluid_inference_install_dir.zip) |
-| cuda9.0_cudnn7_avx_openblas | [fluid_inference.zip](https://paddle-inference-lib.bj.bcebos.com/1.5.1-win/gpu_open_avx_9.0/fluid_inference_install_dir.zip) |
-
+| cpu_avx_mkl | [fluid_inference.zip](https://paddle-wheel.bj.bcebos.com/1.6.0/win-infer/mkl/cpu/fluid_inference_install_dir.zip) |
+| cpu_avx_openblas | [fluid_inference.zip](https://paddle-wheel.bj.bcebos.com/1.6.0/win-infer/open/cpu/fluid_inference_install_dir.zip) |
+| cuda9.0_cudnn7_avx_openblas | [fluid_inference.zip](https://paddle-wheel.bj.bcebos.com/1.6.0/win-infer/open/post97/fluid_inference_install_dir.zip) |
+| cuda9.0_cudnn7_avx_mkl | [fluid_inference.zip](https://paddle-wheel.bj.bcebos.com/1.6.0/win-infer/mkl/post97/fluid_inference_install_dir.zip) |
+| cuda10.0_cudnn7_avx_mkl | [fluid_inference.zip](https://paddle-wheel.bj.bcebos.com/1.6.0/win-infer/mkl/post107/fluid_inference_install_dir.zip) |
 
 Build From Source Code
 --------------
 
-Important Compilation Flags:
-|Option | Value |
+Users can also compile C++ inference libraries from the PaddlePaddle core code by specifying the following compile options at compile time:
+
+|Option | Value |
 |:-------------|:-------------------|
 |CMAKE_BUILD_TYPE | Release |
-|ON_INFER | ON (recommended) |
-|WITH_GPU | ON/OFF |
+|ON_INFER | ON(recommended) |
+|WITH_GPU | ON/OFF |
 |WITH_MKL | ON/OFF |
-|WITH_PYTHON | OFF |
-
+|WITH_PYTHON | OFF |
 
 **Paddle Windows Inference Library Compilation Steps**
 
-- 
GitLab
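The compile options listed in the updated English page are ordinary CMake cache variables, so on Windows they are typically passed as `-D` definitions when generating the Visual Studio solution. The sketch below only illustrates how the five options from the table fit together; the clone location, build directory, Visual Studio generator, and solution name are assumptions for illustration, not part of this patch.

```bat
REM Minimal sketch of a CPU-only inference library build using the options
REM from the table above. Paths, the Visual Studio generator, and the
REM generated solution name are placeholders -- adjust them to your setup.
git clone https://github.com/PaddlePaddle/Paddle.git
cd Paddle
mkdir build
cd build

REM Release build, inference mode on, GPU off, MKL on, Python bindings off.
cmake .. -G "Visual Studio 14 2015 Win64" ^
    -DCMAKE_BUILD_TYPE=Release ^
    -DON_INFER=ON ^
    -DWITH_GPU=OFF ^
    -DWITH_MKL=ON ^
    -DWITH_PYTHON=OFF

REM Build the Release configuration with MSBuild (the solution name may differ).
msbuild /m /p:Configuration=Release paddle.sln
```

For a GPU build matching one of the CUDA packages in the download table, `WITH_GPU` would be switched to `ON`, with CUDA and cuDNN installed beforehand.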