未验证 提交 fe38d799 编写于 作者: J Jason 提交者: GitHub

Merge pull request #306 from FlyingQianMM/develop_draw

update inference version
...@@ -320,46 +320,34 @@ target_link_libraries(video_segmenter ${DEPS}) ...@@ -320,46 +320,34 @@ target_link_libraries(video_segmenter ${DEPS})
if (WIN32 AND WITH_MKL) if (WIN32 AND WITH_MKL)
add_custom_command(TARGET classifier POST_BUILD add_custom_command(TARGET classifier POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/Release/mklml.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/Release/libiomp5md.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/libiomp5md.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/Release/mkldnn.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/mkldnn.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./release/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./release/libiomp5md.dll
) )
add_custom_command(TARGET detector POST_BUILD add_custom_command(TARGET detector POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/Release/mklml.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/Release/libiomp5md.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/libiomp5md.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/Release/mkldnn.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/mkldnn.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./release/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./release/libiomp5md.dll
) )
add_custom_command(TARGET segmenter POST_BUILD add_custom_command(TARGET segmenter POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/Release/mklml.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/Release/libiomp5md.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/libiomp5md.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/Release/mkldnn.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/mkldnn.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./release/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./release/libiomp5md.dll
) )
add_custom_command(TARGET video_classifier POST_BUILD add_custom_command(TARGET video_classifier POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/Release/mklml.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/Release/libiomp5md.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/libiomp5md.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/Release/mkldnn.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/mkldnn.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./release/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./release/libiomp5md.dll
) )
add_custom_command(TARGET video_detector POST_BUILD add_custom_command(TARGET video_detector POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/Release/mklml.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/Release/libiomp5md.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/libiomp5md.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/Release/mkldnn.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/mkldnn.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./release/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./release/libiomp5md.dll
) )
add_custom_command(TARGET video_segmenter POST_BUILD add_custom_command(TARGET video_segmenter POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/Release/mklml.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./paddlex_inference/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/Release/libiomp5md.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./paddlex_inference/libiomp5md.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/Release/mkldnn.dll COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./paddlex_inference/mkldnn.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./release/mklml.dll
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./release/libiomp5md.dll
) )
# for encryption # for encryption
if (EXISTS "${ENCRYPTION_DIR}/lib/pmodel-decrypt.dll") if (EXISTS "${ENCRYPTION_DIR}/lib/pmodel-decrypt.dll")
......
...@@ -66,9 +66,14 @@ void Model::create_predictor(const std::string& model_dir, ...@@ -66,9 +66,14 @@ void Model::create_predictor(const std::string& model_dir,
if (key == "") { if (key == "") {
config.SetModel(model_file, params_file); config.SetModel(model_file, params_file);
} }
if (use_mkl && name != "HRNet" && name != "DeepLabv3p") { if (use_mkl) {
config.EnableMKLDNN(); if (name != "HRNet" && name != "DeepLabv3p" && name != "PPYOLO") {
config.SetCpuMathLibraryNumThreads(mkl_thread_num); config.EnableMKLDNN();
config.SetCpuMathLibraryNumThreads(mkl_thread_num);
} else {
std::cerr << "HRNet/DeepLabv3p/PPYOLO are not supported "
<< "for the use of mkldnn" << std::endl;
}
} }
if (use_gpu) { if (use_gpu) {
config.EnableUseGpu(100, gpu_id); config.EnableUseGpu(100, gpu_id);
...@@ -85,7 +90,7 @@ void Model::create_predictor(const std::string& model_dir, ...@@ -85,7 +90,7 @@ void Model::create_predictor(const std::string& model_dir,
#endif #endif
// enable Memory Optim // enable Memory Optim
config.EnableMemoryOptim(); config.EnableMemoryOptim();
if (use_trt) { if (use_trt && use_gpu) {
config.EnableTensorRtEngine( config.EnableTensorRtEngine(
1 << 20 /* workspace_size*/, 1 << 20 /* workspace_size*/,
32 /* max_batch_size*/, 32 /* max_batch_size*/,
...@@ -283,7 +288,7 @@ bool Model::predict(const cv::Mat& im, DetResult* result) { ...@@ -283,7 +288,7 @@ bool Model::predict(const cv::Mat& im, DetResult* result) {
im_tensor->Reshape({1, 3, h, w}); im_tensor->Reshape({1, 3, h, w});
im_tensor->copy_from_cpu(inputs_.im_data_.data()); im_tensor->copy_from_cpu(inputs_.im_data_.data());
if (name == "YOLOv3") { if (name == "YOLOv3" || name == "PPYOLO") {
auto im_size_tensor = predictor_->GetInputTensor("im_size"); auto im_size_tensor = predictor_->GetInputTensor("im_size");
im_size_tensor->Reshape({1, 2}); im_size_tensor->Reshape({1, 2});
im_size_tensor->copy_from_cpu(inputs_.ori_im_size_.data()); im_size_tensor->copy_from_cpu(inputs_.ori_im_size_.data());
...@@ -442,7 +447,7 @@ bool Model::predict(const std::vector<cv::Mat>& im_batch, ...@@ -442,7 +447,7 @@ bool Model::predict(const std::vector<cv::Mat>& im_batch,
inputs_data.begin() + i * 3 * h * w); inputs_data.begin() + i * 3 * h * w);
} }
im_tensor->copy_from_cpu(inputs_data.data()); im_tensor->copy_from_cpu(inputs_data.data());
if (name == "YOLOv3") { if (name == "YOLOv3" || name == "PPYOLO") {
auto im_size_tensor = predictor_->GetInputTensor("im_size"); auto im_size_tensor = predictor_->GetInputTensor("im_size");
im_size_tensor->Reshape({batch_size, 2}); im_size_tensor->Reshape({batch_size, 2});
std::vector<int> inputs_data_size(batch_size * 2); std::vector<int> inputs_data_size(batch_size * 2);
......
# 更新日志 # 更新日志
**v1.2.0** 2020.09.07
- 模型更新
> - 新增目标检测模型PPYOLO[详情链接](https://paddlex.readthedocs.io/zh_CN/develop/apis/models/detection.html#paddlex-det-ppyolo)
> - FasterRCNN、MaskRCNN、YOLOv3、DeepLabv3p等模型新增内置COCO数据集预训练模型
> - 目标检测模型FasterRCNN和MaskRCNN新增backbone HRNet_W18[详情链接](https://paddlex.readthedocs.io/zh_CN/develop/apis/models/detection.html#paddlex-det-fasterrcnn)
> - 语义分割模型DeepLabv3p新增backbone MobileNetV3_large_ssld[详情链接](https://paddlex.readthedocs.io/zh_CN/develop/apis/models/semantic_segmentation.html#paddlex-seg-deeplabv3p)
- 模型部署更新
> - 新增模型通过OpenVINO的部署方案[详情链接](https://paddlex.readthedocs.io/zh_CN/develop/deploy/openvino/index.html)
> - 新增模型在树莓派上的部署方案[详情链接](https://paddlex.readthedocs.io/zh_CN/develop/deploy/raspberry/index.html)
> - 优化PaddleLite Android部署的数据预处理和后处理代码性能
> - 优化Paddle服务端C++代码部署代码,增加use_mkl等参数,通过mkldnn显著提升模型在CPU上的预测性能
- 产业案例更新
> - 新增RGB图像遥感分割案例[详情链接](https://paddlex.readthedocs.io/zh_CN/develop/examples/remote_sensing.html)
> - 新增多通道遥感分割案例[详情链接](https://paddlex.readthedocs.io/zh_CN/develop/examples/multi-channel_remote_sensing/README.html)
- 其它
> - 新增数据集切分功能,支持通过命令行切分ImageNet、PascalVOC、MSCOCO和语义分割数据集[详情链接](https://paddlex.readthedocs.io/zh_CN/develop/data/format/classification.html#id2)
**v1.1.0** 2020.07.12 **v1.1.0** 2020.07.12
...@@ -13,8 +32,8 @@ ...@@ -13,8 +32,8 @@
> - 新增Jetson、Paddle Lite模型部署预测方案 > - 新增Jetson、Paddle Lite模型部署预测方案
> - C++部署代码新增batch批预测,并采用OpenMP对预处理进行并行加速 > - C++部署代码新增batch批预测,并采用OpenMP对预处理进行并行加速
- 新增2个PaddleX产业案例 - 新增2个PaddleX产业案例
> - [人像分割案例]() > - [人像分割案例](https://paddlex.readthedocs.io/zh_CN/develop/examples/human_segmentation.html)
> - [工业表计读数案例]() > - [工业表计读数案例](https://paddlex.readthedocs.io/zh_CN/develop/examples/meter_reader.html)
- 新增数据格式转换功能,LabelMe、精灵标注助手和EasyData平台标注的数据转为PaddleX支持加载的数据格式 - 新增数据格式转换功能,LabelMe、精灵标注助手和EasyData平台标注的数据转为PaddleX支持加载的数据格式
- PaddleX文档更新,优化文档结构 - PaddleX文档更新,优化文档结构
......
...@@ -11,3 +11,5 @@ ...@@ -11,3 +11,5 @@
server/index server/index
nvidia-jetson.md nvidia-jetson.md
paddlelite/index paddlelite/index
openvino/index
raspberry/index
...@@ -8,4 +8,4 @@ ...@@ -8,4 +8,4 @@
Raspberry.md Raspberry.md
python.md python.md
export_nb_model.md export_nb_model.md
\ No newline at end of file
...@@ -19,16 +19,15 @@ ...@@ -19,16 +19,15 @@
### Step2: 下载PaddlePaddle C++ 预测库 paddle_inference ### Step2: 下载PaddlePaddle C++ 预测库 paddle_inference
PaddlePaddle C++ 预测库针对不同的`CPU``CUDA`,以及是否支持TensorRT,提供了不同的预编译版本,目前PaddleX依赖于Paddle1.8版本,以下提供了多个不同版本的Paddle预测库: PaddlePaddle C++ 预测库针对不同的`CPU``CUDA`,以及是否支持TensorRT,提供了不同的预编译版本,目前PaddleX依赖于Paddle1.8.4版本,以下提供了多个不同版本的Paddle预测库:
| 版本说明 | 预测库(1.8.2版本) | | 版本说明 | 预测库(1.8.4版本) |
| ---- | ---- | | ---- | ---- |
| ubuntu14.04_cpu_avx_mkl | [paddle_inference](https://paddle-inference-lib.bj.bcebos.com/1.8.2-cpu-avx-mkl/fluid_inference.tgz) | | ubuntu14.04_cpu_avx_mkl | [paddle_inference](https://paddle-inference-lib.bj.bcebos.com/latest-cpu-avx-mkl/fluid_inference.tgz) |
| ubuntu14.04_cpu_avx_openblas | [paddle_inference](https://paddle-inference-lib.bj.bcebos.com/1.8.2-cpu-avx-openblas/fluid_inference.tgz) | | ubuntu14.04_cpu_avx_openblas | [paddle_inference](https://paddle-inference-lib.bj.bcebos.com/latest-cpu-avx-openblas/fluid_inference.tgz) |
| ubuntu14.04_cpu_noavx_openblas | [paddle_inference](https://paddle-inference-lib.bj.bcebos.com/1.8.2-cpu-noavx-openblas/fluid_inference.tgz) | | ubuntu14.04_cpu_noavx_openblas | [paddle_inference](https://paddle-inference-lib.bj.bcebos.com/latest-cpu-noavx-openblas/fluid_inference.tgz) |
| ubuntu14.04_cuda9.0_cudnn7_avx_mkl | [paddle_inference](https://paddle-inference-lib.bj.bcebos.com/1.8.2-gpu-cuda9-cudnn7-avx-mkl/fluid_inference.tgz) | | ubuntu14.04_cuda9.0_cudnn7_avx_mkl | [paddle_inference](https://paddle-inference-lib.bj.bcebos.com/latest-gpu-cuda9-cudnn7-avx-mkl/fluid_inference.tgz) |
| ubuntu14.04_cuda10.0_cudnn7_avx_mkl | [paddle_inference](https://paddle-inference-lib.bj.bcebos.com/1.8.2-gpu-cuda10-cudnn7-avx-mkl/fluid_inference.tgz ) | | ubuntu14.04_cuda10.0_cudnn7_avx_mkl | [paddle_inference](https://paddle-inference-lib.bj.bcebos.com/latest-gpu-cuda10-cudnn7-avx-mkl/fluid_inference.tgz) |
| ubuntu14.04_cuda10.1_cudnn7.6_avx_mkl_trt6 | [paddle_inference](https://paddle-inference-lib.bj.bcebos.com/1.8.2-gpu-cuda10.1-cudnn7.6-avx-mkl-trt6%2Ffluid_inference.tgz) |
更多和更新的版本,请根据实际情况下载: [C++预测库下载列表](https://www.paddlepaddle.org.cn/documentation/docs/zh/develop/advanced_guide/inference_deployment/inference/build_and_install_lib_cn.html) 更多和更新的版本,请根据实际情况下载: [C++预测库下载列表](https://www.paddlepaddle.org.cn/documentation/docs/zh/develop/advanced_guide/inference_deployment/inference/build_and_install_lib_cn.html)
...@@ -97,8 +96,8 @@ make ...@@ -97,8 +96,8 @@ make
``` ```
**注意:** linux环境下编译会自动下载OPENCV, PaddleX-Encryption和YAML,如果编译环境无法访问外网,可手动下载: **注意:** linux环境下编译会自动下载OPENCV, PaddleX-Encryption和YAML,如果编译环境无法访问外网,可手动下载:
- [opencv3gcc4.8.tar.bz2](https://paddleseg.bj.bcebos.com/deploy/docker/opencv3gcc4.8.tar.bz2) - [opencv3.4.6gcc4.8ffmpeg.tar.gz2](https://bj.bcebos.com/paddleseg/deploy/opencv3.4.6gcc4.8ffmpeg.tar.gz2)
- [paddlex-encryption.zip](https://bj.bcebos.com/paddlex/tools/paddlex-encryption.zip) - [paddlex-encryption.zip](https://bj.bcebos.com/paddlex/tools/1.2.0/paddlex-encryption.zip)
- [yaml-cpp.zip](https://bj.bcebos.com/paddlex/deploy/deps/yaml-cpp.zip) - [yaml-cpp.zip](https://bj.bcebos.com/paddlex/deploy/deps/yaml-cpp.zip)
opencv3gcc4.8.tar.bz2文件下载后解压,然后在script/build.sh中指定`OPENCV_DIR`为解压后的路径。 opencv3gcc4.8.tar.bz2文件下载后解压,然后在script/build.sh中指定`OPENCV_DIR`为解压后的路径。
......
...@@ -26,15 +26,15 @@ git clone https://github.com/PaddlePaddle/PaddleX.git ...@@ -26,15 +26,15 @@ git clone https://github.com/PaddlePaddle/PaddleX.git
### Step2: 下载PaddlePaddle C++ 预测库 paddle_inference ### Step2: 下载PaddlePaddle C++ 预测库 paddle_inference
PaddlePaddle C++ 预测库针对是否使用GPU、是否支持TensorRT、以及不同的CUDA版本提供了已经编译好的预测库,目前PaddleX依赖于Paddle 1.8,基于Paddle 1.8的Paddle预测库下载链接如下所示: PaddlePaddle C++ 预测库针对是否使用GPU、是否支持TensorRT、以及不同的CUDA版本提供了已经编译好的预测库,目前PaddleX依赖于Paddle 1.8.4,基于Paddle 1.8.4的Paddle预测库下载链接如下所示:
| 版本说明 | 预测库(1.8.2版本) | 编译器 | 构建工具| cuDNN | CUDA | | 版本说明 | 预测库(1.8.4版本) | 编译器 | 构建工具| cuDNN | CUDA |
| ---- | ---- | ---- | ---- | ---- | ---- | | ---- | ---- | ---- | ---- | ---- | ---- |
| cpu_avx_mkl | [paddle_inference](https://paddle-wheel.bj.bcebos.com/1.8.2/win-infer/mkl/cpu/fluid_inference_install_dir.zip) | MSVC 2015 update 3 | CMake v3.16.0 | | cpu_avx_mkl | [paddle_inference](https://paddle-wheel.bj.bcebos.com/1.8.4/win-infer/mkl/cpu/fluid_inference_install_dir.zip) | MSVC 2015 update 3 | CMake v3.16.0 |
| cpu_avx_openblas | [paddle_inference](https://paddle-wheel.bj.bcebos.com/1.8.2/win-infer/open/cpu/fluid_inference_install_dir.zip) | MSVC 2015 update 3 | CMake v3.16.0 | | cpu_avx_openblas | [paddle_inference](https://paddle-wheel.bj.bcebos.com/1.8.4/win-infer/open/cpu/fluid_inference_install_dir.zip) | MSVC 2015 update 3 | CMake v3.16.0 |
| cuda9.0_cudnn7_avx_mkl | [paddle_inference](https://paddle-wheel.bj.bcebos.com/1.8.2/win-infer/mkl/post97/fluid_inference_install_dir.zip) | MSVC 2015 update 3 | CMake v3.16.0 | 7.4.1 | 9.0 | | cuda9.0_cudnn7_avx_mkl | [paddle_inference](https://paddle-wheel.bj.bcebos.com/1.8.4/win-infer/mkl/post97/fluid_inference_install_dir.zip) | MSVC 2015 update 3 | CMake v3.16.0 | 7.4.1 | 9.0 |
| cuda9.0_cudnn7_avx_openblas | [paddle_inference](https://paddle-wheel.bj.bcebos.com/1.8.2/win-infer/open/post97/fluid_inference_install_dir.zip) | MSVC 2015 update 3 | CMake v3.16.0 | 7.4.1 | 9.0 | | cuda9.0_cudnn7_avx_openblas | [paddle_inference](https://paddle-wheel.bj.bcebos.com/1.8.4/win-infer/open/post97/fluid_inference_install_dir.zip) | MSVC 2015 update 3 | CMake v3.16.0 | 7.4.1 | 9.0 |
| cuda10.0_cudnn7_avx_mkl | [paddle_inference](https://paddle-wheel.bj.bcebos.com/1.8.2/win-infer/mkl/post107/fluid_inference_install_dir.zip) | MSVC 2015 update 3 | CMake v3.16.0 | 7.5.0 | 10.0 | | cuda10.0_cudnn7_avx_mkl | [paddle_inference](https://paddle-wheel.bj.bcebos.com/1.8.4/win-infer/mkl/post107/fluid_inference_install_dir.zip) | MSVC 2015 update 3 | CMake v3.16.0 | 7.5.0 | 10.0 |
请根据实际情况选择下载,如若以上版本不满足您的需求,请至[C++预测库下载列表](https://www.paddlepaddle.org.cn/documentation/docs/zh/develop/advanced_guide/inference_deployment/inference/windows_cpp_inference.html)选择符合的版本。 请根据实际情况选择下载,如若以上版本不满足您的需求,请至[C++预测库下载列表](https://www.paddlepaddle.org.cn/documentation/docs/zh/develop/advanced_guide/inference_deployment/inference/windows_cpp_inference.html)选择符合的版本。
...@@ -82,7 +82,7 @@ PaddlePaddle C++ 预测库针对是否使用GPU、是否支持TensorRT、以及 ...@@ -82,7 +82,7 @@ PaddlePaddle C++ 预测库针对是否使用GPU、是否支持TensorRT、以及
1. 如果使用`CPU`版预测库,请把`WITH_GPU`的``去掉勾 1. 如果使用`CPU`版预测库,请把`WITH_GPU`的``去掉勾
2. 如果使用的是`openblas`版本,请把`WITH_MKL`的``去掉勾 2. 如果使用的是`openblas`版本,请把`WITH_MKL`的``去掉勾
3. Windows环境下编译会自动下载YAML,如果编译环境无法访问外网,可手动下载: [yaml-cpp.zip](https://bj.bcebos.com/paddlex/deploy/deps/yaml-cpp.zip)。YAML文件下载后无需解压,在`cmake/yaml.cmake`中将`URL https://bj.bcebos.com/paddlex/deploy/deps/yaml-cpp.zip` 中的网址,改为下载文件的路径。 3. Windows环境下编译会自动下载YAML,如果编译环境无法访问外网,可手动下载: [yaml-cpp.zip](https://bj.bcebos.com/paddlex/deploy/deps/yaml-cpp.zip)。YAML文件下载后无需解压,在`cmake/yaml.cmake`中将`URL https://bj.bcebos.com/paddlex/deploy/deps/yaml-cpp.zip` 中的网址,改为下载文件的路径。
4. 如果需要使用模型加密功能,需要手动下载[Windows预测模型加密工具](https://bj.bcebos.com/paddlex/tools/win/paddlex-encryption.zip)。例如解压到`D:/projects`,解压后目录为`D:/projects/paddlex-encryption`。编译时需勾选`WITH_ENCRYPTION`并且在`ENCRYPTION_DIR`填入`D:/projects/paddlex-encryption`。 4. 如果需要使用模型加密功能,需要手动下载[Windows预测模型加密工具](https://bj.bcebos.com/paddlex/tools/win/1.2.0/paddlex-encryption.zip)。例如解压到`D:/projects`,解压后目录为`D:/projects/paddlex-encryption`。编译时需勾选`WITH_ENCRYPTION`并且在`ENCRYPTION_DIR`填入`D:/projects/paddlex-encryption`。
![](../../images/vs2019_step_encryption.png) ![](../../images/vs2019_step_encryption.png)
![](../../images/vs2019_step6.png) ![](../../images/vs2019_step6.png)
**设置完成后**, 点击上图中`保存并生成CMake缓存以加载变量`。 **设置完成后**, 点击上图中`保存并生成CMake缓存以加载变量`。
......
...@@ -40,9 +40,9 @@ PaddleX提供一个轻量级的模型加密部署方案,通过PaddleX内置的 ...@@ -40,9 +40,9 @@ PaddleX提供一个轻量级的模型加密部署方案,通过PaddleX内置的
### 1.2 加密工具 ### 1.2 加密工具
[Linux版本 PaddleX模型加密工具](https://bj.bcebos.com/paddlex/tools/paddlex-encryption.zip),编译脚本会自动下载该版本加密工具,您也可以选择手动下载。 [Linux版本 PaddleX模型加密工具](https://bj.bcebos.com/paddlex/tools/1.2.0/paddlex-encryption.zip),编译脚本会自动下载该版本加密工具,您也可以选择手动下载。
[Windows版本 PaddleX模型加密工具](https://bj.bcebos.com/paddlex/tools/win/paddlex-encryption.zip),该版本加密工具需手动下载,如果您在使用Visual Studio 2019编译C++预测代码的过程中已经下载过该工具,此处可不必重复下载。 [Windows版本 PaddleX模型加密工具](https://bj.bcebos.com/paddlex/tools/win/1.2.0/paddlex-encryption.zip),该版本加密工具需手动下载,如果您在使用Visual Studio 2019编译C++预测代码的过程中已经下载过该工具,此处可不必重复下载。
Linux加密工具包含内容为: Linux加密工具包含内容为:
``` ```
......
...@@ -23,6 +23,7 @@ from paddlex.cv.transforms import build_transforms ...@@ -23,6 +23,7 @@ from paddlex.cv.transforms import build_transforms
from paddlex.cv.models import BaseClassifier from paddlex.cv.models import BaseClassifier
from paddlex.cv.models import PPYOLO, FasterRCNN, MaskRCNN from paddlex.cv.models import PPYOLO, FasterRCNN, MaskRCNN
from paddlex.cv.models import DeepLabv3p from paddlex.cv.models import DeepLabv3p
import paddlex.utils.logging as logging
class Predictor: class Predictor:
...@@ -108,9 +109,13 @@ class Predictor: ...@@ -108,9 +109,13 @@ class Predictor:
else: else:
config.disable_gpu() config.disable_gpu()
if use_mkl: if use_mkl:
if self.model_name not in ["HRNet", "DeepLabv3p"]: if self.model_name not in ["HRNet", "DeepLabv3p", "PPYOLO"]:
config.enable_mkldnn() config.enable_mkldnn()
config.set_cpu_math_library_num_threads(mkl_thread_num) config.set_cpu_math_library_num_threads(mkl_thread_num)
else:
logging.warning(
"HRNet/DeepLabv3p/PPYOLO are not supported for the use of mkldnn\n"
)
if use_glog: if use_glog:
config.enable_glog_info() config.enable_glog_info()
else: else:
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册