diff --git a/CMakeLists.txt b/CMakeLists.txt index 7d61ce76fd9ddd2c70fced654efd08b02ce94b52..9761750b2715931e8be2dca5baf860034db0fb1d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -61,6 +61,7 @@ option(PADDLE_ON_INFERENCE "Compile for encryption" option(WITH_OPENCV "Compile Paddle Serving with OPENCV" OFF) option(WITH_ROCM "Compile Paddle Serving with ROCM" OFF) option(WITH_ASCEND_CL "Compile PaddlePaddle with ASCEND CL" OFF) +option(WITH_JETSON "Compile PaddlePaddle with JETSON" OFF) if(NOT DEFINED VERSION_TAG) set(VERSION_TAG "0.0.0") diff --git a/README.md b/README.md index dcc5ef7903c9f32f4a4c4bfd101000fdb03f9b68..9f1c60178da79aa2b8b331b2c69f4355ba36a4e3 100755 --- a/README.md +++ b/README.md @@ -64,7 +64,7 @@ This chapter guides you through the installation and deployment steps. It is str - [Deploy Paddle Serving with Security gateway(Chinese)](doc/Serving_Auth_Docker_CN.md) - Deploy on more hardwares[[ARM CPU、百度昆仑](doc/Run_On_XPU_EN.md)、[华为昇腾](doc/Run_On_NPU_CN.md)、[海光DCU](doc/Run_On_DCU_CN.md)、[Jetson](doc/Run_On_JETSON_CN.md)] - [Docker Images](doc/Docker_Images_EN.md) -- [Latest Wheel packages](doc/Latest_Packages_CN.md) +- [Download Wheel packages](doc/Latest_Packages_EN.md) > Use @@ -90,6 +90,8 @@ The first step is to call the model save interface to generate a model parameter - [Analyze and optimize performance](doc/Python_Pipeline/Performance_Tuning_EN.md) - [TensorRT dynamic Shape](doc/TensorRT_Dynamic_Shape_EN.md) - [Benchmark(Chinese)](doc/Python_Pipeline/Benchmark_CN.md) + - Our Paper: [JiZhi: A Fast and Cost-Effective Model-As-A-Service System for +Web-Scale Online Inference at Baidu](https://arxiv.org/pdf/2106.01674.pdf) - Client SDK - [Python SDK(Chinese)](doc/C++_Serving/Introduction_CN.md#42-多语言多协议Client) - [JAVA SDK](doc/Java_SDK_EN.md) @@ -108,13 +110,13 @@ For Paddle Serving developers, we provide extended documents such as custom OP,

Model Zoo

-Paddle Serving works closely with the Paddle model suite, and implements a large number of service deployment examples, including image classification, object detection, language and text recognition, Chinese part of speech, sentiment analysis, content recommendation and other types of examples, for a total of 45 models. +Paddle Serving works closely with the Paddle model suite, and implements a large number of service deployment examples, including image classification, object detection, language and text recognition, Chinese part of speech, sentiment analysis, content recommendation and other types of examples, for a total of 46 models.

-| PaddleOCR | PaddleDetection | PaddleClas | PaddleSeg | PaddleRec | Paddle NLP | -| :----: | :----: | :----: | :----: | :----: | :----: | -| 8 | 12 | 14 | 2 | 3 | 6 | +| PaddleOCR | PaddleDetection | PaddleClas | PaddleSeg | PaddleRec | Paddle NLP | Paddle Video | +| :----: | :----: | :----: | :----: | :----: | :----: | :----: | +| 8 | 12 | 14 | 2 | 3 | 6 | 1 |

diff --git a/README_CN.md b/README_CN.md index e5ea1976d24b8b8f2882347c86e8b1e1a074b01a..65bf8b4686318c68103a00f8e32dd98169cddbb1 100755 --- a/README_CN.md +++ b/README_CN.md @@ -60,7 +60,7 @@ Paddle Serving依托深度学习框架PaddlePaddle旨在帮助深度学习开发 - [部署Paddle Serving安全网关](doc/Serving_Auth_Docker_CN.md) - 异构硬件部署[[ARM CPU、百度昆仑](doc/Run_On_XPU_CN.md)、[华为昇腾](doc/Run_On_NPU_CN.md)、[海光DCU](doc/Run_On_DCU_CN.md)、[Jetson](doc/Run_On_JETSON_CN.md)] - [Docker镜像](doc/Docker_Images_CN.md) -- [最新Wheel开发包(English)](doc/Latest_Packages_CN.md) +- [下载Wheel包](doc/Latest_Packages_CN.md) > 使用 @@ -104,9 +104,9 @@ Paddle Serving与Paddle模型套件紧密配合,实现大量服务化部署,

-| PaddleOCR | PaddleDetection | PaddleClas | PaddleSeg | PaddleRec | Paddle NLP | -| :----: | :----: | :----: | :----: | :----: | :----: | -| 8 | 12 | 14 | 2 | 3 | 6 | +| PaddleOCR | PaddleDetection | PaddleClas | PaddleSeg | PaddleRec | Paddle NLP | Paddle Video | +| :----: | :----: | :----: | :----: | :----: | :----: | :----: | +| 8 | 12 | 14 | 2 | 3 | 6 | 1 |

diff --git a/cmake/paddlepaddle.cmake b/cmake/paddlepaddle.cmake index 0774db92b5a9132ef22bea8271bbfa0eb8ab57b5..ad8df0f1844e902a632f7c7df390ec3c2a783345 100644 --- a/cmake/paddlepaddle.cmake +++ b/cmake/paddlepaddle.cmake @@ -94,6 +94,9 @@ else() SET(PADDLE_LIB_VERSION "${PADDLE_VERSION}/cxx_c/Linux/CPU/gcc8.2_openblas") endif() endif() +if(WITH_JETSON) + SET(PADDLE_LIB_VERSION "${PADDLE_VERSION}/cxx_c/Jetson/jetpack4.6_gcc7.5/all") +endif() if(WITH_LITE) if (WITH_XPU) @@ -103,6 +106,8 @@ if(WITH_LITE) endif() elseif(WITH_ASCEND_CL) SET(PADDLE_LIB_PATH "http://paddle-serving.bj.bcebos.com/inferlib/${PADDLE_LIB_VERSION}/paddle_inference.tgz ") +elseif(WITH_JETSON) + SET(PADDLE_LIB_PATH "http://paddle-inference-lib.bj.bcebos.com/${PADDLE_LIB_VERSION}/paddle_inference_install_dir.tgz") else() SET(PADDLE_LIB_PATH "http://paddle-inference-lib.bj.bcebos.com/${PADDLE_LIB_VERSION}/paddle_inference.tgz") endif() diff --git a/core/general-server/CMakeLists.txt b/core/general-server/CMakeLists.txt index 4dbb066ee49fa6ec971c83286197117774e14eb9..9b78ec4c0d8abd9676684d8c7271db75f35fa571 100644 --- a/core/general-server/CMakeLists.txt +++ b/core/general-server/CMakeLists.txt @@ -42,7 +42,9 @@ if(WITH_GPU) endif() if(WITH_MKL OR WITH_GPU) - if (WITH_TRT) + if (WITH_JETSON) + target_link_libraries(serving openblas -lpthread -lcrypto -lm -lrt -lssl -ldl -lz -lbz2) + elseif (WITH_TRT) target_link_libraries(serving -liomp5 -lmklml_intel -lpthread -lcrypto -lm -lrt -lssl -ldl -lz -lbz2 -ldnnl) else() target_link_libraries(serving -liomp5 -lmklml_intel -lmkldnn -lpthread -lcrypto -lm -lrt -lssl -ldl -lz -lbz2) diff --git a/doc/Docker_Images_CN.md b/doc/Docker_Images_CN.md index ad49ecd5e39e7333942ad280667518e490b4271b..97293b3b3b6b82bcd3252f2cd2c4fbbcd73589d3 100644 --- a/doc/Docker_Images_CN.md +++ b/doc/Docker_Images_CN.md @@ -26,7 +26,7 @@ ## 镜像说明 若需要基于源代码二次开发编译,请使用后缀为-devel的版本。 -**在TAG列,0.7.0也可以替换成对应的版本号,例如0.5.0/0.4.1等,但需要注意的是,部分开发环境随着某个版本迭代才增加,因此并非所有环境都有对应的版本号可以使用。** 
+**在TAG列,0.8.0也可以替换成对应的版本号,例如0.5.0/0.4.1等,但需要注意的是,部分开发环境随着某个版本迭代才增加,因此并非所有环境都有对应的版本号可以使用。** | 镜像选择 | 操作系统 | TAG | Dockerfile | diff --git a/doc/FAQ_CN.md b/doc/FAQ_CN.md index 7713f0eef92ed0b1bc07e7e1c8a7a9121df70a36..ee3c30428601a7d54858b9d334fea875f51c1487 100644 --- a/doc/FAQ_CN.md +++ b/doc/FAQ_CN.md @@ -8,6 +8,8 @@ Failed to predict: (data_id=1 log_id=0) [det|0] Failed to postprocess: postproce ``` **A:** 在服务端程序(例如 web_service.py)的postprocess函数定义中增加参数data_id,改为 def postprocess(self, input_dicts, fetch_dict, **data_id**, log_id) 即可。 +*** + ## 基础知识 #### Q: Paddle Serving 、Paddle Inference、PaddleHub Serving三者的区别及联系? @@ -40,6 +42,8 @@ Failed to predict: (data_id=1 log_id=0) [det|0] Failed to postprocess: postproce **A:** http rpc +*** + ## 安装问题 #### Q: pip install安装whl包过程,报错信息如下: @@ -119,6 +123,7 @@ pip install shapely==1.7.0 方法2: pip install -r python/requirements.txt ``` +*** ## 编译问题 @@ -144,8 +149,16 @@ make: *** [all] Error 2 **A:** 运行命令安装libbz2: apt install libbz2-dev +*** ## 环境问题 +#### Q: ImportError: dlopen: cannot load any more object with static TLS + +**A:** 一般是用户使用Linux系统版本比较低或者Python使用的gcc版本比较低导致的,可使用以下命令检查,或者通过使用Serving或Paddle镜像安装 +``` +strings /lib/libc.so | grep GLIBC +``` + #### Q:使用过程中出现CXXABI错误。 这个问题出现的原因是Python使用的gcc版本和Serving所需的gcc版本对不上。对于Docker用户,推荐使用[Docker容器](https://github.com/PaddlePaddle/Serving/blob/develop/doc/Docker_Images_CN.md),由于Docker容器内的Python版本与Serving在发布前都做过适配,这样就不会出现类似的错误。如果是其他开发环境,首先需要确保开发环境中具备GCC 8.2,如果没有gcc 8.2,参考安装方式 @@ -208,6 +221,24 @@ wget https://paddle-serving.bj.bcebos.com/others/centos_ssl.tar && \ (3) Cuda10.1及更高版本需要TensorRT。安装TensorRT相关文件的脚本参考 [install_trt.sh](../tools/dockerfiles/build_scripts/install_trt.sh). 
+*** + +## 模型参数保存问题 + +#### Q: 找不到'_remove_training_info'属性,详细报错信息如下: +``` +python3 -m paddle_serving_client.convert --dirname ./ch_PP-OCRv2_det_infer/ \ + --model_filename inference.pdmodel \ + --params_filename inference.pdiparams \ + --serving_server ./ppocrv2_det_serving/ \ + --serving_client ./ppocrv2_det_client/ + AttributeError: 'Program' object has no attribute '_remove_training_info' +``` + +**A:** Paddle版本低,升级Paddle版本到2.2.x及以上 + +*** + ## 部署问题 #### Q: GPU环境运行Serving报错,GPU count is: 0。 @@ -251,6 +282,8 @@ InvalidArgumentError: Device id must be less than GPU count, but received id is: #### Q: Docker中启动server IP地址 127.0.0.1 与 0.0.0.0 差异 **A:** 您必须将容器的主进程设置为绑定到特殊的 0.0.0.0 “所有接口”地址,否则它将无法从容器外部访问。在Docker中 127.0.0.1 代表“这个容器”,而不是“这台机器”。如果您从容器建立到 127.0.0.1 的出站连接,它将返回到同一个容器;如果您将服务器绑定到 127.0.0.1,接收不到来自外部的连接。 +*** + ## 预测问题 #### Q: 使用GPU第一次预测时特别慢,如何调整RPC服务的等待时间避免超时? @@ -296,7 +329,7 @@ client.connect(["127.0.0.1:9393"]) **A:** 参考该文档安装TensorRT: https://blog.csdn.net/hesongzefairy/article/details/105343525 - +*** ## 日志排查 @@ -321,7 +354,6 @@ GLOG_v=2 python -m paddle_serving_server.serve --model xxx_conf/ --port 9999 ``` - #### Q: (GLOG_v=2下)Server端日志一切正常,但Client端始终得不到正确的预测结果 **A:** 可能是配置文件有问题,检查下配置文件(is_load_tensor,fetch_type等有没有问题) @@ -341,4 +373,3 @@ GLOG_v=2 python -m paddle_serving_server.serve --model xxx_conf/ --port 9999 注意:可执行文件路径是C++ bin文件的路径,而不是python命令,一般为类似下面的这种/usr/local/lib/python3.6/site-packages/paddle_serving_server/serving-gpu-102-0.7.0/serving -## 性能优化 diff --git a/doc/Install_CN.md b/doc/Install_CN.md index da1201b6d1b23ba170a8198fd669c63fcd206b91..74ee28d2633e9b43cc9cd3495e57b20e9ebdbb18 100644 --- a/doc/Install_CN.md +++ b/doc/Install_CN.md @@ -65,21 +65,21 @@ pip3 install -r python/requirements.txt - post112 = CUDA11.2 + TensorRT8 ```shell -pip3 install paddle-serving-client==0.8.2 -i https://pypi.tuna.tsinghua.edu.cn/simple -pip3 install paddle-serving-app==0.8.2 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install 
paddle-serving-client==0.8.3 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install paddle-serving-app==0.8.3 -i https://pypi.tuna.tsinghua.edu.cn/simple # CPU Server -pip3 install paddle-serving-server==0.8.2 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install paddle-serving-server==0.8.3 -i https://pypi.tuna.tsinghua.edu.cn/simple # GPU Server,需要确认环境再选择执行哪一条,推荐使用CUDA 10.2的包 -pip3 install paddle-serving-server-gpu==0.8.2.post102 -i https://pypi.tuna.tsinghua.edu.cn/simple -pip3 install paddle-serving-server-gpu==0.8.2.post101 -i https://pypi.tuna.tsinghua.edu.cn/simple -pip3 install paddle-serving-server-gpu==0.8.2.post112 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install paddle-serving-server-gpu==0.8.3.post102 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install paddle-serving-server-gpu==0.8.3.post101 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install paddle-serving-server-gpu==0.8.3.post112 -i https://pypi.tuna.tsinghua.edu.cn/simple ``` 默认开启国内清华镜像源来加速下载,如果您使用HTTP代理可以关闭(`-i https://pypi.tuna.tsinghua.edu.cn/simple`) -如果需要使用develop分支编译的安装包,请从[最新安装包列表](./Latest_Packages_CN.md)中获取下载地址进行下载,使用`pip install`命令进行安装。如果您想自行编译,请参照[Paddle Serving编译文档](./Compile_CN.md)。 +如果需要使用develop分支编译的安装包,请从[下载Wheel包](./Latest_Packages_CN.md)中获取下载地址进行下载,使用`pip install`命令进行安装。如果您想自行编译,请参照[Paddle Serving编译文档](./Compile_CN.md)。 paddle-serving-server和paddle-serving-server-gpu安装包支持Centos 6/7, Ubuntu 16/18和Windows 10。 diff --git a/doc/Install_EN.md b/doc/Install_EN.md index 98e43277704080cfdc9391b0d30e9a27a5f70c7c..709a88789c8478f0a19948dbd13faadf238b5ff2 100644 --- a/doc/Install_EN.md +++ b/doc/Install_EN.md @@ -62,24 +62,22 @@ pip3 install -r python/requirements.txt Install the service whl package. There are three types of client, app and server. The server is divided into CPU and GPU. Choose one installation according to the environment. 
- GPU with CUDA10.2 + Cudnn7 + TensorRT6(Recommended) -- post101 = CUDA10.1 + TensorRT6 -- post112 = CUDA11.2 + TensorRT8 ```shell -pip3 install paddle-serving-client==0.8.2 -i https://pypi.tuna.tsinghua.edu.cn/simple -pip3 install paddle-serving-app==0.8.2 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install paddle-serving-client==0.8.3 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install paddle-serving-app==0.8.3 -i https://pypi.tuna.tsinghua.edu.cn/simple # CPU Server -pip3 install paddle-serving-server==0.8.2 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install paddle-serving-server==0.8.3 -i https://pypi.tuna.tsinghua.edu.cn/simple # GPU environments need to confirm the environment before choosing which one to execute -pip3 install paddle-serving-server-gpu==0.8.2.post102 -i https://pypi.tuna.tsinghua.edu.cn/simple -pip3 install paddle-serving-server-gpu==0.8.2.post101 -i https://pypi.tuna.tsinghua.edu.cn/simple -pip3 install paddle-serving-server-gpu==0.8.2.post112 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install paddle-serving-server-gpu==0.8.3.post102 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install paddle-serving-server-gpu==0.8.3.post101 -i https://pypi.tuna.tsinghua.edu.cn/simple +pip3 install paddle-serving-server-gpu==0.8.3.post112 -i https://pypi.tuna.tsinghua.edu.cn/simple ``` By default, the domestic Tsinghua mirror source is turned on to speed up the download. If you use a proxy, you can turn it off(`-i https://pypi.tuna.tsinghua.edu.cn/simple`). -If you need to use the installation package compiled by the develop branch, please download the download address from [Latest installation package list](./Latest_Packages_CN.md), and use the `pip install` command to install. If you want to compile by yourself, please refer to [Paddle Serving Compilation Document](./Compile_CN.md). 
+If you need to use the installation package compiled by the develop branch, please download the download address from [Download wheel packages](./Latest_Packages_EN.md), and use the `pip install` command to install. If you want to compile by yourself, please refer to [Paddle Serving Compilation Document](./Compile_CN.md). The paddle-serving-server and paddle-serving-server-gpu installation packages support Centos 6/7, Ubuntu 16/18 and Windows 10. @@ -124,3 +122,10 @@ pip3 install https://paddle-inference-lib.bj.bcebos.com/2.2.2/python/Linux/GPU/x | CUDA11.2 + CUDNN8 | 0.8.0-cuda11.2-cudnn8-devel | Ubuntu 16.04 | 2.2.2-gpu-cuda11.2-cudnn8 | Ubuntu 18.04 | For **Windows 10 users**, please refer to the document [Paddle Serving Guide for Windows Platform](Windows_Tutorial_CN.md). + +## 5.Installation Check +When the above steps are completed, you can use the command line to run the environment check function to automatically run the Paddle Serving related examples to verify the environment-related configuration. +``` +python3 -m paddle_serving_server.serve check +``` +For more information, please see[Installation Check](./Check_Env_CN.md) \ No newline at end of file diff --git a/doc/Latest_Packages_CN.md b/doc/Latest_Packages_CN.md index 9c87ad04eb2364c52dc3056e70585896eabc191c..31b6f39c828d4b7ae74b311623ddad4cac8897a5 100644 --- a/doc/Latest_Packages_CN.md +++ b/doc/Latest_Packages_CN.md @@ -1,50 +1,53 @@ -# Latest Wheel Packages +# Wheel包下载 + +(简体中文|[English](./Latest_Packages_EN.md)) ## Paddle-Serving-Server (x86 CPU/GPU) -Check the following table, and copy the address of hyperlink then run `pip3 install`. For example, if you want to install `paddle-serving-server-0.0.0-py3-non-any.whl`, right click the hyper link and copy the link address, the final command is `pip3 install https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl`. 
+查找下面表格,拷贝链接地址,并运行 `pip3 install`。例如要安装 `paddle-serving-server-0.0.0-py3-non-any.whl`, 请右键点击链接拷贝链接地址,最终命令是`pip3 install https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl`。 | | develop whl | develop bin | stable whl | stable bin | |---------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------| -| cpu-avx-mkl | [paddle_serving_server-0.0.0-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl) | [serving-cpu-avx-mkl-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-mkl-0.0.0.tar.gz) | [paddle_serving_server-0.8.2-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.8.2-py3-none-any.whl) | [serving-cpu-avx-mkl-0.8.2.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-mkl-0.8.2.tar.gz) | -| cpu-avx-openblas | [paddle_serving_server-0.0.0-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl) | [serving-cpu-avx-openblas-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-openblas-0.0.0.tar.gz) | [paddle_serving_server-0.8.2-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.8.2-py3-none-any.whl) | 
[serving-cpu-avx-openblas-0.8.2.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-openblas-0.8.2.tar.gz) | -| cpu-noavx-openblas | [paddle_serving_server-0.0.0-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl) | [ serving-cpu-noavx-openblas-0.0.0.tar.gz ]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-noavx-openblas-0.0.0.tar.gz) | [paddle_serving_server-0.8.2-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.8.2-py3-none-any.whl) | [serving-cpu-noavx-openblas-0.8.2.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-noavx-openblas-0.8.2.tar.gz) | -| cuda10.1-cudnn7-TensorRT6 | [paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl) | [serving-gpu-101-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-101-0.0.0.tar.gz) | [paddle_serving_server_gpu-0.8.2.post101-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.2.post101-py3-none-any.whl) | [serving-gpu-101-0.8.2.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-101-0.8.2.tar.gz) | -| cuda10.2-cudnn7-TensorRT6 | [paddle_serving_server_gpu-0.0.0.post102-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post102-py3-none-any.whl) | [serving-gpu-102-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-102-0.0.0.tar.gz) | [paddle_serving_server_gpu-0.8.2.post102-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.2.post102-py3-none-any.whl) | [serving-gpu-102-0.8.2.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-102-0.8.2.tar.gz) | -| cuda10.2-cudnn8-TensorRT7 | 
[paddle_serving_server_gpu-0.0.0.post1028-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post102-py3-none-any.whl) | [ serving-gpu-1028-0.0.0.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-1028-0.0.0.tar.gz ) | [paddle_serving_server_gpu-0.8.2.post1028-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.2.post102-py3-none-any.whl) | [serving-gpu-1028-0.8.2.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-1028-0.8.2.tar.gz ) | -| cuda11.2-cudnn8-TensorRT8 | [paddle_serving_server_gpu-0.0.0.post112-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post112-py3-none-any.whl) | [ serving-gpu-112-0.0.0.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-112-0.0.0.tar.gz ) | [paddle_serving_server_gpu-0.8.2.post112-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.2.post112-py3-none-any.whl) | [serving-gpu-112-0.8.2.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-112-0.8.2.tar.gz ) | +| cpu-avx-mkl | [paddle_serving_server-0.0.0-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl) | [serving-cpu-avx-mkl-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-mkl-0.0.0.tar.gz) | [paddle_serving_server-0.8.3-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.8.3-py3-none-any.whl) | [serving-cpu-avx-mkl-0.8.3.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-mkl-0.8.3.tar.gz) | +| cpu-avx-openblas | [paddle_serving_server-0.0.0-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl) | 
[serving-cpu-avx-openblas-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-openblas-0.0.0.tar.gz) | [paddle_serving_server-0.8.3-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.8.3-py3-none-any.whl) | [serving-cpu-avx-openblas-0.8.3.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-openblas-0.8.3.tar.gz) | +| cpu-noavx-openblas | [paddle_serving_server-0.0.0-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl) | [ serving-cpu-noavx-openblas-0.0.0.tar.gz ]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-noavx-openblas-0.0.0.tar.gz) | [paddle_serving_server-0.8.3-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.8.3-py3-none-any.whl) | [serving-cpu-noavx-openblas-0.8.3.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-noavx-openblas-0.8.3.tar.gz) | +| cuda10.1-cudnn7-TensorRT6 | [paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl) | [serving-gpu-101-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-101-0.0.0.tar.gz) | [paddle_serving_server_gpu-0.8.3.post101-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.3.post101-py3-none-any.whl) | [serving-gpu-101-0.8.3.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-101-0.8.3.tar.gz) | +| cuda10.2-cudnn7-TensorRT6 | [paddle_serving_server_gpu-0.0.0.post102-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post102-py3-none-any.whl) | [serving-gpu-102-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-102-0.0.0.tar.gz) | [paddle_serving_server_gpu-0.8.3.post102-py3-none-any.whl 
](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.3.post102-py3-none-any.whl) | [serving-gpu-102-0.8.3.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-102-0.8.3.tar.gz) | +| cuda10.2-cudnn8-TensorRT7 | [paddle_serving_server_gpu-0.0.0.post1028-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post102-py3-none-any.whl) | [ serving-gpu-1028-0.0.0.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-1028-0.0.0.tar.gz ) | [paddle_serving_server_gpu-0.8.3.post1028-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.3.post102-py3-none-any.whl) | [serving-gpu-1028-0.8.3.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-1028-0.8.3.tar.gz ) | +| cuda11.2-cudnn8-TensorRT8 | [paddle_serving_server_gpu-0.0.0.post112-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post112-py3-none-any.whl) | [ serving-gpu-112-0.0.0.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-112-0.0.0.tar.gz ) | [paddle_serving_server_gpu-0.8.3.post112-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.3.post112-py3-none-any.whl) | [serving-gpu-112-0.8.3.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-112-0.8.3.tar.gz ) | -### Binary Package -for most users, we do not need to read this section. But if you deploy your Paddle Serving on a machine without network, you will encounter a problem that the binary executable tar file cannot be downloaded. Therefore, here we give you all the download links for various environment. +### 二进制包(Binary Package) +大多数用户不会用到此章节。但是如果你在无网络的环境下部署Paddle Serving,在首次启动Serving时,无法下载二进制tar文件。因此,提供多种环境二进制包的下载链接,下载后传到无网络环境的指定目录下,即可使用。 -### How to setup SERVING_BIN offline? +### 如何离线设置SERVING_BIN? 
-- download the serving server whl package and bin package, and make sure they are for the same environment -- download the serving client whl and serving app whl, pay attention to the Python version. -- `pip install ` the serving and `tar xf ` the binary package, then `export SERVING_BIN=$PWD/serving-gpu-cuda11-0.0.0/serving` (take Cuda 11 as the example) +- 下载Serving Server Wheel包和二进制tar包,确保它们与环境是一致的 +- 下载Serving Client Wheel包和Serving App wheel包, 同时注意Python版本要一致. +- `pip install ` 所有Wheel包 and `tar xf ` 二进制tar包, 然后`export SERVING_BIN=$PWD/serving-gpu-cuda11-0.0.0/serving` (以Cuda 11为例) -## paddle-serving-client +## paddle-serving-client Wheel包 | | develop whl | stable whl | |-----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------| -| Python3.6 | [paddle_serving_client-0.0.0-cp36-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp36-none-any.whl) | [paddle_serving_client-0.8.2-cp36-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.2-cp36-none-any.whl) | -| Python3.7 | [paddle_serving_client-0.0.0-cp37-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp37-none-any.whl) | [paddle_serving_client-0.8.2-cp37-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.2-cp37-none-any.whl) | -| Python3.8 | [paddle_serving_client-0.0.0-cp38-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp38-none-any.whl) | [paddle_serving_client-0.8.2-cp38-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.2-cp38-none-any.whl) | -| Python3.9 | 
[paddle_serving_client-0.0.0-cp39-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp39-none-any.whl) | [paddle_serving_client-0.8.2-cp39-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.2-cp38-none-any.whl) | -## paddle-serving-app +| Python3.6 | [paddle_serving_client-0.0.0-cp36-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp36-none-any.whl) | [paddle_serving_client-0.8.3-cp36-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.3-cp36-none-any.whl) | +| Python3.7 | [paddle_serving_client-0.0.0-cp37-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp37-none-any.whl) | [paddle_serving_client-0.8.3-cp37-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.3-cp37-none-any.whl) | +| Python3.8 | [paddle_serving_client-0.0.0-cp38-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp38-none-any.whl) | [paddle_serving_client-0.8.3-cp38-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.3-cp38-none-any.whl) | +| Python3.9 | [paddle_serving_client-0.0.0-cp39-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp39-none-any.whl) | [paddle_serving_client-0.8.3-cp39-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.3-cp39-none-any.whl) | + +## paddle-serving-app Wheel包
[paddle_serving_app-0.0.0-py3-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_app-0.0.0-py3-none-any.whl) | [ paddle_serving_app-0.8.2-py3-none-any.whl ]( https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_app-0.8.2-py3-none-any.whl) | +| Python3 | [paddle_serving_app-0.0.0-py3-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_app-0.0.0-py3-none-any.whl) | [ paddle_serving_app-0.8.3-py3-none-any.whl ]( https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_app-0.8.3-py3-none-any.whl) | -## Baidu Kunlun user -for kunlun user who uses arm-xpu or x86-xpu can download the wheel packages as follows. Users should use the xpu-beta docker [DOCKER IMAGES](./Docker_Images_CN.md) -**We only support Python 3.6 for Kunlun Users.** +## 百度昆仑芯片 +对于使用百度昆仑芯片的用户, 通过以下方式下载arm-xpu 或 x86-xpu Wheel包。选择 xpu-beta docker [DOCKER镜像](./Docker_Images_CN.md) +**昆仑环境仅支持python36** -### Wheel Package Links +### Wheel包链接 -for arm kunlun user +适用ARM CPU环境的昆仑Wheel包: ``` # paddle-serving-server https://paddle-serving.bj.bcebos.com/whl/xpu/arm/paddle_serving_server_xpu-0.0.0.post2-py3-none-any.whl @@ -57,9 +60,9 @@ https://paddle-serving.bj.bcebos.com/whl/xpu/arm/paddle_serving_app-0.0.0-py3-no https://paddle-serving.bj.bcebos.com/bin/serving-xpu-aarch64-0.0.0.tar.gz ``` -for x86 kunlun user +适用于x86 CPU环境的昆仑Wheel包: ``` -https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_xpu-0.8.2.post2-py3-none-any.whl +https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_xpu-0.8.3.post2-py3-none-any.whl ``` diff --git a/doc/Latest_Packages_EN.md b/doc/Latest_Packages_EN.md new file mode 100644 index 0000000000000000000000000000000000000000..68f6f6d6805e1d14ae93e4d7c80e7ed0a1bbc506 --- /dev/null +++ b/doc/Latest_Packages_EN.md @@ -0,0 +1,68 @@ +# Download Wheel Packages + +(English|[简体中文](./Latest_Packages_CN.md)) + +## Paddle-Serving-Server (x86 CPU/GPU) + +Check the following table, and copy the 
address of hyperlink then run `pip3 install`. For example, if you want to install `paddle-serving-server-0.0.0-py3-non-any.whl`, right click the hyper link and copy the link address, the final command is `pip3 install https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl`. + +| | develop whl | develop bin | stable whl | stable bin | +|---------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------| +| cpu-avx-mkl | [paddle_serving_server-0.0.0-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl) | [serving-cpu-avx-mkl-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-mkl-0.0.0.tar.gz) | [paddle_serving_server-0.8.3-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.8.3-py3-none-any.whl) | [serving-cpu-avx-mkl-0.8.3.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-mkl-0.8.3.tar.gz) | +| cpu-avx-openblas | [paddle_serving_server-0.0.0-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl) | [serving-cpu-avx-openblas-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-openblas-0.0.0.tar.gz) | [paddle_serving_server-0.8.3-py3-none-any.whl 
](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.8.3-py3-none-any.whl) | [serving-cpu-avx-openblas-0.8.3.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-avx-openblas-0.8.3.tar.gz) | +| cpu-noavx-openblas | [paddle_serving_server-0.0.0-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.0.0-py3-none-any.whl) | [ serving-cpu-noavx-openblas-0.0.0.tar.gz ]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-noavx-openblas-0.0.0.tar.gz) | [paddle_serving_server-0.8.3-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server-0.8.3-py3-none-any.whl) | [serving-cpu-noavx-openblas-0.8.3.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-cpu-noavx-openblas-0.8.3.tar.gz) | +| cuda10.1-cudnn7-TensorRT6 | [paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl) | [serving-gpu-101-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-101-0.0.0.tar.gz) | [paddle_serving_server_gpu-0.8.3.post101-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.3.post101-py3-none-any.whl) | [serving-gpu-101-0.8.3.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-101-0.8.3.tar.gz) | +| cuda10.2-cudnn7-TensorRT6 | [paddle_serving_server_gpu-0.0.0.post102-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post102-py3-none-any.whl) | [serving-gpu-102-0.0.0.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-102-0.0.0.tar.gz) | [paddle_serving_server_gpu-0.8.3.post102-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.3.post102-py3-none-any.whl) | 
[serving-gpu-102-0.8.3.tar.gz](https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-102-0.8.3.tar.gz) | +| cuda10.2-cudnn8-TensorRT7 | [paddle_serving_server_gpu-0.0.0.post1028-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post102-py3-none-any.whl) | [ serving-gpu-1028-0.0.0.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-1028-0.0.0.tar.gz ) | [paddle_serving_server_gpu-0.8.3.post1028-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.3.post102-py3-none-any.whl) | [serving-gpu-1028-0.8.3.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-1028-0.8.3.tar.gz ) | +| cuda11.2-cudnn8-TensorRT8 | [paddle_serving_server_gpu-0.0.0.post112-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.0.0.post112-py3-none-any.whl) | [ serving-gpu-112-0.0.0.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-112-0.0.0.tar.gz ) | [paddle_serving_server_gpu-0.8.3.post112-py3-none-any.whl ](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_gpu-0.8.3.post112-py3-none-any.whl) | [serving-gpu-112-0.8.3.tar.gz]( https://paddle-serving.bj.bcebos.com/test-dev/bin/serving-gpu-112-0.8.3.tar.gz ) | + +### Binary Package +for most users, we do not need to read this section. But if you deploy your Paddle Serving on a machine without network, you will encounter a problem that the binary executable tar file cannot be downloaded. Therefore, here we give you all the download links for various environment. + +### How to setup SERVING_BIN offline? + +- download the serving server whl package and bin package, and make sure they are for the same environment +- download the serving client whl and serving app whl, pay attention to the Python version. 
+- `pip install ` the serving and `tar xf ` the binary package, then `export SERVING_BIN=$PWD/serving-gpu-cuda11-0.0.0/serving` (take Cuda 11 as the example) + +## paddle-serving-client + +| | develop whl | stable whl | +|-----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------| +| Python3.6 | [paddle_serving_client-0.0.0-cp36-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp36-none-any.whl) | [paddle_serving_client-0.8.3-cp36-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.3-cp36-none-any.whl) | +| Python3.7 | [paddle_serving_client-0.0.0-cp37-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp37-none-any.whl) | [paddle_serving_client-0.8.3-cp37-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.3-cp37-none-any.whl) | +| Python3.8 | [paddle_serving_client-0.0.0-cp38-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp38-none-any.whl) | [paddle_serving_client-0.8.3-cp38-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.3-cp38-none-any.whl) | +| Python3.9 | [paddle_serving_client-0.0.0-cp39-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.0.0-cp39-none-any.whl) | [paddle_serving_client-0.8.3-cp39-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_client-0.8.3-cp39-none-any.whl) | +## paddle-serving-app + +| | develop whl | stable whl | 
+|---------|------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------| +| Python3 | [paddle_serving_app-0.0.0-py3-none-any.whl](https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_app-0.0.0-py3-none-any.whl) | [ paddle_serving_app-0.8.3-py3-none-any.whl ]( https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_app-0.8.3-py3-none-any.whl) | + + +## Baidu Kunlun user +for kunlun user who uses arm-xpu or x86-xpu can download the wheel packages as follows. Users should use the xpu-beta docker [DOCKER IMAGES](./Docker_Images_CN.md) +**We only support Python 3.6 for Kunlun Users.** + +### Wheel Package Links + +for arm kunlun user +``` +# paddle-serving-server +https://paddle-serving.bj.bcebos.com/whl/xpu/arm/paddle_serving_server_xpu-0.0.0.post2-py3-none-any.whl +# paddle-serving-client +https://paddle-serving.bj.bcebos.com/whl/xpu/arm/paddle_serving_client-0.0.0-cp36-none-any.whl +# paddle-serving-app +https://paddle-serving.bj.bcebos.com/whl/xpu/arm/paddle_serving_app-0.0.0-py3-none-any.whl + +# SERVING BIN +https://paddle-serving.bj.bcebos.com/bin/serving-xpu-aarch64-0.0.0.tar.gz +``` + +for x86 kunlun user +``` +https://paddle-serving.bj.bcebos.com/test-dev/whl/paddle_serving_server_xpu-0.8.3.post2-py3-none-any.whl + +``` + + diff --git a/doc/Model_Zoo_CN.md b/doc/Model_Zoo_CN.md index 23472f26c8a60ad1113c3fcebcaeda7dc79d57c3..53db1d7752e2c46cc3deaa63d80f4dc07421dfad 100755 --- a/doc/Model_Zoo_CN.md +++ b/doc/Model_Zoo_CN.md @@ -55,6 +55,7 @@ | ch_ppocr_server_v2.0 | PaddleOCR | [Pipeline Serving](https://github.com/PaddlePaddle/PaddleOCR/blob/release/2.3/deploy/pdserving/README.md) | [model](https://github.com/PaddlePaddle/PaddleOCR) | | deeplabv3 | PaddleSeg | [C++ Serving](../examples/C++/PaddleSeg/deeplabv3) | 
[.tar.gz](https://paddle-serving.bj.bcebos.com/paddle_hub_models/image/ImageSegmentation/deeplabv3.tar.gz) | | unet | PaddleSeg | [C++ Serving](../examples/C++/PaddleSeg/unet_for_image_seg) | [.tar.gz](https://paddle-serving.bj.bcebos.com/paddle_hub_models/image/ImageSegmentation/unet.tar.gz) | +| PPTSN_K400 | PaddleVideo | [Pipeline Serving](../examples/Pipeline/PaddleVideo/PPTSN_K400) | [model](https://paddle-serving.bj.bcebos.com/model/PaddleVideo/PPTSN_K400.tar) | - 请参考 [example](../examples) 查看详情 @@ -69,3 +70,4 @@ - [PaddleRec](https://github.com/PaddlePaddle/PaddleRec) - [PaddleSeg](https://github.com/PaddlePaddle/PaddleSeg) - [PaddleGAN](https://github.com/PaddlePaddle/PaddleGAN) + - [PaddleVideo](https://github.com/PaddlePaddle/PaddleVideo) diff --git a/doc/Model_Zoo_EN.md b/doc/Model_Zoo_EN.md index 10baea39c3333f295c9be3b15a0f093ef1b5d0af..1ba48b3089510e197833cdc1a3069bdc51cd8101 100755 --- a/doc/Model_Zoo_EN.md +++ b/doc/Model_Zoo_EN.md @@ -53,6 +53,7 @@ Special thanks to the [Padddle wholechain](https://www.paddlepaddle.org.cn/whole | ch_ppocr_server_v2.0 | PaddleOCR | [Pipeline Serving](https://github.com/PaddlePaddle/PaddleOCR/blob/release/2.3/deploy/pdserving/README.md) | [model](https://github.com/PaddlePaddle/PaddleOCR) | | deeplabv3 | PaddleSeg | [C++ Serving](../examples/C++/PaddleSeg/deeplabv3) | [.tar.gz](https://paddle-serving.bj.bcebos.com/paddle_hub_models/image/ImageSegmentation/deeplabv3.tar.gz) | | unet | PaddleSeg | [C++ Serving](../examples/C++/PaddleSeg/unet_for_image_seg) | [.tar.gz](https://paddle-serving.bj.bcebos.com/paddle_hub_models/image/ImageSegmentation/unet.tar.gz) | +| PPTSN_K400 | PaddleVideo | [Pipeline Serving](../examples/Pipeline/PaddleVideo/PPTSN_K400) | [model](https://paddle-serving.bj.bcebos.com/model/PaddleVideo/PPTSN_K400.tar) | - Refer [example](../examples) for more details on above models. 
@@ -66,3 +67,4 @@ Special thanks to the [Padddle wholechain](https://www.paddlepaddle.org.cn/whole - [PaddleRec](https://github.com/PaddlePaddle/PaddleRec) - [PaddleSeg](https://github.com/PaddlePaddle/PaddleSeg) - [PaddleGAN](https://github.com/PaddlePaddle/PaddleGAN) + - [PaddleVideo](https://github.com/PaddlePaddle/PaddleVideo) diff --git a/doc/Offical_Docs/1-0_Introduction_CN.md b/doc/Offical_Docs/1-0_Introduction_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..f7a42558bc95c6c0fc7f40fc40fdd5b5b4d55a5b --- /dev/null +++ b/doc/Offical_Docs/1-0_Introduction_CN.md @@ -0,0 +1,58 @@ +# Serving介绍、概述 + +Paddle Serving 是飞桨官方推荐的服务化部署框架,围绕 AI 落地的最后一公里提供专业、可靠、易用的在线模型服务框架。 + +## 主要特性 +- 支持深度学习平台:支持 Paddle 框架训练模型,其他机器学习平台(Caffe、TensorFlow、ONNX、PyTorch)可通过 x2paddle 工具迁移模型。 +- 支持多种网络协议:HTTP、gRPC、bRPC等多种协议。 +- 支持多种语言 SDK:C++、Python、Java。 +- 适配多种计算硬件:x86(Intel) CPU、ARM CPU、Nvidia GPU、昆仑 XPU、华为昇腾 310/910、海光 DCU、Nvidia Jetson等多种硬件。 +- 部署平台:支持 Docker 和 Kubernetes 云端部署,提供多种环境的 Docker 开发镜像和云端部署案例。 +- 具有高性能设计:基于有向无环图(DAG)的异步流水线构建高性能服务化推理框架,具有多模型组合、异步调度、并发推理、动态批量和多卡多流等设计。 +- 提供模型安全部署解决方案:加密模型部署、鉴权校验、HTTPs 安全网关,并在实际项目中应用。 +- 提供大规模稀疏参数模型服务化部署方案:如推荐、广告等场景,具有高性能、大批量在线查询、离线增量版本更新、多版本管理和多表横向扩展能力。 +- 提供丰富的经典模型示例:如 PaddleOCR、PaddleClas、PaddleDetection、PaddleSeg、PaddleNLP、PaddleRec 等套件,共计40+个预训练精品模型,更多模型持续扩展。 + +## 开发流程 + +**一.环境安装** + +根据计算硬件、操作系统和软件驱动版本等环境差异,要选择正确的安装程序。提供多种系统环境安装、PIP 程序安装和源码编译安装三种方案,安装完成后可使用环境检查命令验证。 + +#### 1.标准环境安装方案,请参考[标准环境安装]() +#### 2.使用 PIP 安装,请参考[使用 PIP 安装]() +#### 3.源码编译安装,请参考[源码编译]() +#### 4.安装环境检查,请参考[环境检查]() + +**二.快速开发** + +环境安装完成后,参考二种模型开发示例快速开发程序,更多模型示例请参考[模型库]()。 + +#### 1.基于 C++ Serving 的单模型 Resnet_v2_50 部署示例,请参考[Resnet_v2_50]() +#### 2.基于 Python Pipeline 多模型组合 OCR_v2 部署示例,请参考[OCR]() + +**三.服务部署** + +经过开发和测试后,程序要在服务器上部署,Paddle Serving 提供基于Kubernetes集群部署案例,请参考[Kubernetes集群部署]() + +多个服务入口安全网关部署,请参考[安全网关]() + + +## 功能说明 + +**一.基础功能** + +本章节系统介绍 Paddle Serving 提供的基础功能,每种功能的使用方法和功能选项,详情请参考[基础功能]() + +**二.进阶 C++ Serving** + +本章节详细介绍 C++ 
Serving 的多种高级功能特性,以及设计方案、使用方法和性能调优等,详情请参考[进阶 C++ Serving]() + +**三.进阶 Python Pipeline** + +本章节详细介绍 Python Pipeline 的多种高级功能特性,以及设计方案、使用方法和性能调优等,详情请参考[进阶 Python Pipeline]() + +**四.大规模稀疏参数索引服务** + +本章节介绍稀疏参数索引场景,如推荐、广告系统中大规模 Embedding 查询的设计与使用方案,详情请参考[大规模稀疏参数索引服务]() + diff --git a/doc/Offical_Docs/1-1_Architecture_CN.md b/doc/Offical_Docs/1-1_Architecture_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..032d7a2c66e6fbf4f1502a6ffcb76832cbb5f3b7 --- /dev/null +++ b/doc/Offical_Docs/1-1_Architecture_CN.md @@ -0,0 +1,45 @@ +# 技术架构 + +## 设计目标 + +- 实现高性能的在线服务化推理框架。既能满足同步请求低延时快速响应,又能满足异步高吞吐的批量处理,大幅提高计算资源利用率。与同类竞品相比有性能优势。 + +- 覆盖工业级 AI 应用场景。工业场景对基础功能和模型有更高的要求,进阶功能包括模型安全、授权访问、适配多种计算硬件和系统环境、云端集群化部署和弹性伸缩能力等;另外,要求服务化框架支持种类型的深度学习模型,如 CV、NLP、推荐系统等 + +- 具备良好的服务可靠性。服务可靠性是服务运行稳定程度的一项重要指标,是对服务质量的一种测量,也是满足工业级场景使用的前提。 + +- 简单易用。以极低的成本部署模型,能与训练框架无缝打通的预测部署 API。通过参考大量的部署示例快速上手。 + +## 技术栈 +Paddle Serving 的技术体系有7个层级,计算硬件、安装部署、操作系统、高性能计算库、推理引擎、Paddle Serving 框架与产业应用。Serving 集成 Paddle Inference 和 Paddle Lite 高性能推理框架,支持在多种异构硬件和多种操作中部署。 +

+
+ +
+

+ +Paddle Serving 框架提供多种编程语言客户端 SDK 方便调用和系统集成,包括 Python、C++ 和 Java 语言。使用3种协议 HTTP、gRPC 和 bRPC 与服务端交互。 + +

+
+ +
+

+ +为了满足不同场景的用户需求,服务端设计了2种框架 C++ Serving 和 Python Pipeline。技术选型方法参见下表: + +| 框架 | 响应时间 | 吞吐 | 开发效率 | 资源利用率 | 应用场景| +|-----|------|-----|-----|------|------| +|C++ Serving | 低 | 高 | 低 | 高 | 高并发低延时场景,功能完善,适合大型服务架构| +|Python Pipeline | 高 | 较高 | 高 | 高 | 开发效率高,吞吐量较高,适合单算子多模型组合场景| + + +性能指标说明: +1. 响应时间(ms):单次请求平均响应时间,计算50、90、95、99分位响应时长,数值越低越好。 +2. 吞吐(QPS):服务处理请求的效率,单位时间内处理请求数量,越高越好。 +3. 开发效率:使用不同开发语言完成相同工作时间不同,包括开发、调试、维护的效率等,越高越好。 +4. 资源利用率:部署一个服务对资源利用率,资源利用率低是对资源的浪费,数值越高越好。 + +C++ Serving 完整设计与功能参见[C++ Serving 设计与实现](../C++_Serving/Introduction_CN.md) + +Python Pipeline 完整设计与功能参见[Python Pipeline 设计与实现](../Python_Pipeline/Pipeline_Design_CN.md) diff --git a/doc/Offical_Docs/1-2_Benchmark.md b/doc/Offical_Docs/1-2_Benchmark.md new file mode 100644 index 0000000000000000000000000000000000000000..10577387f4ea0d8ec9598722fc6ebf46812c01f2 --- /dev/null +++ b/doc/Offical_Docs/1-2_Benchmark.md @@ -0,0 +1,59 @@ +# Benchmark + +## C++ Serving 性能测试 + +**一.测试环境** +- 机器型号:4 × Tesla P4-8GB ,48 core Intel(R) Xeon(R) Gold 5117 @ 2.00GHz +- CUDA:11.0,cuDNN:v8.0.4 +- Serving:v0.7.0 +- Paddle:v2.2.0 +- 模型:ResNet_v2_50 +- batch:1 +- 使用的测试代码和使用的数据集:[resnet_v2_50](../../examples/C++/PaddleClas/resnet_v2_50) + +**二.测试方法** +- 请求数量递增:不断增加 client 数量,指标稳定后统计 client 的耗时信息 +- 竞品对比:C++ Serving(蓝色) 与 Tenserflow Serving(灰色)都是 C++ 实现,且同为业界主流 Serving 框架 +- 吞吐性能(QPS):折线图,数值越大表示每秒钟处理的请求数量越大,性能就越好 +- 平均处理时延(ms):柱状图,数值越大表示单个请求处理时间越长,性能就越差 +- 同步模式:网络线程同步处理,保证显存占用相同的情况下,开启最大线程数 +- 异步模式:异步线程处理方式,保证显存占用相同,最大批量为32,异步线程数为2 + +**三.同步模式** + +结论:同步模型默认参数配置情况下,C++ Serving 吞吐和平均时延指标均优于 Tensorflow Serving。 + +

+
+ +
+

+ +|client_num | model_name | qps(samples/s) | mean(ms) | model_name | qps(samples/s) | mean(ms) | +| --- | --- | --- | --- | --- | --- | --- | +| 10 | pd-serving | 111.336 | 89.787| tf-serving| 84.632| 118.13| +|30 |pd-serving |165.928 |180.761 |tf-serving |106.572 |281.473| +|50| pd-serving| 207.244| 241.211| tf-serving| 80.002 |624.959| +|70 |pd-serving |214.769 |325.894 |tf-serving |105.17 |665.561| +|100| pd-serving| 235.405| 424.759| tf-serving| 93.664 |1067.619| +|150 |pd-serving |239.114 |627.279 |tf-serving |86.312 |1737.848| + +**四.异步模式** + +结论:client数据较少时,Tensorflow Serving 性能略优于 C++ Serving ,但当 client 并发数超过70后,Tensorflow Serving 服务出现大量超时,而 C++ Serving 仍能正常运行 + +

+
+ +
+

+ +|client_num | model_name | qps(samples/s) | mean(ms) | model_name | qps(samples/s) | mean(ms) | +| --- | --- | --- | --- | --- | --- | --- | +|10| pd-serving| 130.631| 76.502| tf-serving |172.64 |57.916| +|30| pd-serving| 201.062| 149.168| tf-serving| 241.669| 124.128| +|50| pd-serving| 286.01| 174.764| tf-serving |278.744 |179.367| +|70| pd-serving| 313.58| 223.187| tf-serving| 298.241| 234.7| +|100| pd-serving| 323.369| 309.208| tf-serving| 0| ∞| +|150| pd-serving| 328.248| 456.933| tf-serving| 0| ∞| + diff --git a/doc/Offical_Docs/Index_CN.md b/doc/Offical_Docs/Index_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..5d57a91449ef87d606df6761b2f4c9c667c1b82f --- /dev/null +++ b/doc/Offical_Docs/Index_CN.md @@ -0,0 +1,111 @@ +# 安装指南 + +## 安装说明 + +本说明将指导您在64位操作系统编译和安装 PaddleServing。 +**强烈建议**您在**Docker 内构建** Paddle Serving,更多镜像请查看[Docker镜像列表](Docker_Images_CN.md)。 + +**一. Python 和 pip 版本:** + +* Python 的版本支持 3.6/3.7/3.8/3.9 +* Python 具有 pip, 且 pip 的版本要求 20.2.2+ +* Python 和 pip 要求是 64 位版本 + +**二. PaddleServing 对 GPU 支持情况:** + +* 目前 **PaddleServing** 支持 **NVIDIA** 显卡的 **CUDA** 驱动和 **AMD** 显卡的 **ROCm** 架构 +* 目前支持CUDA 10.1/10.2/11.2 + + +**第一种安装方式:使用 pip 安装** + +您可以选择“使用 pip 安装”、“从源码编译安装” 两种方式中的任意一种方式进行安装。 + +本节将介绍使用 pip 的安装方式。 +以下示例中 GPU 环境均为 cuda10.2-cudnn7 + +1. 启动开发镜像 + + **CPU:** + ``` + # 启动 CPU Docker + docker pull paddlepaddle/serving:0.8.0-devel + docker run -p 9292:9292 --name test -dit paddlepaddle/serving:0.8.0-devel bash + docker exec -it test bash + git clone https://github.com/PaddlePaddle/Serving + ``` + **GPU:** + ``` + # 启动 GPU Docker + docker pull paddlepaddle/serving:0.8.0-cuda10.2-cudnn7-devel + nvidia-docker run -p 9292:9292 --name test -dit paddlepaddle/serving:0.8.0-cuda10.2-cudnn7-devel bash + nvidia-docker exec -it test bash + git clone https://github.com/PaddlePaddle/Serving + ``` + +2. 检查 Python 的版本 + + 使用以下命令确认是 3.6/3.7/3.8/3.9 + + python3 --version + +3. 
检查 pip 的版本,确认是 20.2.2+ + + python3 -m ensurepip + python3 -m pip --version + +4. 安装所需的 pip 依赖 + + ``` + cd Serving + pip3 install -r python/requirements.txt + ``` + +5. 安装服务 whl 包,共有3种 client、app、server,Server 分为 CPU 和 GPU,GPU 包根据您的环境选择一种安装 + + - post102 = CUDA10.2 + Cudnn7 + TensorRT6(推荐) + - post101 = CUDA10.1 + TensorRT6 + - post112 = CUDA11.2 + TensorRT8 + + ```shell + pip3 install paddle-serving-client==0.8.2 -i https://pypi.tuna.tsinghua.edu.cn/simple + pip3 install paddle-serving-app==0.8.2 -i https://pypi.tuna.tsinghua.edu.cn/simple + + # CPU Server + pip3 install paddle-serving-server==0.8.2 -i https://pypi.tuna.tsinghua.edu.cn/simple + + # GPU Server,需要确认环境再选择执行哪一条,推荐使用CUDA 10.2的包 + pip3 install paddle-serving-server-gpu==0.8.2.post102 -i https://pypi.tuna.tsinghua.edu.cn/simple + pip3 install paddle-serving-server-gpu==0.8.2.post101 -i https://pypi.tuna.tsinghua.edu.cn/simple + pip3 install paddle-serving-server-gpu==0.8.2.post112 -i https://pypi.tuna.tsinghua.edu.cn/simple + ``` + + 默认开启国内清华镜像源来加速下载,如果您使用 HTTP 代理可以关闭(`-i https://pypi.tuna.tsinghua.edu.cn/simple`) + +6. 安装 Paddle 相关 Python 库 + **当您使用`paddle_serving_client.convert`命令或者`Python Pipeline 框架`时才需要安装。** + ``` + # CPU 环境请执行 + pip3 install paddlepaddle==2.2.2 + + # GPU CUDA 10.2环境请执行 + pip3 install paddlepaddle-gpu==2.2.2 + ``` + **注意**: 如果您的 Cuda 版本不是10.2,或者您需要在 GPU 环境上使用 TensorRT,请勿直接执行上述命令,需要参考[Paddle-Inference官方文档-下载安装Linux预测库](https://paddleinference.paddlepaddle.org.cn/master/user_guides/download_lib.html#python)选择相应的 GPU 环境的 url 链接并进行安装。 + +7. 安装完成后的环境检查 + 当以上步骤均完成后可使用命令行运行环境检查功能,自动运行 Paddle Serving 相关示例,进行环境相关配置校验。 + ``` + python3 -m paddle_serving_server.serve check + ``` + 详情请参考[环境检查文档](./Check_Env_CN.md) + +8. 
更多帮助信息请参考: + + + + +**第二种安装方式:使用源代码编译安装** + +- 如果您只是使用 PaddleServing ,建议使用 **pip** 安装即可。 +- 如果您有开发 PaddleServing 的需求,请参考:[从源码编译] \ No newline at end of file diff --git a/doc/Run_On_Kubernetes_CN.md b/doc/Run_On_Kubernetes_CN.md index 20de18aa800e46eeb90aed6f3c1c3d77f65a5fad..0194ae87a13da0328785bfee1e5293e16efa147b 100644 --- a/doc/Run_On_Kubernetes_CN.md +++ b/doc/Run_On_Kubernetes_CN.md @@ -29,7 +29,7 @@ kubectl apply -f https://bit.ly/kong-ingress-dbless bash tools/generate_runtime_docker.sh --env cuda10.1 --python 3.7 --image_name serving_runtime:cuda10.1-py37 --paddle 2.2.0 --serving 0.8.0 ``` -会生成 cuda10.1,python 3.7,serving版本0.7.0 还有 paddle版本2.2.0的运行镜像。如果有其他疑问,可以执行下列语句得到帮助信息。强烈建议您使用最新的paddle和serving的版本(2个版本是对应的如paddle 2.2.x 与serving 0.7.0对应,paddle 2.1.x 与 serving 0.6.x对应),因为更早的版本上出现的错误只在最新版本修复,无法在历史版本中修复。 +会生成 cuda10.1,python 3.7,serving版本0.8.0 还有 paddle版本2.2.2的运行镜像。如果有其他疑问,可以执行下列语句得到帮助信息。强烈建议您使用最新的paddle和serving的版本(2个版本是对应的如paddle 2.2.0 与serving 0.7.x对应,paddle 2.2.2 与 serving 0.8.x对应),因为更早的版本上出现的错误只在最新版本修复,无法在历史版本中修复。 ``` bash tools/generate_runtime_docker.sh --help diff --git a/doc/Serving_Auth_Docker_CN.md b/doc/Serving_Auth_Docker_CN.md index d54468c93b92ef1e8f5813ef4256de74562f9f25..c05a5e82f6c6235a60ef12f4674cf61b2b35acc3 100644 --- a/doc/Serving_Auth_Docker_CN.md +++ b/doc/Serving_Auth_Docker_CN.md @@ -30,8 +30,8 @@ ee59a3dd4806 registry.baidubce.com/serving_dev/serving-runtime:cpu-py36 665fd8a34e15 redis:latest "docker-entrypoint.s…" About an hour ago Up About an hour 0.0.0.0:6379->6379/tcp anquan_redis_1 ``` -其中我们之前serving容器 以 9393端口暴露,KONG网关的端口是8443, KONG的Web控制台的端口是8001。接下来我们在浏览器访问 `https://$IP_ADDR:8001`, 其中 IP_ADDR就是宿主机的IP。 - +其中我们之前serving容器 以 9393端口暴露,KONG网关的端口是8443, KONG的Web控制台的端口是8001。接下来我们在浏览器访问 `https://$IP_ADDR:8005`, 其中 IP_ADDR就是宿主机的IP。 +>> **注意**: 第一次登录的时候可能需要输入 Name : admin 以及 Kong Admin URL : http://kong:8001 可以看到在注册结束后,登陆,看到了 DASHBOARD,我们先看SERVICES,可以看到`serving_service`,这意味着我们端口在9393的Serving服务已经在KONG当中被注册。 diff --git 
a/doc/images/tech_stack.png b/doc/images/tech_stack.png new file mode 100644 index 0000000000000000000000000000000000000000..1c163ba580221022562602b581f40fb79af36a8d Binary files /dev/null and b/doc/images/tech_stack.png differ diff --git a/doc/images/wechat_group_1.jpeg b/doc/images/wechat_group_1.jpeg index 9173def0c6bc0fda44c268e1e00b4dcb0c667fd8..eb58bfe81ce5bd2694ac5de20518c53c6ae23a8e 100644 Binary files a/doc/images/wechat_group_1.jpeg and b/doc/images/wechat_group_1.jpeg differ diff --git a/examples/Pipeline/PaddleClas/DarkNet53-encryption/README.md b/examples/Pipeline/PaddleClas/DarkNet53-encryption/README.md new file mode 100644 index 0000000000000000000000000000000000000000..4d6ee01e9b5302839d2a92b475cf50a8d7f24a8e --- /dev/null +++ b/examples/Pipeline/PaddleClas/DarkNet53-encryption/README.md @@ -0,0 +1,23 @@ +# Imagenet Pipeline WebService + +This document will takes Imagenet service as an example to introduce how to use Pipeline WebService. + +## Get model +``` +sh get_model.sh +python encrypt.py +``` + +## Start server + +``` +python -m paddle_serving_server.serve --model encrypt_server/ --port 9400 --encryption_rpc_port 9401 --use_encryption_model & +python web_service.py &>log.txt & +``` + +## client test +``` +python http_client.py +``` + +if you configure the api gateway, you can use `https_client.py` diff --git a/examples/Pipeline/PaddleClas/DarkNet53-encryption/README_CN.md b/examples/Pipeline/PaddleClas/DarkNet53-encryption/README_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..b3bc900b8d5293a90a1b773aa3995c66715ee3ee --- /dev/null +++ b/examples/Pipeline/PaddleClas/DarkNet53-encryption/README_CN.md @@ -0,0 +1,24 @@ +# Imagenet Pipeline WebService + +这里以 Imagenet 服务为例来介绍 Pipeline WebService 的使用。 + +## 获取模型 +``` +sh get_model.sh +python encrypt.py +``` + +## 启动服务 + +``` +python -m paddle_serving_server.serve --model encrypt_server/ --port 9400 --encryption_rpc_port 9401 --use_encryption_model & +python web_service.py 
&>log.txt & +``` + +## 测试 +``` +python http_client.py +``` +如果您已经配置好了api gateway, 您可以使用 `https_client.py` + +~ diff --git a/examples/Pipeline/PaddleClas/DarkNet53-encryption/config.yml b/examples/Pipeline/PaddleClas/DarkNet53-encryption/config.yml new file mode 100644 index 0000000000000000000000000000000000000000..8ea7f08df7b19f1dcb77a9684e391320f4a153e3 --- /dev/null +++ b/examples/Pipeline/PaddleClas/DarkNet53-encryption/config.yml @@ -0,0 +1,25 @@ +#worker_num, 最大并发数。当build_dag_each_worker=True时, 框架会创建worker_num个进程,每个进程内构建grpcSever和DAG +##当build_dag_each_worker=False时,框架会设置主线程grpc线程池的max_workers=worker_num +worker_num: 1 + +#http端口, rpc_port和http_port不允许同时为空。当rpc_port可用且http_port为空时,不自动生成http_port +http_port: 18080 +rpc_port: 9993 + +dag: + #op资源类型, True, 为线程模型;False,为进程模型 + is_thread_op: False +op: + imagenet: + #并发数,is_thread_op=True时,为线程并发;否则为进程并发 + concurrency: 1 + client_type: brpc + retry: 1 + timeout: 3000 + server_endpoints: ["127.0.0.1:9400"] + client_config: "encrypt_client" + fetch_list: ["save_infer_model/scale_0.tmp_0"] + batch_size: 1 + auto_batching_timeout: 2000 + use_encryption_model: True + encryption_key: "./key" diff --git a/examples/Pipeline/PaddleClas/DarkNet53-encryption/daisy.jpg b/examples/Pipeline/PaddleClas/DarkNet53-encryption/daisy.jpg new file mode 100644 index 0000000000000000000000000000000000000000..7edeca63e5f32e68550ef720d81f59df58a8eabc Binary files /dev/null and b/examples/Pipeline/PaddleClas/DarkNet53-encryption/daisy.jpg differ diff --git a/examples/Pipeline/PaddleClas/DarkNet53-encryption/encrypt.py b/examples/Pipeline/PaddleClas/DarkNet53-encryption/encrypt.py new file mode 100644 index 0000000000000000000000000000000000000000..403d1cb330d7f09f38d00cc1f3993dacac4367ef --- /dev/null +++ b/examples/Pipeline/PaddleClas/DarkNet53-encryption/encrypt.py @@ -0,0 +1,15 @@ +from paddle_serving_client.io import inference_model_to_serving + + +def serving_encryption(): + inference_model_to_serving( + 
dirname="./DarkNet53/ppcls_model/", + model_filename="__model__", + params_filename="./__params__", + serving_server="encrypt_server", + serving_client="encrypt_client", + encryption=True) + + +if __name__ == "__main__": + serving_encryption() diff --git a/examples/Pipeline/PaddleClas/DarkNet53-encryption/get_model.sh b/examples/Pipeline/PaddleClas/DarkNet53-encryption/get_model.sh new file mode 100644 index 0000000000000000000000000000000000000000..b19bd02d24ad82188a788c0a825616d21b6807b8 --- /dev/null +++ b/examples/Pipeline/PaddleClas/DarkNet53-encryption/get_model.sh @@ -0,0 +1,5 @@ +wget --no-check-certificate https://paddle-serving.bj.bcebos.com/model/DarkNet53.tar +tar -xf DarkNet53.tar + +wget --no-check-certificate https://paddle-serving.bj.bcebos.com/imagenet-example/image_data.tar.gz +tar -xzvf image_data.tar.gz diff --git a/examples/Pipeline/PaddleClas/DarkNet53-encryption/http_client.py b/examples/Pipeline/PaddleClas/DarkNet53-encryption/http_client.py new file mode 100644 index 0000000000000000000000000000000000000000..d1507a907ee5f49ffbe8b04af0c154ed4e5938d0 --- /dev/null +++ b/examples/Pipeline/PaddleClas/DarkNet53-encryption/http_client.py @@ -0,0 +1,19 @@ +import numpy as np +import requests +import json +import cv2 +import base64 +import os + +def cv2_to_base64(image): + return base64.b64encode(image).decode('utf8') + +if __name__ == "__main__": + url = "http://127.0.0.1:18080/imagenet/prediction" + with open(os.path.join(".", "daisy.jpg"), 'rb') as file: + image_data1 = file.read() + image = cv2_to_base64(image_data1) + data = {"key": ["image"], "value": [image]} + for i in range(1): + r = requests.post(url=url, data=json.dumps(data)) + print(r.json()) diff --git a/examples/Pipeline/PaddleClas/DarkNet53-encryption/https_client.py b/examples/Pipeline/PaddleClas/DarkNet53-encryption/https_client.py new file mode 100644 index 0000000000000000000000000000000000000000..f963d93dcf87bf0c2e93755953f99ee27a531aa8 --- /dev/null +++ 
b/examples/Pipeline/PaddleClas/DarkNet53-encryption/https_client.py @@ -0,0 +1,20 @@ +import numpy as np +import requests +import json +import cv2 +import base64 +import os + +def cv2_to_base64(image): + return base64.b64encode(image).decode('utf8') + +if __name__ == "__main__": + url = "https://10.21.8.132:8443/image-clas/imagenet/prediction" + with open(os.path.join(".", "daisy.jpg"), 'rb') as file: + image_data1 = file.read() + image = cv2_to_base64(image_data1) + headers = {"Content-Type":"application/json", "apikey":"BlfvO08Z9mQpFjcMagl2dxOIA8h2UVdp", "X-INSTANCE-ID" : "kong_ins10"} + data = {"key": ["image"], "value": [image]} + for i in range(1): + r = requests.post(url=url, headers=headers, data=json.dumps(data),verify=False) + print(r.json()) diff --git a/examples/Pipeline/PaddleClas/DarkNet53-encryption/imagenet.label b/examples/Pipeline/PaddleClas/DarkNet53-encryption/imagenet.label new file mode 100644 index 0000000000000000000000000000000000000000..d7146735146ea1894173d6d0e20fb90af36be849 --- /dev/null +++ b/examples/Pipeline/PaddleClas/DarkNet53-encryption/imagenet.label @@ -0,0 +1,1000 @@ +tench, Tinca tinca, +goldfish, Carassius auratus, +great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias, +tiger shark, Galeocerdo cuvieri, +hammerhead, hammerhead shark, +electric ray, crampfish, numbfish, torpedo, +stingray, +cock, +hen, +ostrich, Struthio camelus, +brambling, Fringilla montifringilla, +goldfinch, Carduelis carduelis, +house finch, linnet, Carpodacus mexicanus, +junco, snowbird, +indigo bunting, indigo finch, indigo bird, Passerina cyanea, +robin, American robin, Turdus migratorius, +bulbul, +jay, +magpie, +chickadee, +water ouzel, dipper, +kite, +bald eagle, American eagle, Haliaeetus leucocephalus, +vulture, +great grey owl, great gray owl, Strix nebulosa, +European fire salamander, Salamandra salamandra, +common newt, Triturus vulgaris, +eft, +spotted salamander, Ambystoma maculatum, +axolotl, mud puppy, Ambystoma 
mexicanum, +bullfrog, Rana catesbeiana, +tree frog, tree-frog, +tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui, +loggerhead, loggerhead turtle, Caretta caretta, +leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea, +mud turtle, +terrapin, +box turtle, box tortoise, +banded gecko, +common iguana, iguana, Iguana iguana, +American chameleon, anole, Anolis carolinensis, +whiptail, whiptail lizard, +agama, +frilled lizard, Chlamydosaurus kingi, +alligator lizard, +Gila monster, Heloderma suspectum, +green lizard, Lacerta viridis, +African chameleon, Chamaeleo chamaeleon, +Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis, +African crocodile, Nile crocodile, Crocodylus niloticus, +American alligator, Alligator mississipiensis, +triceratops, +thunder snake, worm snake, Carphophis amoenus, +ringneck snake, ring-necked snake, ring snake, +hognose snake, puff adder, sand viper, +green snake, grass snake, +king snake, kingsnake, +garter snake, grass snake, +water snake, +vine snake, +night snake, Hypsiglena torquata, +boa constrictor, Constrictor constrictor, +rock python, rock snake, Python sebae, +Indian cobra, Naja naja, +green mamba, +sea snake, +horned viper, cerastes, sand viper, horned asp, Cerastes cornutus, +diamondback, diamondback rattlesnake, Crotalus adamanteus, +sidewinder, horned rattlesnake, Crotalus cerastes, +trilobite, +harvestman, daddy longlegs, Phalangium opilio, +scorpion, +black and gold garden spider, Argiope aurantia, +barn spider, Araneus cavaticus, +garden spider, Aranea diademata, +black widow, Latrodectus mactans, +tarantula, +wolf spider, hunting spider, +tick, +centipede, +black grouse, +ptarmigan, +ruffed grouse, partridge, Bonasa umbellus, +prairie chicken, prairie grouse, prairie fowl, +peacock, +quail, +partridge, +African grey, African gray, Psittacus erithacus, +macaw, +sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita, +lorikeet, +coucal, +bee eater, +hornbill, 
+hummingbird, +jacamar, +toucan, +drake, +red-breasted merganser, Mergus serrator, +goose, +black swan, Cygnus atratus, +tusker, +echidna, spiny anteater, anteater, +platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus, +wallaby, brush kangaroo, +koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus, +wombat, +jellyfish, +sea anemone, anemone, +brain coral, +flatworm, platyhelminth, +nematode, nematode worm, roundworm, +conch, +snail, +slug, +sea slug, nudibranch, +chiton, coat-of-mail shell, sea cradle, polyplacophore, +chambered nautilus, pearly nautilus, nautilus, +Dungeness crab, Cancer magister, +rock crab, Cancer irroratus, +fiddler crab, +king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica, +American lobster, Northern lobster, Maine lobster, Homarus americanus, +spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish, +crayfish, crawfish, crawdad, crawdaddy, +hermit crab, +isopod, +white stork, Ciconia ciconia, +black stork, Ciconia nigra, +spoonbill, +flamingo, +little blue heron, Egretta caerulea, +American egret, great white heron, Egretta albus, +bittern, +crane, +limpkin, Aramus pictus, +European gallinule, Porphyrio porphyrio, +American coot, marsh hen, mud hen, water hen, Fulica americana, +bustard, +ruddy turnstone, Arenaria interpres, +red-backed sandpiper, dunlin, Erolia alpina, +redshank, Tringa totanus, +dowitcher, +oystercatcher, oyster catcher, +pelican, +king penguin, Aptenodytes patagonica, +albatross, mollymawk, +grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus, +killer whale, killer, orca, grampus, sea wolf, Orcinus orca, +dugong, Dugong dugon, +sea lion, +Chihuahua, +Japanese spaniel, +Maltese dog, Maltese terrier, Maltese, +Pekinese, Pekingese, Peke, +Shih-Tzu, +Blenheim spaniel, +papillon, +toy terrier, +Rhodesian ridgeback, +Afghan hound, Afghan, +basset, basset hound, +beagle, +bloodhound, sleuthhound, 
+bluetick, +black-and-tan coonhound, +Walker hound, Walker foxhound, +English foxhound, +redbone, +borzoi, Russian wolfhound, +Irish wolfhound, +Italian greyhound, +whippet, +Ibizan hound, Ibizan Podenco, +Norwegian elkhound, elkhound, +otterhound, otter hound, +Saluki, gazelle hound, +Scottish deerhound, deerhound, +Weimaraner, +Staffordshire bullterrier, Staffordshire bull terrier, +American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier, +Bedlington terrier, +Border terrier, +Kerry blue terrier, +Irish terrier, +Norfolk terrier, +Norwich terrier, +Yorkshire terrier, +wire-haired fox terrier, +Lakeland terrier, +Sealyham terrier, Sealyham, +Airedale, Airedale terrier, +cairn, cairn terrier, +Australian terrier, +Dandie Dinmont, Dandie Dinmont terrier, +Boston bull, Boston terrier, +miniature schnauzer, +giant schnauzer, +standard schnauzer, +Scotch terrier, Scottish terrier, Scottie, +Tibetan terrier, chrysanthemum dog, +silky terrier, Sydney silky, +soft-coated wheaten terrier, +West Highland white terrier, +Lhasa, Lhasa apso, +flat-coated retriever, +curly-coated retriever, +golden retriever, +Labrador retriever, +Chesapeake Bay retriever, +German short-haired pointer, +vizsla, Hungarian pointer, +English setter, +Irish setter, red setter, +Gordon setter, +Brittany spaniel, +clumber, clumber spaniel, +English springer, English springer spaniel, +Welsh springer spaniel, +cocker spaniel, English cocker spaniel, cocker, +Sussex spaniel, +Irish water spaniel, +kuvasz, +schipperke, +groenendael, +malinois, +briard, +kelpie, +komondor, +Old English sheepdog, bobtail, +Shetland sheepdog, Shetland sheep dog, Shetland, +collie, +Border collie, +Bouvier des Flandres, Bouviers des Flandres, +Rottweiler, +German shepherd, German shepherd dog, German police dog, alsatian, +Doberman, Doberman pinscher, +miniature pinscher, +Greater Swiss Mountain dog, +Bernese mountain dog, +Appenzeller, +EntleBucher, +boxer, +bull mastiff, +Tibetan 
mastiff, +French bulldog, +Great Dane, +Saint Bernard, St Bernard, +Eskimo dog, husky, +malamute, malemute, Alaskan malamute, +Siberian husky, +dalmatian, coach dog, carriage dog, +affenpinscher, monkey pinscher, monkey dog, +basenji, +pug, pug-dog, +Leonberg, +Newfoundland, Newfoundland dog, +Great Pyrenees, +Samoyed, Samoyede, +Pomeranian, +chow, chow chow, +keeshond, +Brabancon griffon, +Pembroke, Pembroke Welsh corgi, +Cardigan, Cardigan Welsh corgi, +toy poodle, +miniature poodle, +standard poodle, +Mexican hairless, +timber wolf, grey wolf, gray wolf, Canis lupus, +white wolf, Arctic wolf, Canis lupus tundrarum, +red wolf, maned wolf, Canis rufus, Canis niger, +coyote, prairie wolf, brush wolf, Canis latrans, +dingo, warrigal, warragal, Canis dingo, +dhole, Cuon alpinus, +African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus, +hyena, hyaena, +red fox, Vulpes vulpes, +kit fox, Vulpes macrotis, +Arctic fox, white fox, Alopex lagopus, +grey fox, gray fox, Urocyon cinereoargenteus, +tabby, tabby cat, +tiger cat, +Persian cat, +Siamese cat, Siamese, +Egyptian cat, +cougar, puma, catamount, mountain lion, painter, panther, Felis concolor, +lynx, catamount, +leopard, Panthera pardus, +snow leopard, ounce, Panthera uncia, +jaguar, panther, Panthera onca, Felis onca, +lion, king of beasts, Panthera leo, +tiger, Panthera tigris, +cheetah, chetah, Acinonyx jubatus, +brown bear, bruin, Ursus arctos, +American black bear, black bear, Ursus americanus, Euarctos americanus, +ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus, +sloth bear, Melursus ursinus, Ursus ursinus, +mongoose, +meerkat, mierkat, +tiger beetle, +ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle, +ground beetle, carabid beetle, +long-horned beetle, longicorn, longicorn beetle, +leaf beetle, chrysomelid, +dung beetle, +rhinoceros beetle, +weevil, +fly, +bee, +ant, emmet, pismire, +grasshopper, hopper, +cricket, +walking stick, walkingstick, stick insect, +cockroach, roach, 
+mantis, mantid, +cicada, cicala, +leafhopper, +lacewing, lacewing fly, +"dragonfly, darning needle, devils darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", +damselfly, +admiral, +ringlet, ringlet butterfly, +monarch, monarch butterfly, milkweed butterfly, Danaus plexippus, +cabbage butterfly, +sulphur butterfly, sulfur butterfly, +lycaenid, lycaenid butterfly, +starfish, sea star, +sea urchin, +sea cucumber, holothurian, +wood rabbit, cottontail, cottontail rabbit, +hare, +Angora, Angora rabbit, +hamster, +porcupine, hedgehog, +fox squirrel, eastern fox squirrel, Sciurus niger, +marmot, +beaver, +guinea pig, Cavia cobaya, +sorrel, +zebra, +hog, pig, grunter, squealer, Sus scrofa, +wild boar, boar, Sus scrofa, +warthog, +hippopotamus, hippo, river horse, Hippopotamus amphibius, +ox, +water buffalo, water ox, Asiatic buffalo, Bubalus bubalis, +bison, +ram, tup, +bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis, +ibex, Capra ibex, +hartebeest, +impala, Aepyceros melampus, +gazelle, +Arabian camel, dromedary, Camelus dromedarius, +llama, +weasel, +mink, +polecat, fitch, foulmart, foumart, Mustela putorius, +black-footed ferret, ferret, Mustela nigripes, +otter, +skunk, polecat, wood pussy, +badger, +armadillo, +three-toed sloth, ai, Bradypus tridactylus, +orangutan, orang, orangutang, Pongo pygmaeus, +gorilla, Gorilla gorilla, +chimpanzee, chimp, Pan troglodytes, +gibbon, Hylobates lar, +siamang, Hylobates syndactylus, Symphalangus syndactylus, +guenon, guenon monkey, +patas, hussar monkey, Erythrocebus patas, +baboon, +macaque, +langur, +colobus, colobus monkey, +proboscis monkey, Nasalis larvatus, +marmoset, +capuchin, ringtail, Cebus capucinus, +howler monkey, howler, +titi, titi monkey, +spider monkey, Ateles geoffroyi, +squirrel monkey, Saimiri sciureus, +Madagascar cat, ring-tailed lemur, Lemur catta, +indri, indris, Indri indri, Indri brevicaudatus, +Indian elephant, Elephas 
maximus, +African elephant, Loxodonta africana, +lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens, +giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca, +barracouta, snoek, +eel, +coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch, +rock beauty, Holocanthus tricolor, +anemone fish, +sturgeon, +gar, garfish, garpike, billfish, Lepisosteus osseus, +lionfish, +puffer, pufferfish, blowfish, globefish, +abacus, +abaya, +"academic gown, academic robe, judges robe", +accordion, piano accordion, squeeze box, +acoustic guitar, +aircraft carrier, carrier, flattop, attack aircraft carrier, +airliner, +airship, dirigible, +altar, +ambulance, +amphibian, amphibious vehicle, +analog clock, +apiary, bee house, +apron, +ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin, +assault rifle, assault gun, +backpack, back pack, knapsack, packsack, rucksack, haversack, +bakery, bakeshop, bakehouse, +balance beam, beam, +balloon, +ballpoint, ballpoint pen, ballpen, Biro, +Band Aid, +banjo, +bannister, banister, balustrade, balusters, handrail, +barbell, +barber chair, +barbershop, +barn, +barometer, +barrel, cask, +barrow, garden cart, lawn cart, wheelbarrow, +baseball, +basketball, +bassinet, +bassoon, +bathing cap, swimming cap, +bath towel, +bathtub, bathing tub, bath, tub, +beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon, +beacon, lighthouse, beacon light, pharos, +beaker, +bearskin, busby, shako, +beer bottle, +beer glass, +bell cote, bell cot, +bib, +bicycle-built-for-two, tandem bicycle, tandem, +bikini, two-piece, +binder, ring-binder, +binoculars, field glasses, opera glasses, +birdhouse, +boathouse, +bobsled, bobsleigh, bob, +bolo tie, bolo, bola tie, bola, +bonnet, poke bonnet, +bookcase, +bookshop, bookstore, bookstall, +bottlecap, +bow, +bow tie, bow-tie, bowtie, +brass, memorial tablet, plaque, +brassiere, bra, bandeau, +breakwater, 
groin, groyne, mole, bulwark, seawall, jetty, +breastplate, aegis, egis, +broom, +bucket, pail, +buckle, +bulletproof vest, +bullet train, bullet, +butcher shop, meat market, +cab, hack, taxi, taxicab, +caldron, cauldron, +candle, taper, wax light, +cannon, +canoe, +can opener, tin opener, +cardigan, +car mirror, +carousel, carrousel, merry-go-round, roundabout, whirligig, +"carpenters kit, tool kit", +carton, +car wheel, +cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM, +cassette, +cassette player, +castle, +catamaran, +CD player, +cello, violoncello, +cellular telephone, cellular phone, cellphone, cell, mobile phone, +chain, +chainlink fence, +chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour, +chain saw, chainsaw, +chest, +chiffonier, commode, +chime, bell, gong, +china cabinet, china closet, +Christmas stocking, +church, church building, +cinema, movie theater, movie theatre, movie house, picture palace, +cleaver, meat cleaver, chopper, +cliff dwelling, +cloak, +clog, geta, patten, sabot, +cocktail shaker, +coffee mug, +coffeepot, +coil, spiral, volute, whorl, helix, +combination lock, +computer keyboard, keypad, +confectionery, confectionary, candy store, +container ship, containership, container vessel, +convertible, +corkscrew, bottle screw, +cornet, horn, trumpet, trump, +cowboy boot, +cowboy hat, ten-gallon hat, +cradle, +crane, +crash helmet, +crate, +crib, cot, +Crock Pot, +croquet ball, +crutch, +cuirass, +dam, dike, dyke, +desk, +desktop computer, +dial telephone, dial phone, +diaper, nappy, napkin, +digital clock, +digital watch, +dining table, board, +dishrag, dishcloth, +dishwasher, dish washer, dishwashing machine, +disk brake, disc brake, +dock, dockage, docking facility, +dogsled, dog sled, dog sleigh, +dome, +doormat, welcome mat, +drilling platform, offshore rig, +drum, membranophone, tympan, +drumstick, +dumbbell, +Dutch oven, +electric fan, 
blower, +electric guitar, +electric locomotive, +entertainment center, +envelope, +espresso maker, +face powder, +feather boa, boa, +file, file cabinet, filing cabinet, +fireboat, +fire engine, fire truck, +fire screen, fireguard, +flagpole, flagstaff, +flute, transverse flute, +folding chair, +football helmet, +forklift, +fountain, +fountain pen, +four-poster, +freight car, +French horn, horn, +frying pan, frypan, skillet, +fur coat, +garbage truck, dustcart, +gasmask, respirator, gas helmet, +gas pump, gasoline pump, petrol pump, island dispenser, +goblet, +go-kart, +golf ball, +golfcart, golf cart, +gondola, +gong, tam-tam, +gown, +grand piano, grand, +greenhouse, nursery, glasshouse, +grille, radiator grille, +grocery store, grocery, food market, market, +guillotine, +hair slide, +hair spray, +half track, +hammer, +hamper, +hand blower, blow dryer, blow drier, hair dryer, hair drier, +hand-held computer, hand-held microcomputer, +handkerchief, hankie, hanky, hankey, +hard disc, hard disk, fixed disk, +harmonica, mouth organ, harp, mouth harp, +harp, +harvester, reaper, +hatchet, +holster, +home theater, home theatre, +honeycomb, +hook, claw, +hoopskirt, crinoline, +horizontal bar, high bar, +horse cart, horse-cart, +hourglass, +iPod, +iron, smoothing iron, +"jack-o-lantern", +jean, blue jean, denim, +jeep, landrover, +jersey, T-shirt, tee shirt, +jigsaw puzzle, +jinrikisha, ricksha, rickshaw, +joystick, +kimono, +knee pad, +knot, +lab coat, laboratory coat, +ladle, +lampshade, lamp shade, +laptop, laptop computer, +lawn mower, mower, +lens cap, lens cover, +letter opener, paper knife, paperknife, +library, +lifeboat, +lighter, light, igniter, ignitor, +limousine, limo, +liner, ocean liner, +lipstick, lip rouge, +Loafer, +lotion, +loudspeaker, speaker, speaker unit, loudspeaker system, speaker system, +"loupe, jewelers loupe", +lumbermill, sawmill, +magnetic compass, +mailbag, postbag, +mailbox, letter box, +maillot, +maillot, tank suit, +manhole cover, +maraca, 
+marimba, xylophone, +mask, +matchstick, +maypole, +maze, labyrinth, +measuring cup, +medicine chest, medicine cabinet, +megalith, megalithic structure, +microphone, mike, +microwave, microwave oven, +military uniform, +milk can, +minibus, +miniskirt, mini, +minivan, +missile, +mitten, +mixing bowl, +mobile home, manufactured home, +Model T, +modem, +monastery, +monitor, +moped, +mortar, +mortarboard, +mosque, +mosquito net, +motor scooter, scooter, +mountain bike, all-terrain bike, off-roader, +mountain tent, +mouse, computer mouse, +mousetrap, +moving van, +muzzle, +nail, +neck brace, +necklace, +nipple, +notebook, notebook computer, +obelisk, +oboe, hautboy, hautbois, +ocarina, sweet potato, +odometer, hodometer, mileometer, milometer, +oil filter, +organ, pipe organ, +oscilloscope, scope, cathode-ray oscilloscope, CRO, +overskirt, +oxcart, +oxygen mask, +packet, +paddle, boat paddle, +paddlewheel, paddle wheel, +padlock, +paintbrush, +"pajama, pyjama, pjs, jammies", +palace, +panpipe, pandean pipe, syrinx, +paper towel, +parachute, chute, +parallel bars, bars, +park bench, +parking meter, +passenger car, coach, carriage, +patio, terrace, +pay-phone, pay-station, +pedestal, plinth, footstall, +pencil box, pencil case, +pencil sharpener, +perfume, essence, +Petri dish, +photocopier, +pick, plectrum, plectron, +pickelhaube, +picket fence, paling, +pickup, pickup truck, +pier, +piggy bank, penny bank, +pill bottle, +pillow, +ping-pong ball, +pinwheel, +pirate, pirate ship, +pitcher, ewer, +"plane, carpenters plane, woodworking plane", +planetarium, +plastic bag, +plate rack, +plow, plough, +"plunger, plumbers helper", +Polaroid camera, Polaroid Land camera, +pole, +police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria, +poncho, +pool table, billiard table, snooker table, +pop bottle, soda bottle, +pot, flowerpot, +"potters wheel", +power drill, +prayer rug, prayer mat, +printer, +prison, prison house, +projectile, missile, +projector, +puck, 
hockey puck, +punching bag, punch bag, punching ball, punchball, +purse, +quill, quill pen, +quilt, comforter, comfort, puff, +racer, race car, racing car, +racket, racquet, +radiator, +radio, wireless, +radio telescope, radio reflector, +rain barrel, +recreational vehicle, RV, R.V., +reel, +reflex camera, +refrigerator, icebox, +remote control, remote, +restaurant, eating house, eating place, eatery, +revolver, six-gun, six-shooter, +rifle, +rocking chair, rocker, +rotisserie, +rubber eraser, rubber, pencil eraser, +rugby ball, +rule, ruler, +running shoe, +safe, +safety pin, +saltshaker, salt shaker, +sandal, +sarong, +sax, saxophone, +scabbard, +scale, weighing machine, +school bus, +schooner, +scoreboard, +screen, CRT screen, +screw, +screwdriver, +seat belt, seatbelt, +sewing machine, +shield, buckler, +shoe shop, shoe-shop, shoe store, +shoji, +shopping basket, +shopping cart, +shovel, +shower cap, +shower curtain, +ski, +ski mask, +sleeping bag, +slide rule, slipstick, +sliding door, +slot, one-armed bandit, +snorkel, +snowmobile, +snowplow, snowplough, +soap dispenser, +soccer ball, +sock, +solar dish, solar collector, solar furnace, +sombrero, +soup bowl, +space bar, +space heater, +space shuttle, +spatula, +speedboat, +"spider web, spiders web", +spindle, +sports car, sport car, +spotlight, spot, +stage, +steam locomotive, +steel arch bridge, +steel drum, +stethoscope, +stole, +stone wall, +stopwatch, stop watch, +stove, +strainer, +streetcar, tram, tramcar, trolley, trolley car, +stretcher, +studio couch, day bed, +stupa, tope, +submarine, pigboat, sub, U-boat, +suit, suit of clothes, +sundial, +sunglass, +sunglasses, dark glasses, shades, +sunscreen, sunblock, sun blocker, +suspension bridge, +swab, swob, mop, +sweatshirt, +swimming trunks, bathing trunks, +swing, +switch, electric switch, electrical switch, +syringe, +table lamp, +tank, army tank, armored combat vehicle, armoured combat vehicle, +tape player, +teapot, +teddy, teddy bear, +television, 
television system, +tennis ball, +thatch, thatched roof, +theater curtain, theatre curtain, +thimble, +thresher, thrasher, threshing machine, +throne, +tile roof, +toaster, +tobacco shop, tobacconist shop, tobacconist, +toilet seat, +torch, +totem pole, +tow truck, tow car, wrecker, +toyshop, +tractor, +trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi, +tray, +trench coat, +tricycle, trike, velocipede, +trimaran, +tripod, +triumphal arch, +trolleybus, trolley coach, trackless trolley, +trombone, +tub, vat, +turnstile, +typewriter keyboard, +umbrella, +unicycle, monocycle, +upright, upright piano, +vacuum, vacuum cleaner, +vase, +vault, +velvet, +vending machine, +vestment, +viaduct, +violin, fiddle, +volleyball, +waffle iron, +wall clock, +wallet, billfold, notecase, pocketbook, +wardrobe, closet, press, +warplane, military plane, +washbasin, handbasin, washbowl, lavabo, wash-hand basin, +washer, automatic washer, washing machine, +water bottle, +water jug, +water tower, +whiskey jug, +whistle, +wig, +window screen, +window shade, +Windsor tie, +wine bottle, +wing, +wok, +wooden spoon, +wool, woolen, woollen, +worm fence, snake fence, snake-rail fence, Virginia fence, +wreck, +yawl, +yurt, +web site, website, internet site, site, +comic book, +crossword puzzle, crossword, +street sign, +traffic light, traffic signal, stoplight, +book jacket, dust cover, dust jacket, dust wrapper, +menu, +plate, +guacamole, +consomme, +hot pot, hotpot, +trifle, +ice cream, icecream, +ice lolly, lolly, lollipop, popsicle, +French loaf, +bagel, beigel, +pretzel, +cheeseburger, +hotdog, hot dog, red hot, +mashed potato, +head cabbage, +broccoli, +cauliflower, +zucchini, courgette, +spaghetti squash, +acorn squash, +butternut squash, +cucumber, cuke, +artichoke, globe artichoke, +bell pepper, +cardoon, +mushroom, +Granny Smith, +strawberry, +orange, +lemon, +fig, +pineapple, ananas, +banana, +jackfruit, jak, jack, +custard apple, +pomegranate, +hay, +carbonara, 
+chocolate sauce, chocolate syrup, +dough, +meat loaf, meatloaf, +pizza, pizza pie, +potpie, +burrito, +red wine, +espresso, +cup, +eggnog, +alp, +bubble, +cliff, drop, drop-off, +coral reef, +geyser, +lakeside, lakeshore, +promontory, headland, head, foreland, +sandbar, sand bar, +seashore, coast, seacoast, sea-coast, +valley, vale, +volcano, +ballplayer, baseball player, +groom, bridegroom, +scuba diver, +rapeseed, +daisy, +"yellow ladys slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum", +corn, +acorn, +hip, rose hip, rosehip, +buckeye, horse chestnut, conker, +coral fungus, +agaric, +gyromitra, +stinkhorn, carrion fungus, +earthstar, +hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa, +bolete, +ear, spike, capitulum, +toilet tissue, toilet paper, bathroom tissue diff --git a/examples/Pipeline/PaddleClas/DarkNet53-encryption/web_service.py b/examples/Pipeline/PaddleClas/DarkNet53-encryption/web_service.py new file mode 100644 index 0000000000000000000000000000000000000000..11abe19520de6ecd09473f537d30773e454b0771 --- /dev/null +++ b/examples/Pipeline/PaddleClas/DarkNet53-encryption/web_service.py @@ -0,0 +1,71 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import sys +from paddle_serving_app.reader import Sequential, URL2Image, Resize, CenterCrop, RGB2BGR, Transpose, Div, Normalize, Base64ToImage +from paddle_serving_server.web_service import WebService, Op +import logging +import numpy as np +import base64, cv2 + + +class ImagenetOp(Op): + def init_op(self): + self.seq = Sequential([ + Resize(256), CenterCrop(224), RGB2BGR(), Transpose((2, 0, 1)), + Div(255), Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225], + True) + ]) + self.label_dict = {} + label_idx = 0 + with open("imagenet.label") as fin: + for line in fin: + self.label_dict[label_idx] = line.strip() + label_idx += 1 + + def preprocess(self, input_dicts, data_id, log_id): + (_, input_dict), = input_dicts.items() + batch_size = len(input_dict.keys()) + imgs = [] + for key in input_dict.keys(): + data = base64.b64decode(input_dict[key].encode('utf8')) + data = np.fromstring(data, np.uint8) + im = cv2.imdecode(data, cv2.IMREAD_COLOR) + img = self.seq(im) + imgs.append(img[np.newaxis, :].copy()) + input_imgs = np.concatenate(imgs, axis=0) + return {"image": input_imgs}, False, None, "" + + def postprocess(self, input_dicts, fetch_dict, data_id=0, log_id=0): + score_list = fetch_dict["save_infer_model/scale_0.tmp_0"] + result = {"label": [], "prob": []} + for score in score_list: + score = score.tolist() + max_score = max(score) + result["label"].append(self.label_dict[score.index(max_score)] + .strip().replace(",", "")) + result["prob"].append(max_score) + result["label"] = str(result["label"]) + result["prob"] = str(result["prob"]) + return result, None, "" + + +class ImageService(WebService): + def get_pipeline_response(self, read_op): + image_op = ImagenetOp(name="imagenet", input_ops=[read_op]) + return image_op + + +uci_service = ImageService(name="imagenet") +uci_service.prepare_pipeline_config("config.yml") +uci_service.run_service() diff --git a/examples/Pipeline/PaddleVideo/PPTSN_K400/README_CN.md 
b/examples/Pipeline/PaddleVideo/PPTSN_K400/README_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..77c8854081d50d8509b00d01cccb1205ae0ab877 --- /dev/null +++ b/examples/Pipeline/PaddleVideo/PPTSN_K400/README_CN.md @@ -0,0 +1,28 @@ +# PP-TSN 视频分类模型 + +## 模型简介 +PP-TSN模型是对TSN模型进行改进,在不增加参数量和计算量的情况下,得到了的更高精度的2D实用视频分类模型。 + +## 获取模型 +``` +sh get_model.sh +``` + +## 部署模型并测试 + +### 启动 server +``` +python3 web_service.py &>log.txt & +``` + +## 启动 client + +**一. http client** +``` +python3 pipeline_http_client.py +``` +**二. rpc client** + +``` +python3 pipeline_rpc_client.py +``` diff --git a/examples/Pipeline/PaddleVideo/PPTSN_K400/pipeline_http_client.py b/examples/Pipeline/PaddleVideo/PPTSN_K400/pipeline_http_client.py index e1025598dabe9ea56c3c75ffdb9121af883ae249..9c61b5a0e8597e6978f81edde067bbea2ecfe39f 100644 --- a/examples/Pipeline/PaddleVideo/PPTSN_K400/pipeline_http_client.py +++ b/examples/Pipeline/PaddleVideo/PPTSN_K400/pipeline_http_client.py @@ -16,7 +16,7 @@ import requests import json url = "http://127.0.0.1:9999/ppTSN/prediction" -video_url = "https://paddle-serving.bj.bcebos.com/huangjianhui04/example.avi" +video_url = "https://paddle-serving.bj.bcebos.com/model/PaddleVideo/example.avi" for i in range(4): data = {"key": ["filename"], "value": [video_url]} r = requests.post(url=url, data=json.dumps(data)) diff --git a/examples/Pipeline/PaddleVideo/PPTSN_K400/pipeline_rpc_client.py b/examples/Pipeline/PaddleVideo/PPTSN_K400/pipeline_rpc_client.py new file mode 100644 index 0000000000000000000000000000000000000000..481f54f99d8ea31a6f52adadb6632e0f4e341e57 --- /dev/null +++ b/examples/Pipeline/PaddleVideo/PPTSN_K400/pipeline_rpc_client.py @@ -0,0 +1,31 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +try: + from paddle_serving_server.pipeline import PipelineClient +except ImportError: + from paddle_serving_server.pipeline import PipelineClient +import numpy as np +import requests +import json +import cv2 +import base64 +import os + +client = PipelineClient() +client.connect(['127.0.0.1:18090']) + +video_url = "https://paddle-serving.bj.bcebos.com/model/PaddleVideo/example.avi" +for i in range(1): + ret = client.predict(feed_dict={"video_url": video_url}, fetch=["res"]) + print(ret) diff --git a/python/paddle_serving_server/env_check/run.py b/python/paddle_serving_server/env_check/run.py index c3d5ce4ff8790626e18cb8d69a77c94fdc80e496..2c4216b22e464ab2e3fe4ec363b62eb2fe074540 100644 --- a/python/paddle_serving_server/env_check/run.py +++ b/python/paddle_serving_server/env_check/run.py @@ -44,8 +44,9 @@ def mv_log_to_new_dir(dir_path): serving_log_path = os.environ['SERVING_LOG_PATH'] for file_name in log_files: file_path = os.path.join(serving_log_path, file_name) + dir_path_temp = os.path.join(dir_path, file_name) if os.path.exists(file_path): - shutil.move(file_path, dir_path) + shutil.move(file_path, dir_path_temp) def run_test_cases(cases_list, case_type, is_open_std): diff --git a/python/paddle_serving_server/serve.py b/python/paddle_serving_server/serve.py index f9a50ea37d4cc75b92cf2d62954c3c0de88e8094..09931dad80e19b364cb4e17a4b878662ec190aff 100755 --- a/python/paddle_serving_server/serve.py +++ b/python/paddle_serving_server/serve.py @@ -184,6 +184,12 @@ def serve_args(): default=False, action="store_true", help="Use encryption model") + 
parser.add_argument( + "--encryption_rpc_port", + type=int, + required=False, + default=12000, + help="Port of encryption model, only valid for arg.use_encryption_model") parser.add_argument( "--use_trt", default=False, action="store_true", help="Use TensorRT") parser.add_argument( @@ -352,8 +358,11 @@ def start_multi_card(args, serving_port=None): # pylint: disable=doc-string-mis class MainService(BaseHTTPRequestHandler): + #def __init__(self): + # print("MainService ___init________\n") def get_available_port(self): - default_port = 12000 + global encryption_rpc_port + default_port = encryption_rpc_port for i in range(1000): if port_is_available(default_port + i): return default_port + i @@ -553,7 +562,8 @@ if __name__ == "__main__": p_flag = False p = None serving_port = 0 - server = HTTPServer(('0.0.0.0', int(args.port)), MainService) + encryption_rpc_port = args.encryption_rpc_port + server = HTTPServer(('localhost', int(args.port)), MainService) print( 'Starting encryption server, waiting for key from client, use to stop' ) diff --git a/python/pipeline/operator.py b/python/pipeline/operator.py index 5c8acc1e9fdf0b468eb3822d467c680757f46b5a..b04f79f971c5212242c2dad71abf19d2cd21af21 100644 --- a/python/pipeline/operator.py +++ b/python/pipeline/operator.py @@ -102,6 +102,8 @@ class Op(object): self._retry = max(1, retry) self._batch_size = batch_size self._auto_batching_timeout = auto_batching_timeout + self._use_encryption_model = None + self._encryption_key = "" self._input = None self._outputs = [] @@ -161,6 +163,11 @@ class Op(object): self._fetch_names = conf.get("fetch_list") if self._client_config is None: self._client_config = conf.get("client_config") + if self._use_encryption_model is None: + print ("config use_encryption model here", conf.get("use_encryption_model")) + self._use_encryption_model = conf.get("use_encryption_model") + if self._encryption_key is None or self._encryption_key=="": + self._encryption_key = conf.get("encryption_key") if 
self._timeout is None: self._timeout = conf["timeout"] if self._timeout > 0: @@ -409,7 +416,12 @@ class Op(object): self._fetch_names = client.fetch_names_ _LOGGER.info("Op({}) has no fetch name set. So fetch all vars") if self.client_type != "local_predictor": - client.connect(server_endpoints) + if self._use_encryption_model is None or self._use_encryption_model is False: + client.connect(server_endpoints) + else: + print("connect to encryption rpc client") + client.use_key(self._encryption_key) + client.connect(server_endpoints, encryption=True) _LOGGER.info("init_client, feed_list:{}, fetch_list: {}".format(self.right_feed_names, self.right_fetch_names)) return client diff --git a/python/requirements.txt b/python/requirements.txt index b7a642b49489ae15a4c62cc74d2a1919ffa3c6d0..c6938505b2cf5d38eda35192c930ac69ce2144ed 100644 --- a/python/requirements.txt +++ b/python/requirements.txt @@ -22,7 +22,9 @@ opencv-python==3.4.17.61; platform_machine != "aarch64" opencv-python; platform_machine == "aarch64" pytest==7.0.1 prometheus-client==0.12.0 -pillow==8.4.0 +pillow==8.4.0 ; python_version == "3.6" +pillow==9.0.0 ; python_version > "3.6" av==8.0.3 decord==0.4.2 SimpleITK + diff --git a/python/requirements_mac.txt b/python/requirements_mac.txt index df197930750049258d4b02742bd78e74ba8377d2..9281deff1bae30e5a9faebf972eb02fc7941d20e 100644 --- a/python/requirements_mac.txt +++ b/python/requirements_mac.txt @@ -1,9 +1,8 @@ numpy>=1.12, <=1.16.4 ; python_version<"3.5" -shapely==1.7.0 +shapely==1.8.0 wheel>=0.34.0, <0.35.0 setuptools>=44.1.0 google>=2.0.3 -opencv-python==4.2.0.32 protobuf>=3.12.2 func-timeout>=4.3.5 pyyaml>=5.1 @@ -16,5 +15,10 @@ Werkzeug==1.0.1 ujson>=2.0.3 grpcio-tools==1.33.2 grpcio>=1.33.2 -sentencepiece==0.1.83 -pillow==8.4.0 +sentencepiece==0.1.96; platform_machine != "aarch64" +sentencepiece; platform_machine == "aarch64" +opencv-python==3.4.17.61; platform_machine != "aarch64" +opencv-python; platform_machine == "aarch64" +pillow==8.4.0 ; 
python_version == "3.6" +pillow==9.0.0 ; python_version > "3.6" + diff --git a/python/setup.py.app.in b/python/setup.py.app.in index 79ddca2744bc014efe98f6040365079405c5fd07..edbbe8406688938ce3a11726dfe477a9edecfa32 100644 --- a/python/setup.py.app.in +++ b/python/setup.py.app.in @@ -46,7 +46,7 @@ REQUIRED_PACKAGES = [ 'pyclipper', 'shapely', 'sentencepiece<=0.1.96; platform_machine != "aarch64"', 'sentencepiece; platform_machine == "aarch64"', - 'opencv-python<=4.3.0.38; platform_machine != "aarch64"', + 'opencv-python==3.4.17.61; platform_machine != "aarch64"', 'opencv-python; platform_machine == "aarch64"', ] diff --git a/tools/Dockerfile.cuda10.1-cudnn7-gcc54.devel b/tools/Dockerfile.cuda10.1-cudnn7-gcc54.devel index 0d42813368b5a64955af171c9147524288166120..7c2d19dc1a303cff2fb0cf16e857d0652be89e0b 100644 --- a/tools/Dockerfile.cuda10.1-cudnn7-gcc54.devel +++ b/tools/Dockerfile.cuda10.1-cudnn7-gcc54.devel @@ -15,14 +15,18 @@ ENV HOME /root # Add bash enhancements COPY tools/dockerfiles/root/ /root/ +# Set ubuntu sources +RUN sed -i s:/archive.ubuntu.com:/mirrors.tuna.tsinghua.edu.cn/ubuntu:g /etc/apt/sources.list && \ + cat /etc/apt/sources.list && \ + apt-get clean -y && \ + apt-get -y update --fix-missing + # Prepare packages for Python -RUN apt-get update && \ - apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ +RUN apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \ xz-utils tk-dev libffi-dev liblzma-dev -RUN apt-get update && \ - apt-get install -y --allow-downgrades --allow-change-held-packages \ +RUN apt-get install -y --allow-downgrades --allow-change-held-packages \ patchelf git python-pip python-dev python-opencv openssh-server bison \ wget unzip unrar tar xz-utils bzip2 gzip coreutils ntp \ curl sed grep graphviz libjpeg-dev zlib1g-dev \ @@ -30,44 +34,46 @@ RUN apt-get update && \ automake locales clang-format swig \ 
liblapack-dev liblapacke-dev libcurl4-openssl-dev \ net-tools libtool module-init-tools vim && \ + apt-get install libprotobuf-dev -y && \ apt-get clean -y RUN ln -s /usr/lib/x86_64-linux-gnu/libssl.so /usr/lib/libssl.so.10 && \ ln -s /usr/lib/x86_64-linux-gnu/libcrypto.so /usr/lib/libcrypto.so.10 -RUN wget https://github.com/koalaman/shellcheck/releases/download/v0.7.1/shellcheck-v0.7.1.linux.x86_64.tar.xz -O shellcheck-v0.7.1.linux.x86_64.tar.xz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/shellcheck-v0.7.1.linux.x86_64.tar.xz && \ tar -xf shellcheck-v0.7.1.linux.x86_64.tar.xz && cp shellcheck-v0.7.1/shellcheck /usr/bin/shellcheck && \ rm -rf shellcheck-v0.7.1.linux.x86_64.tar.xz shellcheck-v0.7.1 # install cmake WORKDIR /home -RUN wget -q https://cmake.org/files/v3.16/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz ENV PATH=/home/cmake-3.16.0-Linux-x86_64/bin:$PATH -# Install Python3.6 +# Install Python build RUN mkdir -p /root/python_build/ && wget -q https://www.sqlite.org/2018/sqlite-autoconf-3250300.tar.gz && \ tar -zxf sqlite-autoconf-3250300.tar.gz && cd sqlite-autoconf-3250300 && \ ./configure -prefix=/usr/local && make -j8 && make install && cd ../ && rm sqlite-autoconf-3250300.tar.gz -RUN wget -q https://www.python.org/ftp/python/3.6.0/Python-3.6.0.tgz && \ +# Install Python3.6 +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.6.0.tgz && \ tar -xzf Python-3.6.0.tgz && cd Python-3.6.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. 
&& rm -rf Python-3.6.0* # Install Python3.7 -RUN wget -q https://www.python.org/ftp/python/3.7.0/Python-3.7.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.7.0.tgz && \ tar -xzf Python-3.7.0.tgz && cd Python-3.7.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.7.0* # Install Python3.8 -RUN wget -q https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.8.0.tgz && \ tar -xzf Python-3.8.0.tgz && cd Python-3.8.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.8.0* # Install Python3.9 -RUN wget -q https://www.python.org/ftp/python/3.9.0/Python-3.9.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.9.0.tgz && \ tar -xzf Python-3.9.0.tgz && cd Python-3.9.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.9.0* @@ -108,8 +114,6 @@ RUN git config --global credential.helper store # Fix locales to en_US.UTF-8 RUN localedef -i en_US -f UTF-8 en_US.UTF-8 -RUN apt-get install libprotobuf-dev -y - # Older versions of patchelf limited the size of the files being processed and were fixed in this pr. # https://github.com/NixOS/patchelf/commit/ba2695a8110abbc8cc6baf0eea819922ee5007fa # So install a newer version here. 
@@ -128,10 +132,10 @@ RUN wget https://paddle-ci.gz.bcebos.com/ccache-3.7.9.tar.gz && \ ln -s /usr/local/ccache-3.7.9/bin/ccache /usr/local/bin/ccache # Update pip version -RUN python3.8 -m pip install --upgrade pip==21.3.1 requests && \ - python3.7 -m pip install --upgrade pip==21.3.1 requests && \ - python3.6 -m pip install --upgrade pip==21.3.1 requests && \ - python3.9 -m pip install --upgrade pip==21.3.1 requests +RUN python3.9 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.8 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.7 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.6 -m pip install --upgrade pip -i https://pypi.douban.com/simple # Wget ssl libs and link thems RUN wget https://paddle-serving.bj.bcebos.com/others/centos_ssl.tar && \ diff --git a/tools/Dockerfile.cuda10.1-cudnn7.devel b/tools/Dockerfile.cuda10.1-cudnn7.devel index 4f1417f0b89059a40cdb6667466dd445bc3cb869..4b595ef630c154f1a21430707942f2cdd9edee00 100644 --- a/tools/Dockerfile.cuda10.1-cudnn7.devel +++ b/tools/Dockerfile.cuda10.1-cudnn7.devel @@ -15,14 +15,20 @@ ENV HOME /root # Add bash enhancements COPY tools/dockerfiles/root/ /root/ +# Set ubuntu sources +RUN sed -i s:/archive.ubuntu.com:/mirrors.tuna.tsinghua.edu.cn/ubuntu:g /etc/apt/sources.list && \ + cat /etc/apt/sources.list && \ + apt-get clean -y && \ + apt-get -y update --fix-missing + # Prepare packages for Python -RUN apt-get update && \ - apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ +#RUN apt-get update && \ +RUN apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \ xz-utils tk-dev libffi-dev liblzma-dev -RUN apt-get update && \ - apt-get install -y --allow-downgrades --allow-change-held-packages \ +#RUN apt-get update && \ +RUN apt-get install -y --allow-downgrades --allow-change-held-packages \ patchelf git 
python-pip python-dev python-opencv openssh-server bison \ wget unzip unrar tar xz-utils bzip2 gzip coreutils ntp \ curl sed grep graphviz libjpeg-dev zlib1g-dev \ @@ -30,12 +36,13 @@ RUN apt-get update && \ automake locales clang-format swig \ liblapack-dev liblapacke-dev libcurl4-openssl-dev \ net-tools libtool module-init-tools vim && \ + apt-get install libprotobuf-dev -y && \ apt-get clean -y RUN ln -s /usr/lib/x86_64-linux-gnu/libssl.so /usr/lib/libssl.so.10 && \ ln -s /usr/lib/x86_64-linux-gnu/libcrypto.so /usr/lib/libcrypto.so.10 -RUN wget https://github.com/koalaman/shellcheck/releases/download/v0.7.1/shellcheck-v0.7.1.linux.x86_64.tar.xz -O shellcheck-v0.7.1.linux.x86_64.tar.xz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/shellcheck-v0.7.1.linux.x86_64.tar.xz && \ tar -xf shellcheck-v0.7.1.linux.x86_64.tar.xz && cp shellcheck-v0.7.1/shellcheck /usr/bin/shellcheck && \ rm -rf shellcheck-v0.7.1.linux.x86_64.tar.xz shellcheck-v0.7.1 @@ -52,33 +59,34 @@ WORKDIR /usr/bin # install cmake WORKDIR /home -RUN wget -q https://cmake.org/files/v3.16/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz ENV PATH=/home/cmake-3.16.0-Linux-x86_64/bin:$PATH -# Install Python3.6 +# Install Python build RUN mkdir -p /root/python_build/ && wget -q https://www.sqlite.org/2018/sqlite-autoconf-3250300.tar.gz && \ tar -zxf sqlite-autoconf-3250300.tar.gz && cd sqlite-autoconf-3250300 && \ ./configure -prefix=/usr/local && make -j8 && make install && cd ../ && rm sqlite-autoconf-3250300.tar.gz -RUN wget -q https://www.python.org/ftp/python/3.6.0/Python-3.6.0.tgz && \ +# Install Python3.6 +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.6.0.tgz 
&& \ tar -xzf Python-3.6.0.tgz && cd Python-3.6.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.6.0* # Install Python3.7 -RUN wget -q https://www.python.org/ftp/python/3.7.0/Python-3.7.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.7.0.tgz && \ tar -xzf Python-3.7.0.tgz && cd Python-3.7.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.7.0* # Install Python3.8 -RUN wget -q https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.8.0.tgz && \ tar -xzf Python-3.8.0.tgz && cd Python-3.8.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.8.0* # Install Python3.9 -RUN wget -q https://www.python.org/ftp/python/3.9.0/Python-3.9.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.9.0.tgz && \ tar -xzf Python-3.9.0.tgz && cd Python-3.9.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.9.0* @@ -119,8 +127,6 @@ RUN git config --global credential.helper store # Fix locales to en_US.UTF-8 RUN localedef -i en_US -f UTF-8 en_US.UTF-8 -RUN apt-get install libprotobuf-dev -y - # Older versions of patchelf limited the size of the files being processed and were fixed in this pr. # https://github.com/NixOS/patchelf/commit/ba2695a8110abbc8cc6baf0eea819922ee5007fa # So install a newer version here. 
@@ -139,10 +145,10 @@ RUN wget https://paddle-ci.gz.bcebos.com/ccache-3.7.9.tar.gz && \ ln -s /usr/local/ccache-3.7.9/bin/ccache /usr/local/bin/ccache # Update pip version -RUN python3.8 -m pip install --upgrade pip==21.3.1 requests && \ - python3.7 -m pip install --upgrade pip==21.3.1 requests && \ - python3.6 -m pip install --upgrade pip==21.3.1 requests && \ - python3.9 -m pip install --upgrade pip==21.3.1 requests +RUN python3.9 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.8 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.7 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.6 -m pip install --upgrade pip -i https://pypi.douban.com/simple # Wget ssl libs and link thems RUN wget https://paddle-serving.bj.bcebos.com/others/centos_ssl.tar && \ diff --git a/tools/Dockerfile.cuda10.2-cudnn7.devel b/tools/Dockerfile.cuda10.2-cudnn7.devel index faa299ad1f2a515010d197af252f5cc957f84544..a0d822ad70b7a3c1bc42b6ce7f7422dcf6600b67 100644 --- a/tools/Dockerfile.cuda10.2-cudnn7.devel +++ b/tools/Dockerfile.cuda10.2-cudnn7.devel @@ -15,14 +15,19 @@ ENV HOME /root # Add bash enhancements COPY tools/dockerfiles/root/ /root/ -# Prepare packages for Python -RUN apt-get update && \ - apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ +# Set ubuntu sources +RUN sed -i s:/archive.ubuntu.com:/mirrors.tuna.tsinghua.edu.cn/ubuntu:g /etc/apt/sources.list && \ + cat /etc/apt/sources.list && \ + apt-get clean -y && \ + apt-get -y update --fix-missing + +#RUN apt-get update && \ +RUN apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \ xz-utils tk-dev libffi-dev liblzma-dev -RUN apt-get update && \ - apt-get install -y --allow-downgrades --allow-change-held-packages \ +#RUN apt-get update && \ +RUN apt-get install -y --allow-downgrades --allow-change-held-packages \ patchelf 
git python-pip python-dev python-opencv openssh-server bison \ wget unzip unrar tar xz-utils bzip2 gzip coreutils ntp \ curl sed grep graphviz libjpeg-dev zlib1g-dev \ @@ -30,12 +35,13 @@ RUN apt-get update && \ automake locales clang-format swig \ liblapack-dev liblapacke-dev libcurl4-openssl-dev \ net-tools libtool module-init-tools vim && \ + apt-get install libprotobuf-dev -y && \ apt-get clean -y RUN ln -s /usr/lib/x86_64-linux-gnu/libssl.so /usr/lib/libssl.so.10 && \ ln -s /usr/lib/x86_64-linux-gnu/libcrypto.so /usr/lib/libcrypto.so.10 -RUN wget https://github.com/koalaman/shellcheck/releases/download/v0.7.1/shellcheck-v0.7.1.linux.x86_64.tar.xz -O shellcheck-v0.7.1.linux.x86_64.tar.xz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/shellcheck-v0.7.1.linux.x86_64.tar.xz && \ tar -xf shellcheck-v0.7.1.linux.x86_64.tar.xz && cp shellcheck-v0.7.1/shellcheck /usr/bin/shellcheck && \ rm -rf shellcheck-v0.7.1.linux.x86_64.tar.xz shellcheck-v0.7.1 @@ -52,33 +58,34 @@ WORKDIR /usr/bin # install cmake WORKDIR /home -RUN wget -q https://cmake.org/files/v3.16/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz ENV PATH=/home/cmake-3.16.0-Linux-x86_64/bin:$PATH -# Install Python3.6 +# Install Python RUN mkdir -p /root/python_build/ && wget -q https://www.sqlite.org/2018/sqlite-autoconf-3250300.tar.gz && \ tar -zxf sqlite-autoconf-3250300.tar.gz && cd sqlite-autoconf-3250300 && \ ./configure -prefix=/usr/local && make -j8 && make install && cd ../ && rm sqlite-autoconf-3250300.tar.gz -RUN wget -q https://www.python.org/ftp/python/3.6.0/Python-3.6.0.tgz && \ +# Install Python3.6 +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.6.0.tgz && 
\ tar -xzf Python-3.6.0.tgz && cd Python-3.6.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.6.0* # Install Python3.7 -RUN wget -q https://www.python.org/ftp/python/3.7.0/Python-3.7.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.7.0.tgz && \ tar -xzf Python-3.7.0.tgz && cd Python-3.7.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.7.0* # Install Python3.8 -RUN wget -q https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.8.0.tgz && \ tar -xzf Python-3.8.0.tgz && cd Python-3.8.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.8.0* # Install Python3.9 -RUN wget -q https://www.python.org/ftp/python/3.9.0/Python-3.9.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.9.0.tgz && \ tar -xzf Python-3.9.0.tgz && cd Python-3.9.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.9.0* @@ -119,8 +126,6 @@ RUN git config --global credential.helper store # Fix locales to en_US.UTF-8 RUN localedef -i en_US -f UTF-8 en_US.UTF-8 -RUN apt-get install libprotobuf-dev -y - # Older versions of patchelf limited the size of the files being processed and were fixed in this pr. # https://github.com/NixOS/patchelf/commit/ba2695a8110abbc8cc6baf0eea819922ee5007fa # So install a newer version here. 
@@ -139,10 +144,10 @@ RUN wget https://paddle-ci.gz.bcebos.com/ccache-3.7.9.tar.gz && \ ln -s /usr/local/ccache-3.7.9/bin/ccache /usr/local/bin/ccache # Update pip version -RUN python3.8 -m pip install --upgrade pip==21.3.1 requests && \ - python3.7 -m pip install --upgrade pip==21.3.1 requests && \ - python3.6 -m pip install --upgrade pip==21.3.1 requests && \ - python3.9 -m pip install --upgrade pip==21.3.1 requests +RUN python3.9 -m pip install --upgrade pip==22.0.3 -i https://pypi.douban.com/simple && \ + python3.8 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.7 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.6 -m pip install --upgrade pip -i https://pypi.douban.com/simple # Wget ssl libs and link thems RUN wget https://paddle-serving.bj.bcebos.com/others/centos_ssl.tar && \ diff --git a/tools/Dockerfile.cuda10.2-cudnn8.devel b/tools/Dockerfile.cuda10.2-cudnn8.devel index c6278160c9d4122b951ab6b22ec06fe5ffeddc5e..0af732772b2bd351733fc5da68e816d390d42c20 100644 --- a/tools/Dockerfile.cuda10.2-cudnn8.devel +++ b/tools/Dockerfile.cuda10.2-cudnn8.devel @@ -15,14 +15,18 @@ ENV HOME /root # Add bash enhancements COPY tools/dockerfiles/root/ /root/ +# Set ubuntu sources +RUN sed -i s:/archive.ubuntu.com:/mirrors.tuna.tsinghua.edu.cn/ubuntu:g /etc/apt/sources.list && \ + cat /etc/apt/sources.list && \ + apt-get clean -y && \ + apt-get -y update --fix-missing + # Prepare packages for Python -RUN apt-get update && \ - apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ +RUN apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \ xz-utils tk-dev libffi-dev liblzma-dev -RUN apt-get update && \ - apt-get install -y --allow-downgrades --allow-change-held-packages \ +RUN apt-get install -y --allow-downgrades --allow-change-held-packages \ patchelf git python-pip python-dev python-opencv 
openssh-server bison \ wget unzip unrar tar xz-utils bzip2 gzip coreutils ntp \ curl sed grep graphviz libjpeg-dev zlib1g-dev \ @@ -30,12 +34,13 @@ RUN apt-get update && \ automake locales clang-format swig \ liblapack-dev liblapacke-dev libcurl4-openssl-dev \ net-tools libtool module-init-tools vim && \ + apt-get install libprotobuf-dev -y && \ apt-get clean -y RUN ln -s /usr/lib/x86_64-linux-gnu/libssl.so /usr/lib/libssl.so.10 && \ ln -s /usr/lib/x86_64-linux-gnu/libcrypto.so /usr/lib/libcrypto.so.10 -RUN wget https://github.com/koalaman/shellcheck/releases/download/v0.7.1/shellcheck-v0.7.1.linux.x86_64.tar.xz -O shellcheck-v0.7.1.linux.x86_64.tar.xz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/shellcheck-v0.7.1.linux.x86_64.tar.xz && \ tar -xf shellcheck-v0.7.1.linux.x86_64.tar.xz && cp shellcheck-v0.7.1/shellcheck /usr/bin/shellcheck && \ rm -rf shellcheck-v0.7.1.linux.x86_64.tar.xz shellcheck-v0.7.1 @@ -52,33 +57,34 @@ WORKDIR /usr/bin # install cmake WORKDIR /home -RUN wget -q https://cmake.org/files/v3.16/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz ENV PATH=/home/cmake-3.16.0-Linux-x86_64/bin:$PATH -# Install Python3.6 +# Install Python build RUN mkdir -p /root/python_build/ && wget -q https://www.sqlite.org/2018/sqlite-autoconf-3250300.tar.gz && \ tar -zxf sqlite-autoconf-3250300.tar.gz && cd sqlite-autoconf-3250300 && \ ./configure -prefix=/usr/local && make -j8 && make install && cd ../ && rm sqlite-autoconf-3250300.tar.gz -RUN wget -q https://www.python.org/ftp/python/3.6.0/Python-3.6.0.tgz && \ +# Install Python3.6 +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.6.0.tgz && \ tar -xzf Python-3.6.0.tgz && cd 
Python-3.6.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.6.0* # Install Python3.7 -RUN wget -q https://www.python.org/ftp/python/3.7.0/Python-3.7.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.7.0.tgz && \ tar -xzf Python-3.7.0.tgz && cd Python-3.7.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.7.0* # Install Python3.8 -RUN wget -q https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.8.0.tgz && \ tar -xzf Python-3.8.0.tgz && cd Python-3.8.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.8.0* # Install Python3.9 -RUN wget -q https://www.python.org/ftp/python/3.9.0/Python-3.9.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.9.0.tgz && \ tar -xzf Python-3.9.0.tgz && cd Python-3.9.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.9.0* @@ -119,8 +125,6 @@ RUN git config --global credential.helper store # Fix locales to en_US.UTF-8 RUN localedef -i en_US -f UTF-8 en_US.UTF-8 -RUN apt-get install libprotobuf-dev -y - # Older versions of patchelf limited the size of the files being processed and were fixed in this pr. # https://github.com/NixOS/patchelf/commit/ba2695a8110abbc8cc6baf0eea819922ee5007fa # So install a newer version here. 
@@ -139,10 +143,10 @@ RUN wget https://paddle-ci.gz.bcebos.com/ccache-3.7.9.tar.gz && \ ln -s /usr/local/ccache-3.7.9/bin/ccache /usr/local/bin/ccache # Update pip version -RUN python3.8 -m pip install --upgrade pip==21.3.1 requests && \ - python3.7 -m pip install --upgrade pip==21.3.1 requests && \ - python3.6 -m pip install --upgrade pip==21.3.1 requests && \ - python3.9 -m pip install --upgrade pip==21.3.1 requests +RUN python3.9 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.8 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.7 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.6 -m pip install --upgrade pip -i https://pypi.douban.com/simple # Wget ssl libs and link thems RUN wget https://paddle-serving.bj.bcebos.com/others/centos_ssl.tar && \ diff --git a/tools/Dockerfile.cuda11.2-cudnn8.devel b/tools/Dockerfile.cuda11.2-cudnn8.devel index cb5b04bb49f51b89b63c825cca8cf36960c689f3..6b9baed54e7109da091cd3dcc6f6e80ab5bed8c6 100644 --- a/tools/Dockerfile.cuda11.2-cudnn8.devel +++ b/tools/Dockerfile.cuda11.2-cudnn8.devel @@ -15,14 +15,18 @@ ENV HOME /root # Add bash enhancements COPY tools/dockerfiles/root/ /root/ +# Set ubuntu sources +RUN sed -i s:/archive.ubuntu.com:/mirrors.tuna.tsinghua.edu.cn/ubuntu:g /etc/apt/sources.list && \ + cat /etc/apt/sources.list && \ + apt-get clean -y && \ + apt-get -y update --fix-missing + # Prepare packages for Python -RUN apt-get update && \ - apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ +RUN apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \ xz-utils tk-dev libffi-dev liblzma-dev -RUN apt-get update && \ - apt-get install -y --allow-downgrades --allow-change-held-packages \ +RUN apt-get install -y --allow-downgrades --allow-change-held-packages \ patchelf git python-pip python-dev python-opencv openssh-server 
bison \ wget unzip unrar tar xz-utils bzip2 gzip coreutils ntp \ curl sed grep graphviz libjpeg-dev zlib1g-dev \ @@ -30,12 +34,13 @@ RUN apt-get update && \ automake locales clang-format swig \ liblapack-dev liblapacke-dev libcurl4-openssl-dev \ net-tools libtool module-init-tools vim && \ + apt-get install libprotobuf-dev -y && \ apt-get clean -y RUN ln -s /usr/lib/x86_64-linux-gnu/libssl.so /usr/lib/libssl.so.10 && \ ln -s /usr/lib/x86_64-linux-gnu/libcrypto.so /usr/lib/libcrypto.so.10 -RUN wget https://github.com/koalaman/shellcheck/releases/download/v0.7.1/shellcheck-v0.7.1.linux.x86_64.tar.xz -O shellcheck-v0.7.1.linux.x86_64.tar.xz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/shellcheck-v0.7.1.linux.x86_64.tar.xz && \ tar -xf shellcheck-v0.7.1.linux.x86_64.tar.xz && cp shellcheck-v0.7.1/shellcheck /usr/bin/shellcheck && \ rm -rf shellcheck-v0.7.1.linux.x86_64.tar.xz shellcheck-v0.7.1 @@ -52,33 +57,34 @@ WORKDIR /usr/bin # install cmake WORKDIR /home -RUN wget -q https://cmake.org/files/v3.16/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz ENV PATH=/home/cmake-3.16.0-Linux-x86_64/bin:$PATH -# Install Python3.6 +# Install Python build RUN mkdir -p /root/python_build/ && wget -q https://www.sqlite.org/2018/sqlite-autoconf-3250300.tar.gz && \ tar -zxf sqlite-autoconf-3250300.tar.gz && cd sqlite-autoconf-3250300 && \ ./configure -prefix=/usr/local && make -j8 && make install && cd ../ && rm sqlite-autoconf-3250300.tar.gz -RUN wget -q https://www.python.org/ftp/python/3.6.0/Python-3.6.0.tgz && \ +# Install Python3.6 +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.6.0.tgz && \ tar -xzf Python-3.6.0.tgz && cd Python-3.6.0 && 
\ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.6.0* # Install Python3.7 -RUN wget -q https://www.python.org/ftp/python/3.7.0/Python-3.7.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.7.0.tgz && \ tar -xzf Python-3.7.0.tgz && cd Python-3.7.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.7.0* # Install Python3.8 -RUN wget -q https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.8.0.tgz && \ tar -xzf Python-3.8.0.tgz && cd Python-3.8.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.8.0* # Install Python3.9 -RUN wget -q https://www.python.org/ftp/python/3.9.0/Python-3.9.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.9.0.tgz && \ tar -xzf Python-3.9.0.tgz && cd Python-3.9.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.9.0* @@ -119,8 +125,6 @@ RUN git config --global credential.helper store # Fix locales to en_US.UTF-8 RUN localedef -i en_US -f UTF-8 en_US.UTF-8 -RUN apt-get install libprotobuf-dev -y - # Older versions of patchelf limited the size of the files being processed and were fixed in this pr. # https://github.com/NixOS/patchelf/commit/ba2695a8110abbc8cc6baf0eea819922ee5007fa # So install a newer version here. 
@@ -139,10 +143,10 @@ RUN wget https://paddle-ci.gz.bcebos.com/ccache-3.7.9.tar.gz && \ ln -s /usr/local/ccache-3.7.9/bin/ccache /usr/local/bin/ccache # Update pip version -RUN python3.8 -m pip install --upgrade pip==21.3.1 requests && \ - python3.7 -m pip install --upgrade pip==21.3.1 requests && \ - python3.6 -m pip install --upgrade pip==21.3.1 requests && \ - python3.9 -m pip install --upgrade pip==21.3.1 requests +RUN python3.9 -m pip install --upgrade pip==22.0.3 -i https://pypi.douban.com/simple && \ + python3.8 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.7 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.6 -m pip install --upgrade pip -i https://pypi.douban.com/simple # Wget ssl libs and link thems RUN wget https://paddle-serving.bj.bcebos.com/others/centos_ssl.tar && \ diff --git a/tools/Dockerfile.devel b/tools/Dockerfile.devel index 73d1cbf6bb57e3e720c4e9dd56be95f0d601902d..791bf69fbb183d110d5b41deab35864ad87c91aa 100644 --- a/tools/Dockerfile.devel +++ b/tools/Dockerfile.devel @@ -15,14 +15,18 @@ ENV HOME /root # Add bash enhancements COPY tools/dockerfiles/root/ /root/ +# Set ubuntu sources +RUN sed -i s:/archive.ubuntu.com:/mirrors.tuna.tsinghua.edu.cn/ubuntu:g /etc/apt/sources.list && \ + cat /etc/apt/sources.list && \ + apt-get clean -y && \ + apt-get -y update --fix-missing + # Prepare packages for Python -RUN apt-get update && \ - apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ +RUN apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \ xz-utils tk-dev libffi-dev liblzma-dev -RUN apt-get update && \ - apt-get install -y --allow-downgrades --allow-change-held-packages \ +RUN apt-get install -y --allow-downgrades --allow-change-held-packages \ patchelf git python-pip python-dev python-opencv openssh-server bison \ wget unzip unrar tar xz-utils bzip2 gzip 
coreutils ntp \ curl sed grep graphviz libjpeg-dev zlib1g-dev \ @@ -30,12 +34,13 @@ RUN apt-get update && \ automake locales clang-format swig \ liblapack-dev liblapacke-dev libcurl4-openssl-dev \ net-tools libtool module-init-tools vim && \ + apt-get install libprotobuf-dev -y && \ apt-get clean -y RUN ln -s /usr/lib/x86_64-linux-gnu/libssl.so /usr/lib/libssl.so.10 && \ ln -s /usr/lib/x86_64-linux-gnu/libcrypto.so /usr/lib/libcrypto.so.10 -RUN wget https://github.com/koalaman/shellcheck/releases/download/v0.7.1/shellcheck-v0.7.1.linux.x86_64.tar.xz -O shellcheck-v0.7.1.linux.x86_64.tar.xz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/shellcheck-v0.7.1.linux.x86_64.tar.xz && \ tar -xf shellcheck-v0.7.1.linux.x86_64.tar.xz && cp shellcheck-v0.7.1/shellcheck /usr/bin/shellcheck && \ rm -rf shellcheck-v0.7.1.linux.x86_64.tar.xz shellcheck-v0.7.1 @@ -52,33 +57,34 @@ WORKDIR /usr/bin # install cmake WORKDIR /home -RUN wget -q https://cmake.org/files/v3.16/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/cmake-3.16.0-Linux-x86_64.tar.gz && tar -zxvf cmake-3.16.0-Linux-x86_64.tar.gz && rm cmake-3.16.0-Linux-x86_64.tar.gz ENV PATH=/home/cmake-3.16.0-Linux-x86_64/bin:$PATH -# Install Python3.6 +# Install Python build RUN mkdir -p /root/python_build/ && wget -q https://www.sqlite.org/2018/sqlite-autoconf-3250300.tar.gz && \ tar -zxf sqlite-autoconf-3250300.tar.gz && cd sqlite-autoconf-3250300 && \ ./configure -prefix=/usr/local && make -j8 && make install && cd ../ && rm sqlite-autoconf-3250300.tar.gz -RUN wget -q https://www.python.org/ftp/python/3.6.0/Python-3.6.0.tgz && \ +# Install Python3.6 +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.6.0.tgz && \ tar -xzf Python-3.6.0.tgz && cd Python-3.6.0 && \ CFLAGS="-Wformat" ./configure 
--prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.6.0* # Install Python3.7 -RUN wget -q https://www.python.org/ftp/python/3.7.0/Python-3.7.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.7.0.tgz && \ tar -xzf Python-3.7.0.tgz && cd Python-3.7.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.7.0* # Install Python3.8 -RUN wget -q https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.8.0.tgz && \ tar -xzf Python-3.8.0.tgz && cd Python-3.8.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.8.0* # Install Python3.9 -RUN wget -q https://www.python.org/ftp/python/3.9.0/Python-3.9.0.tgz && \ +RUN wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/Python-3.9.0.tgz && \ tar -xzf Python-3.9.0.tgz && cd Python-3.9.0 && \ CFLAGS="-Wformat" ./configure --prefix=/usr/local/ --enable-shared > /dev/null && \ make -j8 > /dev/null && make altinstall > /dev/null && ldconfig && cd .. && rm -rf Python-3.9.0* @@ -120,8 +126,6 @@ RUN git config --global credential.helper store # Fix locales to en_US.UTF-8 RUN localedef -i en_US -f UTF-8 en_US.UTF-8 -RUN apt-get install libprotobuf-dev -y - # Older versions of patchelf limited the size of the files being processed and were fixed in this pr. # https://github.com/NixOS/patchelf/commit/ba2695a8110abbc8cc6baf0eea819922ee5007fa # So install a newer version here. 
@@ -140,10 +144,10 @@ RUN wget https://paddle-ci.gz.bcebos.com/ccache-3.7.9.tar.gz && \ ln -s /usr/local/ccache-3.7.9/bin/ccache /usr/local/bin/ccache # Update pip version -RUN python3.8 -m pip install --upgrade pip==21.3.1 requests && \ - python3.7 -m pip install --upgrade pip==21.3.1 requests && \ - python3.6 -m pip install --upgrade pip==21.3.1 requests && \ - python3.9 -m pip install --upgrade pip==21.3.1 requests +RUN python3.9 -m pip install --upgrade pip==22.0.3 -i https://pypi.douban.com/simple && \ + python3.8 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.7 -m pip install --upgrade pip -i https://pypi.douban.com/simple && \ + python3.6 -m pip install --upgrade pip -i https://pypi.douban.com/simple # Wget ssl libs and link thems RUN wget https://paddle-serving.bj.bcebos.com/others/centos_ssl.tar && \ diff --git a/tools/auth/auth-serving-docker.yaml b/tools/auth/auth-serving-docker.yaml index 49659563d74fc08a8d569bddb702b206b6225a51..dcd03037778eeab4094fa8eacd156cf65761d158 100644 --- a/tools/auth/auth-serving-docker.yaml +++ b/tools/auth/auth-serving-docker.yaml @@ -20,7 +20,7 @@ services: restart: always kong-migrations: - image: registry.baidubce.com/serving_gateway/kong:paddle + image: registry.baidubce.com/paddlepaddle/serving:gateway-kong command: kong migrations bootstrap depends_on: - db @@ -33,7 +33,7 @@ services: restart: on-failure kong: - image: registry.baidubce.com/serving_gateway/kong:paddle + image: registry.baidubce.com/paddlepaddle/serving:gateway-kong depends_on: - db - redis @@ -59,7 +59,7 @@ services: restart: always kong-prepare: - image: registry.baidubce.com/serving_gateway/kong:paddle + image: registry.baidubce.com/paddlepaddle/serving:gateway-kong entrypoint: ["bash", "/autoconfigure-admin-api.sh"] depends_on: - kong diff --git a/tools/cpp_examples/bert-gpu-serving/paddle-gpu-serving/paddle_gpu_serving/__init__.py b/tools/cpp_examples/bert-gpu-serving/paddle-gpu-serving/paddle_gpu_serving/__init__.py 
index b836a7e4a305f0ccd7a1ef5d3f2b4e5f6446a899..dd2ff3a3efcb037d8fc7825342be5c6ddfb98051 100644 --- a/tools/cpp_examples/bert-gpu-serving/paddle-gpu-serving/paddle_gpu_serving/__init__.py +++ b/tools/cpp_examples/bert-gpu-serving/paddle-gpu-serving/paddle_gpu_serving/__init__.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.8.2' +__version__ = '0.8.3' diff --git a/tools/dockerfiles/build_scripts/build.sh b/tools/dockerfiles/build_scripts/build.sh index 7d5e019443229e116599f804f714e599fcc72fbc..f133254d121d7102d9c9347cdc53cbcb25d5a258 100644 --- a/tools/dockerfiles/build_scripts/build.sh +++ b/tools/dockerfiles/build_scripts/build.sh @@ -63,7 +63,7 @@ yum -y install bzip2 make git patch unzip bison yasm diffutils \ # /bin/sh cmake-3.8.1-Linux-x86_64.sh --prefix=/usr/local --skip-license # rm cmake-3.8.1-Linux-x86_64.sh -wget -q https://cmake.org/files/v3.16/cmake-3.16.0.tar.gz && tar xzf cmake-3.16.0.tar.gz && \ +wget -q --no-check-certificate https://paddle-serving.bj.bcebos.com/python/cmake-3.16.0.tar.gz && tar xzf cmake-3.16.0.tar.gz && \ cd cmake-3.16.0 && ./bootstrap && \ make -j8 && make install && cd ..
&& rm cmake-3.16.0.tar.gz && rm -rf cmake-3.16.0 diff --git a/tools/generate_runtime_docker.sh b/tools/generate_runtime_docker.sh index 61fc507563a62a34b22c3342e391e88726d84c4a..1e58527795aeea6b156277a12af2ea36a2086724 100644 --- a/tools/generate_runtime_docker.sh +++ b/tools/generate_runtime_docker.sh @@ -9,7 +9,7 @@ function usage echo " "; echo " --env : running env, cpu/cuda10.1/cuda10.2/cuda11.2"; echo " --python : python version, 3.6/3.7/3.8/3.9 "; - echo " --serving : serving version(v0.8.2/0.7.0)"; + echo " --serving : serving version(v0.8.3/0.7.0)"; echo " --paddle : paddle version(2.2.2/2.2.0)" echo " --image_name : image name(default serving_runtime:env-python)" echo " -h | --help : helper"; diff --git a/tools/scripts/ipipe_py3.sh b/tools/scripts/ipipe_py3.sh index cbc7a11abc0b8b848f5d0b9f62595ed6d09284c6..4708f0be229e4bf7c7036fc59e52f14679a59f0d 100644 --- a/tools/scripts/ipipe_py3.sh +++ b/tools/scripts/ipipe_py3.sh @@ -228,7 +228,7 @@ function link_data() { function before_hook() { setproxy cd ${build_path}/python - ${py_version} -m pip install --upgrade pip==21.1.3 + ${py_version} -m pip install --upgrade pip==21.1.3 -i https://pypi.douban.com/simple ${py_version} -m pip install requests ${py_version} -m pip install -r requirements.txt ${py_version} -m pip install numpy==1.16.4