diff --git a/README.md b/README.md
index 9d1ec854ba67d220a481816cda5eeebf2bc89739..17730e2a071facf7c939cb7fb686596b2b752aa6 100644
--- a/README.md
+++ b/README.md
@@ -264,8 +264,8 @@ curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"url": "https://pa
 ### About Efficiency
 - [How to profile Paddle Serving latency?](python/examples/util)
-- [How to optimize performance?(Chinese)](doc/MULTI_SERVICE_ON_ONE_GPU_CN.md)
-- [Deploy multi-services on one GPU(Chinese)](doc/PERFORMANCE_OPTIM_CN.md)
+- [How to optimize performance?(Chinese)](doc/PERFORMANCE_OPTIM_CN.md)
+- [Deploy multi-services on one GPU(Chinese)](doc/MULTI_SERVICE_ON_ONE_GPU_CN.md)
 - [CPU Benchmarks(Chinese)](doc/BENCHMARKING.md)
 - [GPU Benchmarks(Chinese)](doc/GPU_BENCHMARKING.md)
diff --git a/README_CN.md b/README_CN.md
index 0c30ef0cffea7d2940c544c55b641255108908fd..3302d4850e8255e8d2d6460c201892fd6035b260 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -270,8 +270,8 @@ curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"url": "https://pa
 ### 关于Paddle Serving性能
 - [如何测试Paddle Serving性能?](python/examples/util/)
-- [如何优化性能?](doc/MULTI_SERVICE_ON_ONE_GPU_CN.md)
-- [在一张GPU上启动多个预测服务](doc/PERFORMANCE_OPTIM_CN.md)
+- [如何优化性能?](doc/PERFORMANCE_OPTIM_CN.md)
+- [在一张GPU上启动多个预测服务](doc/MULTI_SERVICE_ON_ONE_GPU_CN.md)
 - [CPU版Benchmarks](doc/BENCHMARKING.md)
 - [GPU版Benchmarks](doc/GPU_BENCHMARKING.md)
diff --git a/core/cube/cube-agent/src/agent/util.go b/core/cube/cube-agent/src/agent/util.go
index 29d27682a3c2e1c46d7ca8cb71de53c2e95df71f..1a0917d9810fb17cdaa4b2b1177d1e7414344a3e 100644
--- a/core/cube/cube-agent/src/agent/util.go
+++ b/core/cube/cube-agent/src/agent/util.go
@@ -83,9 +83,6 @@ func JsonReq(method, requrl string, timeout int, kv *map[string]string,
 }
 
 func GetHdfsMeta(src string) (master, ugi, path string, err error) {
-	//src = "hdfs://root:rootpasst@st1-inf-platform0.st01.baidu.com:54310/user/mis_user/news_dnn_ctr_cube_1/1501836820/news_dnn_ctr_cube_1_part54.tar"
-	//src = "hdfs://st1-inf-platform0.st01.baidu.com:54310/user/mis_user/news_dnn_ctr_cube_1/1501836820/news_dnn_ctr_cube_1_part54.tar"
-
 	ugiBegin := strings.Index(src, "//")
 	ugiPos := strings.LastIndex(src, "@")
 	if ugiPos != -1 && ugiBegin != -1 {
diff --git a/core/general-client/include/general_model.h b/core/general-client/include/general_model.h
index 7e04ae11f2106bc8e03fb9045976abc2460e1864..8a4c5e8c3c15bdcb59fc5faa7461713bf291bab4 100644
--- a/core/general-client/include/general_model.h
+++ b/core/general-client/include/general_model.h
@@ -69,9 +69,15 @@ class ModelRes {
   const std::vector<int64_t>& get_int64_by_name(const std::string& name) {
     return _int64_value_map[name];
   }
+  std::vector<int64_t>&& get_int64_by_name_with_rv(const std::string& name) {
+    return std::move(_int64_value_map[name]);
+  }
   const std::vector<float>& get_float_by_name(const std::string& name) {
     return _float_value_map[name];
   }
+  std::vector<float>&& get_float_by_name_with_rv(const std::string& name) {
+    return std::move(_float_value_map[name]);
+  }
   const std::vector<int>& get_shape(const std::string& name) {
     return _shape_map[name];
   }
@@ -121,10 +127,18 @@ class PredictorRes {
                                             const std::string& name) {
     return _models[model_idx].get_int64_by_name(name);
   }
+  std::vector<int64_t>&& get_int64_by_name_with_rv(const int model_idx,
+                                                   const std::string& name) {
+    return std::move(_models[model_idx].get_int64_by_name_with_rv(name));
+  }
   const std::vector<float>& get_float_by_name(const int model_idx,
                                               const std::string& name) {
     return _models[model_idx].get_float_by_name(name);
  }
+  std::vector<float>&& get_float_by_name_with_rv(const int model_idx,
+                                                 const std::string& name) {
+    return std::move(_models[model_idx].get_float_by_name_with_rv(name));
+  }
   const std::vector<int>& get_shape(const int model_idx,
                                     const std::string& name) {
     return _models[model_idx].get_shape(name);
diff --git a/core/general-client/src/general_model.cpp b/core/general-client/src/general_model.cpp
index cab050e732fb701120c7f1a5c80737fc75282324..d4e54c2ac04cf84b2a036f7abe0d426e6f186699 100644
--- a/core/general-client/src/general_model.cpp
+++ b/core/general-client/src/general_model.cpp
@@ -258,9 +258,10 @@ int PredictorClient::batch_predict(
     ModelRes model;
     model.set_engine_name(output.engine_name());
+    int idx = 0;
+
     for (auto &name : fetch_name) {
       // int idx = _fetch_name_to_idx[name];
-      int idx = 0;
       int shape_size = output.insts(0).tensor_array(idx).shape_size();
       VLOG(2) << "fetch var " << name << " index " << idx << " shape size "
               << shape_size;
@@ -279,9 +280,9 @@ int PredictorClient::batch_predict(
       idx += 1;
     }
+    idx = 0;
     for (auto &name : fetch_name) {
       // int idx = _fetch_name_to_idx[name];
-      int idx = 0;
       if (_fetch_name_to_type[name] == 0) {
         VLOG(2) << "fetch var " << name << " type int";
         model._int64_value_map[name].resize(
@@ -536,9 +537,9 @@ int PredictorClient::numpy_predict(
     ModelRes model;
     model.set_engine_name(output.engine_name());
+    int idx = 0;
     for (auto &name : fetch_name) {
       // int idx = _fetch_name_to_idx[name];
-      int idx = 0;
       int shape_size = output.insts(0).tensor_array(idx).shape_size();
       VLOG(2) << "fetch var " << name << " index " << idx << " shape size "
               << shape_size;
@@ -557,9 +558,10 @@ int PredictorClient::numpy_predict(
       idx += 1;
     }
+    idx = 0;
+
     for (auto &name : fetch_name) {
       // int idx = _fetch_name_to_idx[name];
-      int idx = 0;
       if (_fetch_name_to_type[name] == 0) {
         VLOG(2) << "fetch var " << name << " type int";
         model._int64_value_map[name].resize(
diff --git a/core/general-client/src/pybind_general_model.cpp b/core/general-client/src/pybind_general_model.cpp
index b0d1d2d624d616a1df3805364cf7802cc19fc46b..676114e4e44a9553cb06f00defb19b6c754d51e6 100644
--- a/core/general-client/src/pybind_general_model.cpp
+++ b/core/general-client/src/pybind_general_model.cpp
@@ -32,14 +32,23 @@ PYBIND11_MODULE(serving_client, m) {
       .def(py::init())
       .def("get_int64_by_name",
            [](PredictorRes &self, int model_idx, std::string &name) {
-             return self.get_int64_by_name(model_idx, name);
-           },
-           py::return_value_policy::reference)
+             // see more: https://github.com/pybind/pybind11/issues/1042
+             std::vector<int64_t> *ptr = new std::vector<int64_t>(
+                 std::move(self.get_int64_by_name_with_rv(model_idx, name)));
+             auto capsule = py::capsule(ptr, [](void *p) {
+               delete reinterpret_cast<std::vector<int64_t> *>(p);
+             });
+             return py::array(ptr->size(), ptr->data(), capsule);
+           })
       .def("get_float_by_name",
            [](PredictorRes &self, int model_idx, std::string &name) {
-             return self.get_float_by_name(model_idx, name);
-           },
-           py::return_value_policy::reference)
+             std::vector<float> *ptr = new std::vector<float>(
+                 std::move(self.get_float_by_name_with_rv(model_idx, name)));
+             auto capsule = py::capsule(ptr, [](void *p) {
+               delete reinterpret_cast<std::vector<float> *>(p);
+             });
+             return py::array(ptr->size(), ptr->data(), capsule);
+           })
       .def("get_shape",
            [](PredictorRes &self, int model_idx, std::string &name) {
             return self.get_shape(model_idx, name);
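The capsule-based bindings above move each C++ result vector into a heap allocation whose lifetime is tied to the returned `py::array`, so fetch results cross into Python without an extra copy. A minimal sketch of what this looks like from the client side; the endpoint, config path, and feed values are placeholder assumptions borrowed from the fit_a_line example, not part of this patch:

```python
# Hedged sketch: assumes a fit_a_line style model served at 127.0.0.1:9393
# and the client config produced by save_model.
import numpy as np
from paddle_serving_client import Client

client = Client()
client.load_client_config("uci_housing_client/serving_client_conf.prototxt")
client.connect(["127.0.0.1:9393"])

x = [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583,
     -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332]
fetch_map = client.predict(feed={"x": x}, fetch=["price"])
# With the capsule-backed bindings, the fetched value should arrive as a
# numpy array backed by the moved C++ buffer rather than a copied list.
print(type(fetch_map["price"]))
```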
"bert_chinese_L-12_H-768_A-12" module = hub.Module(model_name) inputs, outputs, program = module.context(trainable=True, max_seq_len=20) -feed_keys = ["input_ids", "position_ids", "segment_ids", "input_mask", "pooled_output", "sequence_output"] +feed_keys = ["input_ids", "position_ids", "segment_ids", "input_mask"] fetch_keys = ["pooled_output", "sequence_output"] feed_dict = dict(zip(feed_keys, [inputs[x] for x in feed_keys])) -fetch_dict = dict(zip(fetch_keys, [outputs[x]] for x in fetch_keys)) +fetch_dict = dict(zip(fetch_keys, [outputs[x] for x in fetch_keys])) import paddle_serving_client.io as serving_io serving_io.save_model("bert_seq20_model", "bert_seq20_client", feed_dict, fetch_dict, program) diff --git a/doc/SAVE.md b/doc/SAVE.md index 3f7f97e12e1e309ff0933e150ea7bcd23298b60e..4fcdfa438574fac7de21c963f5bb173c69261210 100644 --- a/doc/SAVE.md +++ b/doc/SAVE.md @@ -10,8 +10,9 @@ serving_io.save_model("imdb_model", "imdb_client_conf", {"words": data}, {"prediction": prediction}, fluid.default_main_program()) ``` -`imdb_model` is the server side model with serving configurations. `imdb_client_conf` is the client rpc configurations. Serving has a -dictionary for `Feed` and `Fetch` variables for client to assign. In the example, `{"words": data}` is the feed dict that specify the input of saved inference model. `{"prediction": prediction}` is the fetch dic that specify the output of saved inference model. An alias name can be defined for feed and fetch variables. An example of how to use alias name +`imdb_model` is the server side model with serving configurations. `imdb_client_conf` is the client rpc configurations. + +Serving has a dictionary for `Feed` and `Fetch` variables for client to assign. In the example, `{"words": data}` is the feed dict that specify the input of saved inference model. `{"prediction": prediction}` is the fetch dic that specify the output of saved inference model. An alias name can be defined for feed and fetch variables. An example of how to use alias name is as follows: ``` python from paddle_serving_client import Client @@ -35,10 +36,14 @@ for line in sys.stdin: If you have saved model files using Paddle's `save_inference_model` API, you can use Paddle Serving's` inference_model_to_serving` API to convert it into a model file that can be used for Paddle Serving. ``` import paddle_serving_client.io as serving_io -serving_io.inference_model_to_serving(dirname, model_filename=None, params_filename=None, serving_server="serving_server", serving_client="serving_client") +serving_io.inference_model_to_serving(dirname, serving_server="serving_server", serving_client="serving_client", model_filename=None, params_filename=None ) ``` dirname (str) - Path of saved model files. Program file and parameter files are saved in this directory. -model_filename (str, optional) - The name of file to load the inference program. If it is None, the default filename __model__ will be used. Default: None. -paras_filename (str, optional) - The name of file to load all parameters. It is only used for the case that all parameters were saved in a single binary file. If parameters were saved in separate files, set it as None. Default: None. + serving_server (str, optional) - The path of model files and configuration files for server. Default: "serving_server". + serving_client (str, optional) - The path of configuration files for client. Default: "serving_client". + +model_filename (str, optional) - The name of file to load the inference program. 
diff --git a/doc/SAVE_CN.md b/doc/SAVE_CN.md
index fc75cd8d015a6d6f42a08f29e4035db20f450d91..3ca715c024a38b6fdce5c973844e7d023eebffcc 100644
--- a/doc/SAVE_CN.md
+++ b/doc/SAVE_CN.md
@@ -11,7 +11,9 @@
 serving_io.save_model("imdb_model", "imdb_client_conf",
                       {"words": data}, {"prediction": prediction},
                       fluid.default_main_program())
 ```
-imdb_model是具有服务配置的服务器端模型。 imdb_client_conf是客户端rpc配置。 Serving有一个 提供给用户存放Feed和Fetch变量信息的字典。 在示例中,`{words”:data}` 是用于指定已保存推理模型输入的提要字典。`{"prediction":projection}`是指定保存的推理模型输出的字典。可以为feed和fetch变量定义一个别名。 如何使用别名的例子 示例如下:
+imdb_model是具有服务配置的服务器端模型,imdb_client_conf是客户端rpc配置。
+
+Serving有一个提供给用户存放Feed和Fetch变量信息的字典。在示例中,`{"words":data}` 是用于指定已保存推理模型输入的feed字典,`{"prediction":prediction}` 是用于指定已保存推理模型输出的fetch字典。可以为feed和fetch变量定义别名,使用别名的示例如下:
 ``` python
 from paddle_serving_client import Client
@@ -35,10 +37,14 @@ for line in sys.stdin:
 如果已使用Paddle 的`save_inference_model`接口保存出预测要使用的模型,则可以通过Paddle Serving的`inference_model_to_serving`接口转换成可用于Paddle Serving的模型文件。
 ```
 import paddle_serving_client.io as serving_io
-serving_io.inference_model_to_serving(dirname, model_filename=None, params_filename=None, serving_server="serving_server", serving_client="serving_client")
+serving_io.inference_model_to_serving(dirname, serving_server="serving_server", serving_client="serving_client", model_filename=None, params_filename=None)
 ```
 dirname (str) – 需要转换的模型文件存储路径,Program结构文件和参数文件均保存在此目录。
-model_filename (str,可选) – 存储需要转换的模型Inference Program结构的文件名称。如果设置为None,则使用 __model__ 作为默认的文件名。默认值为None。
+
+serving_server (str, 可选) - 转换后的模型文件和配置文件的存储路径。默认值为"serving_server"。
+
+serving_client (str, 可选) - 转换后的客户端配置文件存储路径。默认值为"serving_client"。
+
+model_filename (str,可选) – 存储需要转换的模型Inference Program结构的文件名称。如果设置为None,则使用 `__model__` 作为默认的文件名。默认值为None。
+
 params_filename (str,可选) – 存储需要转换的模型所有参数的文件名称。当且仅当所有模型参数被保存在一个单独的二进制文件中,它才需要被指定。如果模型参数是存储在各自分离的文件中,设置它的值为None。默认值为None。
-serving_server (str, 可选) - 转换后的模型文件和配置文件的存储路径。默认值为"serving_server"。
-serving_client (str, 可选) - 转换后的客户端配置文件存储路径。默认值为"serving_client"。
diff --git a/doc/UWSGI_DEPLOY.md b/doc/UWSGI_DEPLOY.md
index 02c0488d1bc0c43e050421e0991125fb3a4d644e..cb3fb506bf6fd4461240ebe43234fa3bed3d4784 100644
--- a/doc/UWSGI_DEPLOY.md
+++ b/doc/UWSGI_DEPLOY.md
@@ -1,6 +1,8 @@
-# 使用uwsgi启动HTTP预测服务
+# Deploy HTTP service with uWSGI
 
-在提供的fit_a_line示例中,启动HTTP预测服务后会看到有以下信息:
+([简体中文](./UWSGI_DEPLOY_CN.md)|English)
+
+In the fit_a_line example, after starting the HTTP prediction service you will see the following output:
 
 ```shell
 web service address:
@@ -13,46 +15,31 @@ http://10.127.3.150:9393/uci/prediction
  * Running on http://0.0.0.0:9393/ (Press CTRL+C to quit)
 ```
 
-这里会提示启动的HTTP服务是开发模式,并不能用于生产环境的部署。Flask启动的服务环境不够稳定也无法承受大量请求的并发,实际部署过程中配合需要WSGI(Web Server Gateway Interface)使用。
+Here you are warned that the HTTP service is running in development mode and cannot be used for production deployment.
+The prediction service started by Flask is not stable enough to withstand a large number of concurrent requests, so a production WSGI (Web Server Gateway Interface) server should be used for actual deployment.
 
-下面我们展示一下如何使用[uWSGI](https://github.com/unbit/uwsgi)模块来部署HTTP预测服务用于生产环境。
+Next, we show how to use the [uWSGI](https://github.com/unbit/uwsgi) module to deploy an HTTP prediction service for production environments.
 
-编写HTTP服务脚本
 ```python
 #uwsgi_service.py
 from paddle_serving_server.web_service import WebService
-from flask import Flask, request
 
-#配置预测服务
+#Define the prediction service
 uci_service = WebService(name = "uci")
 uci_service.load_model_config("./uci_housing_model")
 uci_service.prepare_server(workdir="./workdir", port=int(9500), device="cpu")
 uci_service.run_server()
-
-#配置flask服务
-app_instance = Flask(__name__)
-@app_instance.before_first_request
-def init():
-    global uci_service
-    uci_service._launch_web_service()
-
-service_name = "/" + uci_service.name + "/prediction"
-@app_instance.route(service_name, methods=["POST"])
-def run():
-    return uci_service.get_prediction(request)
-
-#run方法用于直接调试中直接启动服务
-if __name__ == "__main__":
-    app_instance.run()
+#Get the flask application
+app_instance = uci_service.get_app_instance()
 ```
 
-使用uwsgi启动HTTP服务
+Start the service with uWSGI
 ```bash
-uwsgi --http :9000 --wsgi-file uwsgi_service.py --callable app_instance --processes 4
+uwsgi --http :9393 --module uwsgi_service:app_instance
 ```
 
-使用--processes参数可以指定服务的进程数,请注意目前Serving HTTP 服务暂时不支持多线程的方式使用。
+Use the --processes parameter to specify the number of service processes.
 
-更多uWSGI的信息请参考[uWSGI使用文档](https://uwsgi-docs.readthedocs.io/en/latest/)
+For more information about uWSGI, please refer to the [uWSGI documentation](https://uwsgi-docs.readthedocs.io/en/latest/)
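To verify the uWSGI deployment, the endpoint can be exercised exactly like the Flask development server. A hedged Python smoke test follows; the feed key `x`, fetch key `price`, and the 13 feature values mirror the fit_a_line example and are assumptions, not part of this patch:

```python
import json
import requests

# 13 normalized UCI housing features; values are illustrative only.
payload = {
    "feed": [{"x": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583,
                    -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332]}],
    "fetch": ["price"]
}
r = requests.post("http://127.0.0.1:9393/uci/prediction",
                  data=json.dumps(payload),
                  headers={"Content-Type": "application/json"})
print(r.json())
```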
diff --git a/doc/UWSGI_DEPLOY_CN.md b/doc/UWSGI_DEPLOY_CN.md
new file mode 100644
index 0000000000000000000000000000000000000000..5bb87e26bbae729f8c21b4681413a4c9f5c4e7c8
--- /dev/null
+++ b/doc/UWSGI_DEPLOY_CN.md
@@ -0,0 +1,45 @@
+# 使用uwsgi启动HTTP预测服务
+
+(简体中文|[English](./UWSGI_DEPLOY.md))
+
+在提供的fit_a_line示例中,启动HTTP预测服务后会看到有以下信息:
+
+```shell
+web service address:
+http://10.127.3.150:9393/uci/prediction
+ * Serving Flask app "serve" (lazy loading)
+ * Environment: production
+   WARNING: This is a development server. Do not use it in a production deployment.
+   Use a production WSGI server instead.
+ * Debug mode: off
+ * Running on http://0.0.0.0:9393/ (Press CTRL+C to quit)
+```
+
+这里会提示启动的HTTP服务是开发模式,并不能用于生产环境的部署。Flask启动的服务环境不够稳定,也无法承受大量请求的并发,实际部署过程中需要配合WSGI(Web Server Gateway Interface)使用。
+
+下面我们展示一下如何使用[uWSGI](https://github.com/unbit/uwsgi)模块来部署HTTP预测服务用于生产环境。
+
+编写HTTP服务脚本
+
+```python
+#uwsgi_service.py
+from paddle_serving_server.web_service import WebService
+
+#配置预测服务
+uci_service = WebService(name = "uci")
+uci_service.load_model_config("./uci_housing_model")
+uci_service.prepare_server(workdir="./workdir", port=int(9500), device="cpu")
+uci_service.run_server()
+#获取flask服务
+app_instance = uci_service.get_app_instance()
+```
+
+使用uwsgi启动HTTP服务
+
+```bash
+uwsgi --http :9393 --module uwsgi_service:app_instance
+```
+
+使用--processes参数可以指定服务的进程数。
+
+更多uWSGI的信息请参考[uWSGI使用文档](https://uwsgi-docs.readthedocs.io/en/latest/)
diff --git a/python/examples/cascade_rcnn/README.md b/python/examples/cascade_rcnn/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..87617a842fcdb78d039b71634521c9d370f755fa
--- /dev/null
+++ b/python/examples/cascade_rcnn/README.md
@@ -0,0 +1,21 @@
+# Cascade RCNN model on Paddle Serving
+
+([简体中文](./README_CN.md)|English)
+
+### Get the Cascade RCNN model
+```
+sh get_data.sh
+```
+If you want more detection models, please refer to the [Paddle Detection Model Zoo](https://github.com/PaddlePaddle/PaddleDetection/blob/release/0.2/docs/MODEL_ZOO_cn.md)
+
+### Start the service
+```
+python -m paddle_serving_server_gpu.serve --model serving_server --port 9292 --gpu_id 0
+```
+
+### Perform prediction
+```
+python test_client.py
+```
+
+Images with bounding boxes and the JSON results will be saved in the `output` folder.
diff --git a/python/examples/cascade_rcnn/README_CN.md b/python/examples/cascade_rcnn/README_CN.md
new file mode 100644
index 0000000000000000000000000000000000000000..a37cb47331ce516c15587c6b2d8b9072c4d878f1
--- /dev/null
+++ b/python/examples/cascade_rcnn/README_CN.md
@@ -0,0 +1,21 @@
+# 使用Paddle Serving部署Cascade RCNN模型
+
+(简体中文|[English](./README.md))
+
+### 获得Cascade RCNN模型
+```
+sh get_data.sh
+```
+如果你想要更多的检测模型,请参考[Paddle检测模型库](https://github.com/PaddlePaddle/PaddleDetection/blob/release/0.2/docs/MODEL_ZOO_cn.md)
+
+### 启动服务
+```
+python -m paddle_serving_server_gpu.serve --model serving_server --port 9292 --gpu_id 0
+```
+
+### 执行预测
+```
+python test_client.py
+```
+
+客户端已经为图片做好了后处理,`output`文件夹下存放着各个检测框的json格式信息和后处理结果图片。
diff --git a/python/examples/cascade_rcnn/get_data.sh b/python/examples/cascade_rcnn/get_data.sh
new file mode 100644
index 0000000000000000000000000000000000000000..0aa9c7dc340367790eb52f5cc0074cb5d6fd0d05
--- /dev/null
+++ b/python/examples/cascade_rcnn/get_data.sh
@@ -0,0 +1,2 @@
+wget --no-check-certificate https://paddle-serving.bj.bcebos.com/pddet_demo/cascade_rcnn_r50_fpx_1x_serving.tar.gz
+tar xf cascade_rcnn_r50_fpx_1x_serving.tar.gz
diff --git a/python/examples/imagenet/README.md b/python/examples/imagenet/README.md
index 52518d211a4350284cea19546fb3e55d49fc265f..536440e73ea43f55a4c93bf126d62e86aa3983e6 100644
--- a/python/examples/imagenet/README.md
+++ b/python/examples/imagenet/README.md
@@ -15,34 +15,35 @@ sh get_model.sh
 pip install paddle_serving_app
 ```
 
-### HTTP Infer
+### HTTP Service
 
 launch server side
 ```
-python image_classification_service.py ResNet50_vd_model workdir 9393 #cpu inference service
+python resnet50_web_service.py ResNet50_vd_model cpu 9696 #cpu inference service
 ```
 ```
-python image_classification_service_gpu.py ResNet50_vd_model workdir 9393 #gpu inference service
+python resnet50_web_service.py ResNet50_vd_model gpu 9696 #gpu inference service
 ```
 
 client send inference request
 ```
-python image_http_client.py
+curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"image": "https://paddle-serving.bj.bcebos.com/imagenet-example/daisy.jpg"}], "fetch": ["score"]}' http://127.0.0.1:9696/image/prediction
 ```
-### RPC Infer
+
+### RPC Service
 
 launch server side
 ```
-python -m paddle_serving_server.serve --model ResNet50_vd_model --port 9393 #cpu inference service
+python -m paddle_serving_server.serve --model ResNet50_vd_model --port 9696 #cpu inference service
 ```
 ```
-python -m paddle_serving_server_gpu.serve --model ResNet50_vd_model --port 9393 --gpu_ids 0 #gpu inference service
+python -m paddle_serving_server_gpu.serve --model ResNet50_vd_model --port 9696 --gpu_ids 0 #gpu inference service
 ```
 
 client send inference request
 ```
 python image_rpc_client.py ResNet50_vd_client_config/serving_client_conf.prototxt
 ```
-*the port of server side in this example is 9393, the sample data used by client side is in the folder ./data. These parameter can be modified in practice*
+*The port of the server side in this example is 9696.*
diff --git a/python/examples/imagenet/README_CN.md b/python/examples/imagenet/README_CN.md
index 3b865cf91ecb62dacd0be5d35fa97bc2e0d50ce3..c34ccca32b737467e687dfd5e86c3229f4339075 100644
--- a/python/examples/imagenet/README_CN.md
+++ b/python/examples/imagenet/README_CN.md
@@ -15,34 +15,35 @@ sh get_model.sh
 pip install paddle_serving_app
 ```
 
-### 执行HTTP预测服务
+### HTTP服务
 
 启动server端
 ```
-python image_classification_service.py ResNet50_vd_model workdir 9393 #cpu预测服务
+python resnet50_web_service.py ResNet50_vd_model cpu 9696 #cpu预测服务
 ```
 ```
-python image_classification_service_gpu.py ResNet50_vd_model workdir 9393 #gpu预测服务
+python resnet50_web_service.py ResNet50_vd_model gpu 9696 #gpu预测服务
 ```
 
-client端进行预测
+发送HTTP POST请求
 ```
-python image_http_client.py
+curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"image": "https://paddle-serving.bj.bcebos.com/imagenet-example/daisy.jpg"}], "fetch": ["score"]}' http://127.0.0.1:9696/image/prediction
 ```
-### 执行RPC预测服务
+
+### RPC服务
 
 启动server端
 ```
-python -m paddle_serving_server.serve --model ResNet50_vd_model --port 9393 #cpu预测服务
+python -m paddle_serving_server.serve --model ResNet50_vd_model --port 9696 #cpu预测服务
 ```
 ```
-python -m paddle_serving_server_gpu.serve --model ResNet50_vd_model --port 9393 --gpu_ids 0 #gpu预测服务
+python -m paddle_serving_server_gpu.serve --model ResNet50_vd_model --port 9696 --gpu_ids 0 #gpu预测服务
 ```
 
 client端进行预测
 ```
 python image_rpc_client.py ResNet50_vd_client_config/serving_client_conf.prototxt
 ```
-*server端示例中服务端口为9393端口,client端示例中数据来自./data文件夹,server端地址为本地9393端口,可根据实际情况更改脚本。*
+*server端示例中服务端口为9696端口。*
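The curl command in the READMEs above maps one-to-one onto a short Python client, which is easier to script against. A hedged equivalent using the same URL and fetch name as the READMEs, assuming the web service is listening on port 9696:

```python
import json
import requests

req = {
    "feed": [{"image":
              "https://paddle-serving.bj.bcebos.com/imagenet-example/daisy.jpg"}],
    "fetch": ["score"]
}
r = requests.post("http://127.0.0.1:9696/image/prediction",
                  data=json.dumps(req),
                  headers={"Content-Type": "application/json"})
print(r.json())
```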
diff --git a/python/examples/imagenet/benchmark.py b/python/examples/imagenet/benchmark.py
index ece222f74c52614100a119e49c3754e22959b7c8..6b21719e7b665906e7abd02a7a3b8aef50136685 100644
--- a/python/examples/imagenet/benchmark.py
+++ b/python/examples/imagenet/benchmark.py
@@ -39,8 +39,8 @@ def single_func(idx, resource):
         client.connect([resource["endpoint"][idx % len(resource["endpoint"])]])
         start = time.time()
-        for i in range(1000):
-            img = reader.process_image(img_list[i]).reshape(-1)
+        for i in range(100):
+            img = reader.process_image(img_list[i])
             fetch_map = client.predict(feed={"image": img}, fetch=["score"])
         end = time.time()
         return [[end - start]]
@@ -49,7 +49,7 @@ if __name__ == "__main__":
     multi_thread_runner = MultiThreadRunner()
-    endpoint_list = ["127.0.0.1:9393"]
+    endpoint_list = ["127.0.0.1:9292"]
     #card_num = 4
     #for i in range(args.thread):
     #    endpoint_list.append("127.0.0.1:{}".format(9295 + i % card_num))
diff --git a/python/examples/imagenet/benchmark_batch.py b/python/examples/imagenet/benchmark_batch.py
index e531425770cbf9102b7ebd2f5b082c5c4aa14e71..1646fb9a94d6953f90f9f4907aa74940f13c2730 100644
--- a/python/examples/imagenet/benchmark_batch.py
+++ b/python/examples/imagenet/benchmark_batch.py
@@ -24,6 +24,7 @@ from paddle_serving_client.utils import MultiThreadRunner
 from paddle_serving_client.utils import benchmark_args
 import requests
 import json
+import base64
 from image_reader import ImageReader
 
 args = benchmark_args()
@@ -36,6 +37,10 @@ def single_func(idx, resource):
     img_list = []
     for i in range(1000):
         img_list.append(open("./image_data/n01440764/" + file_list[i]).read())
+    profile_flags = False
+    if "FLAGS_profile_client" in os.environ and os.environ[
+            "FLAGS_profile_client"]:
+        profile_flags = True
     if args.request == "rpc":
         reader = ImageReader()
         fetch = ["score"]
@@ -46,23 +51,43 @@ def single_func(idx, resource):
         for i in range(1000):
             if args.batch_size >= 1:
                 feed_batch = []
+                i_start = time.time()
                 for bi in range(args.batch_size):
                     img = reader.process_image(img_list[i])
-                    img = img.reshape(-1)
                     feed_batch.append({"image": img})
+                i_end = time.time()
+                if profile_flags:
+                    print("PROFILE\tpid:{}\timage_pre_0:{} image_pre_1:{}".
+                          format(os.getpid(),
+                                 int(round(i_start * 1000000)),
+                                 int(round(i_end * 1000000))))
+
                 result = client.predict(feed=feed_batch, fetch=fetch)
             else:
                 print("unsupport batch size {}".format(args.batch_size))
     elif args.request == "http":
-        raise ("no batch predict for http")
+        # detect the interpreter version instead of hard-coding Python 2
+        py_version = sys.version_info[0]
+        server = "http://" + resource["endpoint"][idx % len(resource[
+            "endpoint"])] + "/image/prediction"
+        start = time.time()
+        for i in range(1000):
+            if py_version == 2:
+                image = base64.b64encode(
+                    open("./image_data/n01440764/" + file_list[i]).read())
+            else:
+                # read the same file in binary mode on Python 3
+                image = base64.b64encode(
+                    open("./image_data/n01440764/" + file_list[i],
+                         "rb").read()).decode("utf-8")
+            req = json.dumps({"feed": [{"image": image}], "fetch": ["score"]})
+            r = requests.post(
+                server, data=req, headers={"Content-Type": "application/json"})
     end = time.time()
     return [[end - start]]
 
 
 if __name__ == '__main__':
     multi_thread_runner = MultiThreadRunner()
-    endpoint_list = ["127.0.0.1:9393"]
+    endpoint_list = ["127.0.0.1:9292"]
     #endpoint_list = endpoint_list + endpoint_list + endpoint_list
     result = multi_thread_runner.run(single_func, args.thread,
                                      {"endpoint": endpoint_list})
diff --git a/python/examples/imagenet/daisy.jpg b/python/examples/imagenet/daisy.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7edeca63e5f32e68550ef720d81f59df58a8eabc
Binary files /dev/null and b/python/examples/imagenet/daisy.jpg differ
diff --git a/python/examples/imagenet/flower.jpg b/python/examples/imagenet/flower.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..903f812c4ad87e7f608e895a8e6d26d596cc0b48
Binary files /dev/null and b/python/examples/imagenet/flower.jpg differ
diff --git a/python/examples/imagenet/image_classification_service.py b/python/examples/imagenet/image_classification_service.py
deleted file mode 100644
index 81169d6bdafa7024f2b997c48c0abdc04411e391..0000000000000000000000000000000000000000
--- a/python/examples/imagenet/image_classification_service.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from paddle_serving_server.web_service import WebService -import sys -import cv2 -import base64 -import numpy as np -from paddle_serving_app import ImageReader - - -class ImageService(WebService): - def preprocess(self, feed={}, fetch=[]): - reader = ImageReader() - feed_batch = [] - for ins in feed: - if "image" not in ins: - raise ("feed data error!") - sample = base64.b64decode(ins["image"]) - img = reader.process_image(sample) - feed_batch.append({"image": img}) - return feed_batch, fetch - - -image_service = ImageService(name="image") -image_service.load_model_config(sys.argv[1]) -image_service.prepare_server( - workdir=sys.argv[2], port=int(sys.argv[3]), device="cpu") -image_service.run_server() -image_service.run_flask() diff --git a/python/examples/imagenet/image_classification_service_gpu.py b/python/examples/imagenet/image_classification_service_gpu.py deleted file mode 100644 index 7cb973547982877a50e86062fe187233a32065e6..0000000000000000000000000000000000000000 --- a/python/examples/imagenet/image_classification_service_gpu.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -import cv2 -import base64 -import numpy as np -from paddle_serving_app import ImageReader -from paddle_serving_server_gpu.web_service import WebService - - -class ImageService(WebService): - def preprocess(self, feed={}, fetch=[]): - reader = ImageReader() - feed_batch = [] - for ins in feed: - if "image" not in ins: - raise ("feed data error!") - sample = base64.b64decode(ins["image"]) - img = reader.process_image(sample) - feed_batch.append({"image": img}) - return feed_batch, fetch - - -image_service = ImageService(name="image") -image_service.load_model_config(sys.argv[1]) -image_service.set_gpus("0,1") -image_service.prepare_server( - workdir=sys.argv[2], port=int(sys.argv[3]), device="gpu") -image_service.run_server() -image_service.run_flask() diff --git a/python/examples/imagenet/image_http_client.py b/python/examples/imagenet/image_http_client.py deleted file mode 100644 index 61b021be246dc4b843e608dcea21418419731b49..0000000000000000000000000000000000000000 --- a/python/examples/imagenet/image_http_client.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import requests -import base64 -import json -import time -import os -import sys - -py_version = sys.version_info[0] - - -def predict(image_path, server): - if py_version == 2: - image = base64.b64encode(open(image_path).read()) - else: - image = base64.b64encode(open(image_path, "rb").read()).decode("utf-8") - req = json.dumps({"feed": [{"image": image}], "fetch": ["score"]}) - r = requests.post( - server, data=req, headers={"Content-Type": "application/json"}) - try: - print(r.json()["result"]["score"]) - except ValueError: - print(r.text) - return r - - -if __name__ == "__main__": - server = "http://127.0.0.1:9393/image/prediction" - image_list = os.listdir("./image_data/n01440764/") - start = time.time() - for img in image_list: - image_file = "./image_data/n01440764/" + img - res = predict(image_file, server) - end = time.time() - print(end - start) diff --git a/python/examples/imagenet/image_reader.py b/python/examples/imagenet/image_reader.py deleted file mode 100644 index 843d9417ba37601232cb640d55f1d03f38cd7f76..0000000000000000000000000000000000000000 --- a/python/examples/imagenet/image_reader.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import cv2 -import numpy as np - - -class ImageReader(): - def __init__(self): - self.image_mean = [0.485, 0.456, 0.406] - self.image_std = [0.229, 0.224, 0.225] - self.image_shape = [3, 224, 224] - self.resize_short_size = 256 - self.interpolation = None - - def resize_short(self, img, target_size, interpolation=None): - """resize image - - Args: - img: image data - target_size: resize short target size - interpolation: interpolation mode - - Returns: - resized image data - """ - percent = float(target_size) / min(img.shape[0], img.shape[1]) - resized_width = int(round(img.shape[1] * percent)) - resized_height = int(round(img.shape[0] * percent)) - if interpolation: - resized = cv2.resize( - img, (resized_width, resized_height), - interpolation=interpolation) - else: - resized = cv2.resize(img, (resized_width, resized_height)) - return resized - - def crop_image(self, img, target_size, center): - """crop image - - Args: - img: images data - target_size: crop target size - center: crop mode - - Returns: - img: cropped image data - """ - height, width = img.shape[:2] - size = target_size - if center == True: - w_start = (width - size) // 2 - h_start = (height - size) // 2 - else: - w_start = np.random.randint(0, width - size + 1) - h_start = np.random.randint(0, height - size + 1) - w_end = w_start + size - h_end = h_start + size - img = img[h_start:h_end, w_start:w_end, :] - return img - - def process_image(self, sample): - """ process_image """ - mean = self.image_mean - std = self.image_std - crop_size = self.image_shape[1] - - data = np.fromstring(sample, np.uint8) - img = cv2.imdecode(data, cv2.IMREAD_COLOR) - - if img is None: - print("img is None, pass it.") - return None - - if crop_size > 0: - target_size = self.resize_short_size - img = self.resize_short( - img, target_size, interpolation=self.interpolation) - img = self.crop_image(img, target_size=crop_size, center=True) - - img = img[:, :, ::-1] - - img = img.astype('float32').transpose((2, 0, 1)) / 255 - img_mean = np.array(mean).reshape((3, 1, 1)) - img_std = np.array(std).reshape((3, 1, 1)) - img -= img_mean - img /= img_std - return img diff --git a/python/examples/imagenet/imagenet.label b/python/examples/imagenet/imagenet.label new file mode 100644 index 0000000000000000000000000000000000000000..d7146735146ea1894173d6d0e20fb90af36be849 --- /dev/null +++ b/python/examples/imagenet/imagenet.label @@ -0,0 +1,1000 @@ +tench, Tinca tinca, +goldfish, Carassius auratus, +great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias, +tiger shark, Galeocerdo cuvieri, +hammerhead, hammerhead shark, +electric ray, crampfish, numbfish, torpedo, +stingray, +cock, +hen, +ostrich, Struthio camelus, +brambling, Fringilla montifringilla, +goldfinch, Carduelis carduelis, +house finch, linnet, Carpodacus mexicanus, +junco, snowbird, +indigo bunting, indigo finch, indigo bird, Passerina cyanea, +robin, American robin, Turdus migratorius, +bulbul, +jay, +magpie, +chickadee, +water ouzel, dipper, +kite, +bald eagle, American eagle, Haliaeetus leucocephalus, +vulture, +great grey owl, great gray owl, Strix nebulosa, +European fire salamander, Salamandra salamandra, +common newt, Triturus vulgaris, +eft, +spotted salamander, Ambystoma maculatum, +axolotl, mud puppy, Ambystoma mexicanum, +bullfrog, Rana catesbeiana, +tree frog, tree-frog, +tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui, +loggerhead, loggerhead turtle, Caretta caretta, +leatherback turtle, leatherback, leathery turtle, Dermochelys 
coriacea, +mud turtle, +terrapin, +box turtle, box tortoise, +banded gecko, +common iguana, iguana, Iguana iguana, +American chameleon, anole, Anolis carolinensis, +whiptail, whiptail lizard, +agama, +frilled lizard, Chlamydosaurus kingi, +alligator lizard, +Gila monster, Heloderma suspectum, +green lizard, Lacerta viridis, +African chameleon, Chamaeleo chamaeleon, +Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis, +African crocodile, Nile crocodile, Crocodylus niloticus, +American alligator, Alligator mississipiensis, +triceratops, +thunder snake, worm snake, Carphophis amoenus, +ringneck snake, ring-necked snake, ring snake, +hognose snake, puff adder, sand viper, +green snake, grass snake, +king snake, kingsnake, +garter snake, grass snake, +water snake, +vine snake, +night snake, Hypsiglena torquata, +boa constrictor, Constrictor constrictor, +rock python, rock snake, Python sebae, +Indian cobra, Naja naja, +green mamba, +sea snake, +horned viper, cerastes, sand viper, horned asp, Cerastes cornutus, +diamondback, diamondback rattlesnake, Crotalus adamanteus, +sidewinder, horned rattlesnake, Crotalus cerastes, +trilobite, +harvestman, daddy longlegs, Phalangium opilio, +scorpion, +black and gold garden spider, Argiope aurantia, +barn spider, Araneus cavaticus, +garden spider, Aranea diademata, +black widow, Latrodectus mactans, +tarantula, +wolf spider, hunting spider, +tick, +centipede, +black grouse, +ptarmigan, +ruffed grouse, partridge, Bonasa umbellus, +prairie chicken, prairie grouse, prairie fowl, +peacock, +quail, +partridge, +African grey, African gray, Psittacus erithacus, +macaw, +sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita, +lorikeet, +coucal, +bee eater, +hornbill, +hummingbird, +jacamar, +toucan, +drake, +red-breasted merganser, Mergus serrator, +goose, +black swan, Cygnus atratus, +tusker, +echidna, spiny anteater, anteater, +platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus, +wallaby, brush kangaroo, +koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus, +wombat, +jellyfish, +sea anemone, anemone, +brain coral, +flatworm, platyhelminth, +nematode, nematode worm, roundworm, +conch, +snail, +slug, +sea slug, nudibranch, +chiton, coat-of-mail shell, sea cradle, polyplacophore, +chambered nautilus, pearly nautilus, nautilus, +Dungeness crab, Cancer magister, +rock crab, Cancer irroratus, +fiddler crab, +king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica, +American lobster, Northern lobster, Maine lobster, Homarus americanus, +spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish, +crayfish, crawfish, crawdad, crawdaddy, +hermit crab, +isopod, +white stork, Ciconia ciconia, +black stork, Ciconia nigra, +spoonbill, +flamingo, +little blue heron, Egretta caerulea, +American egret, great white heron, Egretta albus, +bittern, +crane, +limpkin, Aramus pictus, +European gallinule, Porphyrio porphyrio, +American coot, marsh hen, mud hen, water hen, Fulica americana, +bustard, +ruddy turnstone, Arenaria interpres, +red-backed sandpiper, dunlin, Erolia alpina, +redshank, Tringa totanus, +dowitcher, +oystercatcher, oyster catcher, +pelican, +king penguin, Aptenodytes patagonica, +albatross, mollymawk, +grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus, +killer whale, killer, orca, grampus, sea wolf, Orcinus orca, +dugong, Dugong dugon, +sea lion, +Chihuahua, +Japanese spaniel, +Maltese dog, Maltese 
terrier, Maltese, +Pekinese, Pekingese, Peke, +Shih-Tzu, +Blenheim spaniel, +papillon, +toy terrier, +Rhodesian ridgeback, +Afghan hound, Afghan, +basset, basset hound, +beagle, +bloodhound, sleuthhound, +bluetick, +black-and-tan coonhound, +Walker hound, Walker foxhound, +English foxhound, +redbone, +borzoi, Russian wolfhound, +Irish wolfhound, +Italian greyhound, +whippet, +Ibizan hound, Ibizan Podenco, +Norwegian elkhound, elkhound, +otterhound, otter hound, +Saluki, gazelle hound, +Scottish deerhound, deerhound, +Weimaraner, +Staffordshire bullterrier, Staffordshire bull terrier, +American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier, +Bedlington terrier, +Border terrier, +Kerry blue terrier, +Irish terrier, +Norfolk terrier, +Norwich terrier, +Yorkshire terrier, +wire-haired fox terrier, +Lakeland terrier, +Sealyham terrier, Sealyham, +Airedale, Airedale terrier, +cairn, cairn terrier, +Australian terrier, +Dandie Dinmont, Dandie Dinmont terrier, +Boston bull, Boston terrier, +miniature schnauzer, +giant schnauzer, +standard schnauzer, +Scotch terrier, Scottish terrier, Scottie, +Tibetan terrier, chrysanthemum dog, +silky terrier, Sydney silky, +soft-coated wheaten terrier, +West Highland white terrier, +Lhasa, Lhasa apso, +flat-coated retriever, +curly-coated retriever, +golden retriever, +Labrador retriever, +Chesapeake Bay retriever, +German short-haired pointer, +vizsla, Hungarian pointer, +English setter, +Irish setter, red setter, +Gordon setter, +Brittany spaniel, +clumber, clumber spaniel, +English springer, English springer spaniel, +Welsh springer spaniel, +cocker spaniel, English cocker spaniel, cocker, +Sussex spaniel, +Irish water spaniel, +kuvasz, +schipperke, +groenendael, +malinois, +briard, +kelpie, +komondor, +Old English sheepdog, bobtail, +Shetland sheepdog, Shetland sheep dog, Shetland, +collie, +Border collie, +Bouvier des Flandres, Bouviers des Flandres, +Rottweiler, +German shepherd, German shepherd dog, German police dog, alsatian, +Doberman, Doberman pinscher, +miniature pinscher, +Greater Swiss Mountain dog, +Bernese mountain dog, +Appenzeller, +EntleBucher, +boxer, +bull mastiff, +Tibetan mastiff, +French bulldog, +Great Dane, +Saint Bernard, St Bernard, +Eskimo dog, husky, +malamute, malemute, Alaskan malamute, +Siberian husky, +dalmatian, coach dog, carriage dog, +affenpinscher, monkey pinscher, monkey dog, +basenji, +pug, pug-dog, +Leonberg, +Newfoundland, Newfoundland dog, +Great Pyrenees, +Samoyed, Samoyede, +Pomeranian, +chow, chow chow, +keeshond, +Brabancon griffon, +Pembroke, Pembroke Welsh corgi, +Cardigan, Cardigan Welsh corgi, +toy poodle, +miniature poodle, +standard poodle, +Mexican hairless, +timber wolf, grey wolf, gray wolf, Canis lupus, +white wolf, Arctic wolf, Canis lupus tundrarum, +red wolf, maned wolf, Canis rufus, Canis niger, +coyote, prairie wolf, brush wolf, Canis latrans, +dingo, warrigal, warragal, Canis dingo, +dhole, Cuon alpinus, +African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus, +hyena, hyaena, +red fox, Vulpes vulpes, +kit fox, Vulpes macrotis, +Arctic fox, white fox, Alopex lagopus, +grey fox, gray fox, Urocyon cinereoargenteus, +tabby, tabby cat, +tiger cat, +Persian cat, +Siamese cat, Siamese, +Egyptian cat, +cougar, puma, catamount, mountain lion, painter, panther, Felis concolor, +lynx, catamount, +leopard, Panthera pardus, +snow leopard, ounce, Panthera uncia, +jaguar, panther, Panthera onca, Felis onca, +lion, king of beasts, Panthera leo, +tiger, Panthera 
tigris, +cheetah, chetah, Acinonyx jubatus, +brown bear, bruin, Ursus arctos, +American black bear, black bear, Ursus americanus, Euarctos americanus, +ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus, +sloth bear, Melursus ursinus, Ursus ursinus, +mongoose, +meerkat, mierkat, +tiger beetle, +ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle, +ground beetle, carabid beetle, +long-horned beetle, longicorn, longicorn beetle, +leaf beetle, chrysomelid, +dung beetle, +rhinoceros beetle, +weevil, +fly, +bee, +ant, emmet, pismire, +grasshopper, hopper, +cricket, +walking stick, walkingstick, stick insect, +cockroach, roach, +mantis, mantid, +cicada, cicala, +leafhopper, +lacewing, lacewing fly, +"dragonfly, darning needle, devils darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", +damselfly, +admiral, +ringlet, ringlet butterfly, +monarch, monarch butterfly, milkweed butterfly, Danaus plexippus, +cabbage butterfly, +sulphur butterfly, sulfur butterfly, +lycaenid, lycaenid butterfly, +starfish, sea star, +sea urchin, +sea cucumber, holothurian, +wood rabbit, cottontail, cottontail rabbit, +hare, +Angora, Angora rabbit, +hamster, +porcupine, hedgehog, +fox squirrel, eastern fox squirrel, Sciurus niger, +marmot, +beaver, +guinea pig, Cavia cobaya, +sorrel, +zebra, +hog, pig, grunter, squealer, Sus scrofa, +wild boar, boar, Sus scrofa, +warthog, +hippopotamus, hippo, river horse, Hippopotamus amphibius, +ox, +water buffalo, water ox, Asiatic buffalo, Bubalus bubalis, +bison, +ram, tup, +bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis, +ibex, Capra ibex, +hartebeest, +impala, Aepyceros melampus, +gazelle, +Arabian camel, dromedary, Camelus dromedarius, +llama, +weasel, +mink, +polecat, fitch, foulmart, foumart, Mustela putorius, +black-footed ferret, ferret, Mustela nigripes, +otter, +skunk, polecat, wood pussy, +badger, +armadillo, +three-toed sloth, ai, Bradypus tridactylus, +orangutan, orang, orangutang, Pongo pygmaeus, +gorilla, Gorilla gorilla, +chimpanzee, chimp, Pan troglodytes, +gibbon, Hylobates lar, +siamang, Hylobates syndactylus, Symphalangus syndactylus, +guenon, guenon monkey, +patas, hussar monkey, Erythrocebus patas, +baboon, +macaque, +langur, +colobus, colobus monkey, +proboscis monkey, Nasalis larvatus, +marmoset, +capuchin, ringtail, Cebus capucinus, +howler monkey, howler, +titi, titi monkey, +spider monkey, Ateles geoffroyi, +squirrel monkey, Saimiri sciureus, +Madagascar cat, ring-tailed lemur, Lemur catta, +indri, indris, Indri indri, Indri brevicaudatus, +Indian elephant, Elephas maximus, +African elephant, Loxodonta africana, +lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens, +giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca, +barracouta, snoek, +eel, +coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch, +rock beauty, Holocanthus tricolor, +anemone fish, +sturgeon, +gar, garfish, garpike, billfish, Lepisosteus osseus, +lionfish, +puffer, pufferfish, blowfish, globefish, +abacus, +abaya, +"academic gown, academic robe, judges robe", +accordion, piano accordion, squeeze box, +acoustic guitar, +aircraft carrier, carrier, flattop, attack aircraft carrier, +airliner, +airship, dirigible, +altar, +ambulance, +amphibian, amphibious vehicle, +analog clock, +apiary, bee house, +apron, +ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin, +assault rifle, assault gun, 
+backpack, back pack, knapsack, packsack, rucksack, haversack, +bakery, bakeshop, bakehouse, +balance beam, beam, +balloon, +ballpoint, ballpoint pen, ballpen, Biro, +Band Aid, +banjo, +bannister, banister, balustrade, balusters, handrail, +barbell, +barber chair, +barbershop, +barn, +barometer, +barrel, cask, +barrow, garden cart, lawn cart, wheelbarrow, +baseball, +basketball, +bassinet, +bassoon, +bathing cap, swimming cap, +bath towel, +bathtub, bathing tub, bath, tub, +beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon, +beacon, lighthouse, beacon light, pharos, +beaker, +bearskin, busby, shako, +beer bottle, +beer glass, +bell cote, bell cot, +bib, +bicycle-built-for-two, tandem bicycle, tandem, +bikini, two-piece, +binder, ring-binder, +binoculars, field glasses, opera glasses, +birdhouse, +boathouse, +bobsled, bobsleigh, bob, +bolo tie, bolo, bola tie, bola, +bonnet, poke bonnet, +bookcase, +bookshop, bookstore, bookstall, +bottlecap, +bow, +bow tie, bow-tie, bowtie, +brass, memorial tablet, plaque, +brassiere, bra, bandeau, +breakwater, groin, groyne, mole, bulwark, seawall, jetty, +breastplate, aegis, egis, +broom, +bucket, pail, +buckle, +bulletproof vest, +bullet train, bullet, +butcher shop, meat market, +cab, hack, taxi, taxicab, +caldron, cauldron, +candle, taper, wax light, +cannon, +canoe, +can opener, tin opener, +cardigan, +car mirror, +carousel, carrousel, merry-go-round, roundabout, whirligig, +"carpenters kit, tool kit", +carton, +car wheel, +cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM, +cassette, +cassette player, +castle, +catamaran, +CD player, +cello, violoncello, +cellular telephone, cellular phone, cellphone, cell, mobile phone, +chain, +chainlink fence, +chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour, +chain saw, chainsaw, +chest, +chiffonier, commode, +chime, bell, gong, +china cabinet, china closet, +Christmas stocking, +church, church building, +cinema, movie theater, movie theatre, movie house, picture palace, +cleaver, meat cleaver, chopper, +cliff dwelling, +cloak, +clog, geta, patten, sabot, +cocktail shaker, +coffee mug, +coffeepot, +coil, spiral, volute, whorl, helix, +combination lock, +computer keyboard, keypad, +confectionery, confectionary, candy store, +container ship, containership, container vessel, +convertible, +corkscrew, bottle screw, +cornet, horn, trumpet, trump, +cowboy boot, +cowboy hat, ten-gallon hat, +cradle, +crane, +crash helmet, +crate, +crib, cot, +Crock Pot, +croquet ball, +crutch, +cuirass, +dam, dike, dyke, +desk, +desktop computer, +dial telephone, dial phone, +diaper, nappy, napkin, +digital clock, +digital watch, +dining table, board, +dishrag, dishcloth, +dishwasher, dish washer, dishwashing machine, +disk brake, disc brake, +dock, dockage, docking facility, +dogsled, dog sled, dog sleigh, +dome, +doormat, welcome mat, +drilling platform, offshore rig, +drum, membranophone, tympan, +drumstick, +dumbbell, +Dutch oven, +electric fan, blower, +electric guitar, +electric locomotive, +entertainment center, +envelope, +espresso maker, +face powder, +feather boa, boa, +file, file cabinet, filing cabinet, +fireboat, +fire engine, fire truck, +fire screen, fireguard, +flagpole, flagstaff, +flute, transverse flute, +folding chair, +football helmet, +forklift, +fountain, +fountain pen, +four-poster, +freight car, +French horn, horn, +frying pan, frypan, skillet, +fur coat, +garbage truck, 
dustcart, +gasmask, respirator, gas helmet, +gas pump, gasoline pump, petrol pump, island dispenser, +goblet, +go-kart, +golf ball, +golfcart, golf cart, +gondola, +gong, tam-tam, +gown, +grand piano, grand, +greenhouse, nursery, glasshouse, +grille, radiator grille, +grocery store, grocery, food market, market, +guillotine, +hair slide, +hair spray, +half track, +hammer, +hamper, +hand blower, blow dryer, blow drier, hair dryer, hair drier, +hand-held computer, hand-held microcomputer, +handkerchief, hankie, hanky, hankey, +hard disc, hard disk, fixed disk, +harmonica, mouth organ, harp, mouth harp, +harp, +harvester, reaper, +hatchet, +holster, +home theater, home theatre, +honeycomb, +hook, claw, +hoopskirt, crinoline, +horizontal bar, high bar, +horse cart, horse-cart, +hourglass, +iPod, +iron, smoothing iron, +"jack-o-lantern", +jean, blue jean, denim, +jeep, landrover, +jersey, T-shirt, tee shirt, +jigsaw puzzle, +jinrikisha, ricksha, rickshaw, +joystick, +kimono, +knee pad, +knot, +lab coat, laboratory coat, +ladle, +lampshade, lamp shade, +laptop, laptop computer, +lawn mower, mower, +lens cap, lens cover, +letter opener, paper knife, paperknife, +library, +lifeboat, +lighter, light, igniter, ignitor, +limousine, limo, +liner, ocean liner, +lipstick, lip rouge, +Loafer, +lotion, +loudspeaker, speaker, speaker unit, loudspeaker system, speaker system, +"loupe, jewelers loupe", +lumbermill, sawmill, +magnetic compass, +mailbag, postbag, +mailbox, letter box, +maillot, +maillot, tank suit, +manhole cover, +maraca, +marimba, xylophone, +mask, +matchstick, +maypole, +maze, labyrinth, +measuring cup, +medicine chest, medicine cabinet, +megalith, megalithic structure, +microphone, mike, +microwave, microwave oven, +military uniform, +milk can, +minibus, +miniskirt, mini, +minivan, +missile, +mitten, +mixing bowl, +mobile home, manufactured home, +Model T, +modem, +monastery, +monitor, +moped, +mortar, +mortarboard, +mosque, +mosquito net, +motor scooter, scooter, +mountain bike, all-terrain bike, off-roader, +mountain tent, +mouse, computer mouse, +mousetrap, +moving van, +muzzle, +nail, +neck brace, +necklace, +nipple, +notebook, notebook computer, +obelisk, +oboe, hautboy, hautbois, +ocarina, sweet potato, +odometer, hodometer, mileometer, milometer, +oil filter, +organ, pipe organ, +oscilloscope, scope, cathode-ray oscilloscope, CRO, +overskirt, +oxcart, +oxygen mask, +packet, +paddle, boat paddle, +paddlewheel, paddle wheel, +padlock, +paintbrush, +"pajama, pyjama, pjs, jammies", +palace, +panpipe, pandean pipe, syrinx, +paper towel, +parachute, chute, +parallel bars, bars, +park bench, +parking meter, +passenger car, coach, carriage, +patio, terrace, +pay-phone, pay-station, +pedestal, plinth, footstall, +pencil box, pencil case, +pencil sharpener, +perfume, essence, +Petri dish, +photocopier, +pick, plectrum, plectron, +pickelhaube, +picket fence, paling, +pickup, pickup truck, +pier, +piggy bank, penny bank, +pill bottle, +pillow, +ping-pong ball, +pinwheel, +pirate, pirate ship, +pitcher, ewer, +"plane, carpenters plane, woodworking plane", +planetarium, +plastic bag, +plate rack, +plow, plough, +"plunger, plumbers helper", +Polaroid camera, Polaroid Land camera, +pole, +police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria, +poncho, +pool table, billiard table, snooker table, +pop bottle, soda bottle, +pot, flowerpot, +"potters wheel", +power drill, +prayer rug, prayer mat, +printer, +prison, prison house, +projectile, missile, +projector, +puck, hockey puck, 
+punching bag, punch bag, punching ball, punchball, +purse, +quill, quill pen, +quilt, comforter, comfort, puff, +racer, race car, racing car, +racket, racquet, +radiator, +radio, wireless, +radio telescope, radio reflector, +rain barrel, +recreational vehicle, RV, R.V., +reel, +reflex camera, +refrigerator, icebox, +remote control, remote, +restaurant, eating house, eating place, eatery, +revolver, six-gun, six-shooter, +rifle, +rocking chair, rocker, +rotisserie, +rubber eraser, rubber, pencil eraser, +rugby ball, +rule, ruler, +running shoe, +safe, +safety pin, +saltshaker, salt shaker, +sandal, +sarong, +sax, saxophone, +scabbard, +scale, weighing machine, +school bus, +schooner, +scoreboard, +screen, CRT screen, +screw, +screwdriver, +seat belt, seatbelt, +sewing machine, +shield, buckler, +shoe shop, shoe-shop, shoe store, +shoji, +shopping basket, +shopping cart, +shovel, +shower cap, +shower curtain, +ski, +ski mask, +sleeping bag, +slide rule, slipstick, +sliding door, +slot, one-armed bandit, +snorkel, +snowmobile, +snowplow, snowplough, +soap dispenser, +soccer ball, +sock, +solar dish, solar collector, solar furnace, +sombrero, +soup bowl, +space bar, +space heater, +space shuttle, +spatula, +speedboat, +"spider web, spiders web", +spindle, +sports car, sport car, +spotlight, spot, +stage, +steam locomotive, +steel arch bridge, +steel drum, +stethoscope, +stole, +stone wall, +stopwatch, stop watch, +stove, +strainer, +streetcar, tram, tramcar, trolley, trolley car, +stretcher, +studio couch, day bed, +stupa, tope, +submarine, pigboat, sub, U-boat, +suit, suit of clothes, +sundial, +sunglass, +sunglasses, dark glasses, shades, +sunscreen, sunblock, sun blocker, +suspension bridge, +swab, swob, mop, +sweatshirt, +swimming trunks, bathing trunks, +swing, +switch, electric switch, electrical switch, +syringe, +table lamp, +tank, army tank, armored combat vehicle, armoured combat vehicle, +tape player, +teapot, +teddy, teddy bear, +television, television system, +tennis ball, +thatch, thatched roof, +theater curtain, theatre curtain, +thimble, +thresher, thrasher, threshing machine, +throne, +tile roof, +toaster, +tobacco shop, tobacconist shop, tobacconist, +toilet seat, +torch, +totem pole, +tow truck, tow car, wrecker, +toyshop, +tractor, +trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi, +tray, +trench coat, +tricycle, trike, velocipede, +trimaran, +tripod, +triumphal arch, +trolleybus, trolley coach, trackless trolley, +trombone, +tub, vat, +turnstile, +typewriter keyboard, +umbrella, +unicycle, monocycle, +upright, upright piano, +vacuum, vacuum cleaner, +vase, +vault, +velvet, +vending machine, +vestment, +viaduct, +violin, fiddle, +volleyball, +waffle iron, +wall clock, +wallet, billfold, notecase, pocketbook, +wardrobe, closet, press, +warplane, military plane, +washbasin, handbasin, washbowl, lavabo, wash-hand basin, +washer, automatic washer, washing machine, +water bottle, +water jug, +water tower, +whiskey jug, +whistle, +wig, +window screen, +window shade, +Windsor tie, +wine bottle, +wing, +wok, +wooden spoon, +wool, woolen, woollen, +worm fence, snake fence, snake-rail fence, Virginia fence, +wreck, +yawl, +yurt, +web site, website, internet site, site, +comic book, +crossword puzzle, crossword, +street sign, +traffic light, traffic signal, stoplight, +book jacket, dust cover, dust jacket, dust wrapper, +menu, +plate, +guacamole, +consomme, +hot pot, hotpot, +trifle, +ice cream, icecream, +ice lolly, lolly, lollipop, popsicle, +French loaf, 
+bagel, beigel,
+pretzel,
+cheeseburger,
+hotdog, hot dog, red hot,
+mashed potato,
+head cabbage,
+broccoli,
+cauliflower,
+zucchini, courgette,
+spaghetti squash,
+acorn squash,
+butternut squash,
+cucumber, cuke,
+artichoke, globe artichoke,
+bell pepper,
+cardoon,
+mushroom,
+Granny Smith,
+strawberry,
+orange,
+lemon,
+fig,
+pineapple, ananas,
+banana,
+jackfruit, jak, jack,
+custard apple,
+pomegranate,
+hay,
+carbonara,
+chocolate sauce, chocolate syrup,
+dough,
+meat loaf, meatloaf,
+pizza, pizza pie,
+potpie,
+burrito,
+red wine,
+espresso,
+cup,
+eggnog,
+alp,
+bubble,
+cliff, drop, drop-off,
+coral reef,
+geyser,
+lakeside, lakeshore,
+promontory, headland, head, foreland,
+sandbar, sand bar,
+seashore, coast, seacoast, sea-coast,
+valley, vale,
+volcano,
+ballplayer, baseball player,
+groom, bridegroom,
+scuba diver,
+rapeseed,
+daisy,
+yellow lady's slipper, yellow lady's-slipper, Cypripedium calceolus, Cypripedium parviflorum,
+corn,
+acorn,
+hip, rose hip, rosehip,
+buckeye, horse chestnut, conker,
+coral fungus,
+agaric,
+gyromitra,
+stinkhorn, carrion fungus,
+earthstar,
+hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa,
+bolete,
+ear, spike, capitulum,
+toilet tissue, toilet paper, bathroom tissue
diff --git a/python/examples/imagenet/image_rpc_client.py b/python/examples/imagenet/resnet50_rpc_client.py
similarity index 56%
rename from python/examples/imagenet/image_rpc_client.py
rename to python/examples/imagenet/resnet50_rpc_client.py
index 4d74d2ed26a757a6f7978d8071286d3d4bcd5dfb..7888ab6302b483672ec1d7270f7db0c551f1778d 100644
--- a/python/examples/imagenet/image_rpc_client.py
+++ b/python/examples/imagenet/resnet50_rpc_client.py
@@ -14,23 +14,35 @@
 import sys
 from paddle_serving_client import Client
-from paddle_serving_app.reader import Sequential, File2Image, Resize, CenterCrop, RGB2BGR, Transpose, Div, Normalize
+from paddle_serving_app.reader import Sequential, URL2Image, Resize
+from paddle_serving_app.reader import CenterCrop, RGB2BGR, Transpose, Div, Normalize
 import time
 
 client = Client()
 client.load_client_config(sys.argv[1])
-client.connect(["127.0.0.1:9393"])
+client.connect(["127.0.0.1:9696"])
+
+label_dict = {}
+label_idx = 0
+with open("imagenet.label") as fin:
+    for line in fin:
+        label_dict[label_idx] = line.strip()
+        label_idx += 1
 
 seq = Sequential([
-    File2Image(), Resize(256), CenterCrop(224), RGB2BGR(), Transpose((2, 0, 1)),
-    Div(255), Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
+    URL2Image(), Resize(256), CenterCrop(224), RGB2BGR(), Transpose((2, 0, 1)),
+    Div(255), Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225], True)
 ])
 
-print(seq)
 start = time.time()
-image_file = "daisy.jpg"
-for i in range(1000):
+image_file = "https://paddle-serving.bj.bcebos.com/imagenet-example/daisy.jpg"
+for i in range(10):
     img = seq(image_file)
     fetch_map = client.predict(feed={"image": img}, fetch=["score"])
+    prob = max(fetch_map["score"][0])
+    label = label_dict[fetch_map["score"][0].tolist().index(prob)].strip(
+    ).replace(",", "")
+    print("prediction: {}, probability: {}".format(label, prob))
+
 end = time.time()
 print(end - start)
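The renamed RPC client above assumes a serving process on 127.0.0.1:9696 and fetches the test image over HTTP. A minimal sketch of exercising just the preprocessing chain offline, with File2Image substituted for URL2Image so a hypothetical local daisy.jpg is used instead of the network:

    from paddle_serving_app.reader import Sequential, File2Image, Resize
    from paddle_serving_app.reader import CenterCrop, RGB2BGR, Transpose, Div, Normalize

    # Same pipeline as resnet50_rpc_client.py, minus the URL fetch.
    seq = Sequential([
        File2Image(), Resize(256), CenterCrop(224), RGB2BGR(), Transpose((2, 0, 1)),
        Div(255), Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225], True)
    ])
    img = seq("daisy.jpg")  # hypothetical local file
    print(img.shape)        # expected: (3, 224, 224), ready for feed={"image": img}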
diff --git a/python/examples/imagenet/resnet50_web_service.py b/python/examples/imagenet/resnet50_web_service.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba40b41bbd9b773910ba0265b3604edd650570ff
--- /dev/null
+++ b/python/examples/imagenet/resnet50_web_service.py
@@ -0,0 +1,72 @@
+# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import sys
+from paddle_serving_client import Client
+from paddle_serving_app.reader import Sequential, URL2Image, Resize, CenterCrop, RGB2BGR, Transpose, Div, Normalize
+
+if len(sys.argv) != 4:
+    print("python resnet50_web_service.py model device port")
+    sys.exit(-1)
+
+device = sys.argv[2]
+
+if device == "cpu":
+    from paddle_serving_server.web_service import WebService
+else:
+    from paddle_serving_server_gpu.web_service import WebService
+
+
+class ImageService(WebService):
+    def init_imagenet_setting(self):
+        self.seq = Sequential([
+            URL2Image(), Resize(256), CenterCrop(224), RGB2BGR(), Transpose(
+                (2, 0, 1)), Div(255), Normalize([0.485, 0.456, 0.406],
+                                                [0.229, 0.224, 0.225], True)
+        ])
+        self.label_dict = {}
+        label_idx = 0
+        with open("imagenet.label") as fin:
+            for line in fin:
+                self.label_dict[label_idx] = line.strip()
+                label_idx += 1
+
+    def preprocess(self, feed=[], fetch=[]):
+        feed_batch = []
+        for ins in feed:
+            if "image" not in ins:
+                raise ValueError("feed data error!")
+            img = self.seq(ins["image"])
+            feed_batch.append({"image": img})
+        return feed_batch, fetch
+
+    def postprocess(self, feed=[], fetch=[], fetch_map={}):
+        score_list = fetch_map["score"]
+        result = {"label": [], "prob": []}
+        for score in score_list:
+            max_score = max(score)
+            result["label"].append(self.label_dict[score.index(max_score)]
+                                   .strip().replace(",", ""))
+            result["prob"].append(max_score)
+        return result
+
+
+image_service = ImageService(name="image")
+image_service.load_model_config(sys.argv[1])
+image_service.init_imagenet_setting()
+if device == "gpu":
+    image_service.set_gpus("0,1")
+image_service.prepare_server(
+    workdir="workdir", port=int(sys.argv[3]), device=device)
+image_service.run_server()
+image_service.run_flask()
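Once resnet50_web_service.py is running, its /image/prediction endpoint takes the same feed/fetch JSON used by the other web services in this repo. A hedged sketch of a client-side request, assuming the service was started with name "image" on port 9696:

    import requests

    url = "http://127.0.0.1:9696/image/prediction"
    data = {
        "feed": [{"image": "https://paddle-serving.bj.bcebos.com/imagenet-example/daisy.jpg"}],
        "fetch": ["score"]
    }
    r = requests.post(url, json=data)
    print(r.json())  # per the postprocess above, roughly {"label": [...], "prob": [...]}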
== "http": - for fn in filelist: - fin = open(fn) - for line in fin: - word_ids, label = imdb_dataset.get_words_and_label(line) - r = requests.post( - "http://{}/imdb/prediction".format(args.endpoint), - data={"words": word_ids, - "fetch": ["prediction"]}) + if args.batch_size >= 1: + feed_batch = [] + for bi in range(args.batch_size): + feed_batch.append({"words": dataset[bi]}) + r = requests.post( + "http://{}/imdb/prediction".format(args.endpoint), + json={"feed": feed_batch, + "fetch": ["prediction"]}) + if r.status_code != 200: + print('HTTP status code -ne 200') + raise ("predict failed.") + else: + print("unsupport batch size {}".format(args.batch_size)) end = time.time() return [[end - start]] multi_thread_runner = MultiThreadRunner() result = multi_thread_runner.run(single_func, args.thread, {}) -print(result) +avg_cost = 0 +for cost in result[0]: + avg_cost += cost +print("total cost {} s of each thread".format(avg_cost / args.thread)) diff --git a/python/examples/imdb/benchmark.sh b/python/examples/imdb/benchmark.sh index d77e184180d5c36de6cb865f6b9797511410a3ba..93dbf830c84bd38f72dd0d8a32139ad6098dc6f8 100644 --- a/python/examples/imdb/benchmark.sh +++ b/python/examples/imdb/benchmark.sh @@ -1,9 +1,12 @@ rm profile_log for thread_num in 1 2 4 8 16 do - $PYTHONROOT/bin/python benchmark.py --thread $thread_num --model imdbo_bow_client_conf/serving_client_conf.prototxt --request rpc > profile 2>&1 +for batch_size in 1 2 4 8 16 32 64 128 256 512 +do + $PYTHONROOT/bin/python benchmark.py --thread $thread_num --batch_size $batch_size --model imdb_bow_client_conf/serving_client_conf.prototxt --request rpc > profile 2>&1 echo "========================================" echo "batch size : $batch_size" >> profile_log $PYTHONROOT/bin/python ../util/show_profile.py profile $thread_num >> profile_log tail -n 1 profile >> profile_log done +done diff --git a/python/examples/imdb/benchmark_batch.py b/python/examples/imdb/benchmark_batch.py deleted file mode 100644 index 5891970b5decc34f35723187e44b166e0482c6e9..0000000000000000000000000000000000000000 --- a/python/examples/imdb/benchmark_batch.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/python/examples/imdb/benchmark_batch.py b/python/examples/imdb/benchmark_batch.py
deleted file mode 100644
index 5891970b5decc34f35723187e44b166e0482c6e9..0000000000000000000000000000000000000000
--- a/python/examples/imdb/benchmark_batch.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# pylint: disable=doc-string-missing
-
-import sys
-import time
-import requests
-from imdb_reader import IMDBDataset
-from paddle_serving_client import Client
-from paddle_serving_client.utils import MultiThreadRunner
-from paddle_serving_client.utils import benchmark_args
-
-args = benchmark_args()
-
-
-def single_func(idx, resource):
-    imdb_dataset = IMDBDataset()
-    imdb_dataset.load_resource("./imdb.vocab")
-    dataset = []
-    with open("./test_data/part-0") as fin:
-        for line in fin:
-            dataset.append(line.strip())
-    start = time.time()
-    if args.request == "rpc":
-        client = Client()
-        client.load_client_config(args.model)
-        client.connect([args.endpoint])
-        for i in range(1000):
-            if args.batch_size >= 1:
-                feed_batch = []
-                for bi in range(args.batch_size):
-                    word_ids, label = imdb_dataset.get_words_and_label(dataset[
-                        bi])
-                    feed_batch.append({"words": word_ids})
-                result = client.predict(feed=feed_batch, fetch=["prediction"])
-                if result is None:
-                    raise ("predict failed.")
-            else:
-                print("unsupport batch size {}".format(args.batch_size))
-
-    elif args.request == "http":
-        if args.batch_size >= 1:
-            feed_batch = []
-            for bi in range(args.batch_size):
-                feed_batch.append({"words": dataset[bi]})
-            r = requests.post(
-                "http://{}/imdb/prediction".format(args.endpoint),
-                json={"feed": feed_batch,
-                      "fetch": ["prediction"]})
-            if r.status_code != 200:
-                print('HTTP status code -ne 200')
-                raise ("predict failed.")
-        else:
-            print("unsupport batch size {}".format(args.batch_size))
-    end = time.time()
-    return [[end - start]]
-
-
-multi_thread_runner = MultiThreadRunner()
-result = multi_thread_runner.run(single_func, args.thread, {})
-avg_cost = 0
-for cost in result[0]:
-    avg_cost += cost
-print("total cost {} s of each thread".format(avg_cost / args.thread))
diff --git a/python/examples/imdb/benchmark_batch.sh b/python/examples/imdb/benchmark_batch.sh
deleted file mode 100644
index 15b65338b21675fd89056cf32f9a247b385a6a36..0000000000000000000000000000000000000000
--- a/python/examples/imdb/benchmark_batch.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-rm profile_log
-for thread_num in 1 2 4 8 16
-do
-for batch_size in 1 2 4 8 16 32 64 128 256 512
-do
-    $PYTHONROOT/bin/python benchmark_batch.py --thread $thread_num --batch_size $batch_size --model imdb_bow_client_conf/serving_client_conf.prototxt --request rpc > profile 2>&1
-    echo "========================================"
-    echo "batch size : $batch_size" >> profile_log
-    $PYTHONROOT/bin/python ../util/show_profile.py profile $thread_num >> profile_log
-    tail -n 1 profile >> profile_log
-done
-done
diff --git a/python/examples/imdb/test_client.py b/python/examples/imdb/test_client.py
index fdc3ced25377487a2844d57c4e6121801e9fa7fa..74364e5854d223e380cb386f9a8bc68b8517305a 100644
--- a/python/examples/imdb/test_client.py
+++ b/python/examples/imdb/test_client.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 # pylint: disable=doc-string-missing
 from paddle_serving_client import Client
-from imdb_reader import IMDBDataset
+from paddle_serving_app import IMDBDataset
 import sys
 
 client = Client()
diff --git a/python/examples/imdb/test_client_batch.py b/python/examples/imdb/test_client_batch.py
deleted file mode 100644
index 972b2c9609ca690542fa802f187fb30ed0467a04..0000000000000000000000000000000000000000
--- a/python/examples/imdb/test_client_batch.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# pylint: disable=doc-string-missing
-
-from paddle_serving_client import Client
-import sys
-import subprocess
-from multiprocessing import Pool
-import time
-
-
-def batch_predict(batch_size=4):
-    client = Client()
-    client.load_client_config(conf_file)
-    client.connect(["127.0.0.1:9292"])
-    fetch = ["acc", "cost", "prediction"]
-    feed_batch = []
-    for line in sys.stdin:
-        group = line.strip().split()
-        words = [int(x) for x in group[1:int(group[0])]]
-        label = [int(group[-1])]
-        feed = {"words": words, "label": label}
-        feed_batch.append(feed)
-        if len(feed_batch) == batch_size:
-            fetch_batch = client.batch_predict(
-                feed_batch=feed_batch, fetch=fetch)
-            for i in range(batch_size):
-                print("{} {}".format(fetch_batch[i]["prediction"][1],
-                                     feed_batch[i]["label"][0]))
-            feed_batch = []
-    if len(feed_batch) > 0:
-        fetch_batch = client.batch_predict(feed_batch=feed_batch, fetch=fetch)
-        for i in range(len(feed_batch)):
-            print("{} {}".format(fetch_batch[i]["prediction"][1], feed_batch[i][
-                "label"][0]))
-
-
-if __name__ == '__main__':
-    conf_file = sys.argv[1]
-    batch_size = int(sys.argv[2])
-    batch_predict(batch_size)
diff --git a/python/examples/imdb/text_classify_service.py b/python/examples/imdb/text_classify_service.py
index 4420a99facc7bd3db1c8bf1df0c58765467517de..ae54b99030ee777ad127242d26c13cdbc05645e9 100755
--- a/python/examples/imdb/text_classify_service.py
+++ b/python/examples/imdb/text_classify_service.py
@@ -14,7 +14,7 @@
 # pylint: disable=doc-string-missing
 
 from paddle_serving_server.web_service import WebService
-from imdb_reader import IMDBDataset
+from paddle_serving_app import IMDBDataset
 import sys
diff --git a/python/examples/senta/senta_web_service.py b/python/examples/senta/senta_web_service.py
index 0c0205e73cdd26231a94b2f0c9c41da84aaca961..5d20020c46d3b5ed23914cb9813ac889e232a2b3 100644
--- a/python/examples/senta/senta_web_service.py
+++ b/python/examples/senta/senta_web_service.py
@@ -51,13 +51,11 @@ class SentaService(WebService):
     def init_lac_service(self):
         ps = Process(target=self.start_lac_service())
         ps.start()
-        #self.init_lac_client()
+        self.init_lac_client()
 
     def lac_predict(self, feed_data):
-        self.init_lac_client()
         lac_result = self.lac_client.predict(
             feed={"words": feed_data}, fetch=["crf_decode"])
-        self.lac_client.release()
         return lac_result
 
     def init_lac_client(self):
diff --git a/python/paddle_serving_app/__init__.py b/python/paddle_serving_app/__init__.py
index fd9260284b4103f00ca8b9cda8b99173591d23eb..2a6225570c3de61ba6e0a0587f81175816cd0f8d 100644
--- a/python/paddle_serving_app/__init__.py
+++ b/python/paddle_serving_app/__init__.py
@@ -12,8 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from .reader.chinese_bert_reader import ChineseBertReader
-from .reader.image_reader import ImageReader, File2Image, URL2Image, Sequential, Normalize, CenterCrop, Resize
+from .reader.image_reader import ImageReader, File2Image, URL2Image, Sequential, Normalize, CenterCrop, Resize, PadStride
 from .reader.lac_reader import LACReader
 from .reader.senta_reader import SentaReader
+from .reader.imdb_reader import IMDBDataset
 from .models import ServingModels
 from .local_predict import Debugger
diff --git a/python/paddle_serving_app/reader/__init__.py b/python/paddle_serving_app/reader/__init__.py
index 01cad9e6bbdbe11191e3bc44ec2c63f2db3939bc..9b556a119d47ec693a667cf7c5ab10c0e56ace53 100644
--- a/python/paddle_serving_app/reader/__init__.py
+++ b/python/paddle_serving_app/reader/__init__.py
@@ -11,4 +11,4 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from .image_reader import ImageReader, File2Image, URL2Image, Sequential, Normalize, CenterCrop, Resize, Transpose, Div, RGB2BGR, BGR2RGB, RCNNPostprocess, SegPostprocess
+from .image_reader import ImageReader, File2Image, URL2Image, Sequential, Normalize, CenterCrop, Resize, Transpose, Div, RGB2BGR, BGR2RGB, RCNNPostprocess, SegPostprocess, PadStride
diff --git a/python/paddle_serving_app/reader/image_reader.py b/python/paddle_serving_app/reader/image_reader.py
index 8791e94ba8456f25deed1cbd5a2262218327c44e..a5afb9c84743fe401ab62608b7b38b5ccd6623ae 100644
--- a/python/paddle_serving_app/reader/image_reader.py
+++ b/python/paddle_serving_app/reader/image_reader.py
@@ -465,6 +465,24 @@ class Resize(object):
             _cv2_interpolation_to_str[self.interpolation])
 
 
+class PadStride(object):
+    def __init__(self, stride):
+        self.coarsest_stride = stride
+
+    def __call__(self, img):
+        coarsest_stride = self.coarsest_stride
+        if coarsest_stride == 0:
+            return img
+        im_c, im_h, im_w = img.shape
+        pad_h = int(np.ceil(float(im_h) / coarsest_stride) * coarsest_stride)
+        pad_w = int(np.ceil(float(im_w) / coarsest_stride) * coarsest_stride)
+        padding_im = np.zeros((im_c, pad_h, pad_w), dtype=np.float32)
+        padding_im[:, :im_h, :im_w] = img
+        im_info = {}
+        im_info['resize_shape'] = padding_im.shape[1:]
+        return padding_im
+
+
 class Transpose(object):
     def __init__(self, transpose_target):
         self.transpose_target = transpose_target
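The new PadStride op zero-pads a CHW image so both spatial dimensions become multiples of the given stride, as detection backbones with stride constraints typically require. A quick sketch of its behavior on synthetic data:

    import numpy as np
    from paddle_serving_app.reader import PadStride

    pad = PadStride(32)
    img = np.random.rand(3, 500, 600).astype("float32")  # arbitrary CHW input
    out = pad(img)
    # 500 -> 512 and 600 -> 608, each rounded up to a multiple of 32;
    # the original pixels sit in the top-left corner, the rest is zeros.
    print(out.shape)  # (3, 512, 608)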
diff --git a/python/paddle_serving_app/reader/imdb_reader.py b/python/paddle_serving_app/reader/imdb_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4ef3e163a50b0dc244ac2653df1e38d7f91699b
--- /dev/null
+++ b/python/paddle_serving_app/reader/imdb_reader.py
@@ -0,0 +1,92 @@
+# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# pylint: disable=doc-string-missing
+
+import sys
+import os
+import paddle
+import re
+import paddle.fluid.incubate.data_generator as dg
+
+py_version = sys.version_info[0]
+
+
+class IMDBDataset(dg.MultiSlotDataGenerator):
+    def load_resource(self, dictfile):
+        self._vocab = {}
+        wid = 0
+        if py_version == 2:
+            with open(dictfile) as f:
+                for line in f:
+                    self._vocab[line.strip()] = wid
+                    wid += 1
+        else:
+            with open(dictfile, encoding="utf-8") as f:
+                for line in f:
+                    self._vocab[line.strip()] = wid
+                    wid += 1
+        self._unk_id = len(self._vocab)
+        self._pattern = re.compile(r'(;|,|\.|\?|!|\s|\(|\))')
+        self.return_value = ("words", [1, 2, 3, 4, 5, 6]), ("label", [0])
+
+    def get_words_only(self, line):
+        sent = line.lower().replace("<br />", " ").strip()
+        words = [x for x in self._pattern.split(sent) if x and x != " "]
+        feas = [
+            self._vocab[x] if x in self._vocab else self._unk_id for x in words
+        ]
+        return feas
+
+    def get_words_and_label(self, line):
+        send = '|'.join(line.split('|')[:-1]).lower().replace("<br />",
+                                                              " ").strip()
+        label = [int(line.split('|')[-1])]
+
+        words = [x for x in self._pattern.split(send) if x and x != " "]
+        feas = [
+            self._vocab[x] if x in self._vocab else self._unk_id for x in words
+        ]
+        return feas, label
+
+    def infer_reader(self, infer_filelist, batch, buf_size):
+        def local_iter():
+            for fname in infer_filelist:
+                with open(fname, "r") as fin:
+                    for line in fin:
+                        feas, label = self.get_words_and_label(line)
+                        yield feas, label
+
+        import paddle
+        batch_iter = paddle.batch(
+            paddle.reader.shuffle(
+                local_iter, buf_size=buf_size),
+            batch_size=batch)
+        return batch_iter
+
+    def generate_sample(self, line):
+        def memory_iter():
+            for i in range(1000):
+                yield self.return_value
+
+        def data_iter():
+            feas, label = self.get_words_and_label(line)
+            yield ("words", feas), ("label", label)
+
+        return data_iter
+
+
+if __name__ == "__main__":
+    imdb = IMDBDataset()
+    imdb.load_resource("imdb.vocab")
+    imdb.run_from_stdin()
", + " ").strip() + label = [int(line.split('|')[-1])] + + words = [x for x in self._pattern.split(send) if x and x != " "] + feas = [ + self._vocab[x] if x in self._vocab else self._unk_id for x in words + ] + return feas, label + + def infer_reader(self, infer_filelist, batch, buf_size): + def local_iter(): + for fname in infer_filelist: + with open(fname, "r") as fin: + for line in fin: + feas, label = self.get_words_and_label(line) + yield feas, label + + import paddle + batch_iter = paddle.batch( + paddle.reader.shuffle( + local_iter, buf_size=buf_size), + batch_size=batch) + return batch_iter + + def generate_sample(self, line): + def memory_iter(): + for i in range(1000): + yield self.return_value + + def data_iter(): + feas, label = self.get_words_and_label(line) + yield ("words", feas), ("label", label) + + return data_iter + + +if __name__ == "__main__": + imdb = IMDBDataset() + imdb.load_resource("imdb.vocab") + imdb.run_from_stdin() diff --git a/python/paddle_serving_client/__init__.py b/python/paddle_serving_client/__init__.py index d97a88f51cc20db1a5c3a6acafd949cbdd153112..537a7bae29e73b839c5bde73d388a94cf5c0413a 100644 --- a/python/paddle_serving_client/__init__.py +++ b/python/paddle_serving_client/__init__.py @@ -329,9 +329,9 @@ class Client(object): # result map needs to be a numpy array for i, name in enumerate(fetch_names): if self.fetch_names_to_type_[name] == int_type: + # result_map[name] will be py::array(numpy array) result_map[name] = result_batch.get_int64_by_name(mi, name) shape = result_batch.get_shape(mi, name) - result_map[name] = np.array(result_map[name], dtype='int64') result_map[name].shape = shape if name in self.lod_tensor_set: result_map["{}.lod".format(name)] = np.array( @@ -339,8 +339,6 @@ class Client(object): elif self.fetch_names_to_type_[name] == float_type: result_map[name] = result_batch.get_float_by_name(mi, name) shape = result_batch.get_shape(mi, name) - result_map[name] = np.array( - result_map[name], dtype='float32') result_map[name].shape = shape if name in self.lod_tensor_set: result_map["{}.lod".format(name)] = np.array( diff --git a/python/paddle_serving_client/io/__init__.py b/python/paddle_serving_client/io/__init__.py index 4f174866e5521577ba35f39216f7dd0793879a6c..93ae37056320c2c7d779c5bbfc4d004a1be4f639 100644 --- a/python/paddle_serving_client/io/__init__.py +++ b/python/paddle_serving_client/io/__init__.py @@ -104,10 +104,10 @@ def save_model(server_model_folder, def inference_model_to_serving(dirname, - model_filename=None, - params_filename=None, serving_server="serving_server", - serving_client="serving_client"): + serving_client="serving_client", + model_filename=None, + params_filename=None): place = fluid.CPUPlace() exe = fluid.Executor(place) inference_program, feed_target_names, fetch_targets = \ diff --git a/python/paddle_serving_server/__init__.py b/python/paddle_serving_server/__init__.py index 971359fca0df3a122b28889e0711c86364a1c45d..3cb96a8f04922362fdb4b4c497f7679355e3879f 100644 --- a/python/paddle_serving_server/__init__.py +++ b/python/paddle_serving_server/__init__.py @@ -274,7 +274,8 @@ class Server(object): self.model_config_paths[node.name] = path print("You have specified multiple model paths, please ensure " "that the input and output of multiple models are the same.") - workflow_oi_config_path = self.model_config_paths.items()[0][1] + workflow_oi_config_path = list(self.model_config_paths.items())[0][ + 1] else: raise Exception("The type of model_config_paths must be str or " "dict({op: model_path}), not 
{}.".format( diff --git a/python/paddle_serving_server/web_service.py b/python/paddle_serving_server/web_service.py index 7e69b241f50255aa69d34c1405b72eacb675be04..f8c43707660e08e1bc44fdd62e40e20523f6cb6d 100755 --- a/python/paddle_serving_server/web_service.py +++ b/python/paddle_serving_server/web_service.py @@ -101,7 +101,6 @@ class WebService(object): p_rpc = Process(target=self._launch_rpc_service) p_rpc.start() - def run_flask(self): app_instance = Flask(__name__) @app_instance.before_first_request @@ -114,10 +113,16 @@ class WebService(object): def run(): return self.get_prediction(request) - app_instance.run(host="0.0.0.0", - port=self.port, - threaded=False, - processes=4) + self.app_instance = app_instance + + def run_flask(self): + self.app_instance.run(host="0.0.0.0", + port=self.port, + threaded=False, + processes=1) + + def get_app_instance(self): + return self.app_instance def preprocess(self, feed=[], fetch=[]): return feed, fetch diff --git a/python/paddle_serving_server_gpu/__init__.py b/python/paddle_serving_server_gpu/__init__.py index 5a06bd712a836617047b0cc947956fc5d2213daa..7acc926c7f7fc465da20a7609bc767a5289d2e61 100644 --- a/python/paddle_serving_server_gpu/__init__.py +++ b/python/paddle_serving_server_gpu/__init__.py @@ -320,7 +320,8 @@ class Server(object): self.model_config_paths[node.name] = path print("You have specified multiple model paths, please ensure " "that the input and output of multiple models are the same.") - workflow_oi_config_path = self.model_config_paths.items()[0][1] + workflow_oi_config_path = list(self.model_config_paths.items())[0][ + 1] else: raise Exception("The type of model_config_paths must be str or " "dict({op: model_path}), not {}.".format( diff --git a/python/paddle_serving_server_gpu/web_service.py b/python/paddle_serving_server_gpu/web_service.py index 2ec996b1db89bdff3c4550caa566bec5af2d9506..e64e73197d02a80e43bbc77a7589ab43efe2f244 100644 --- a/python/paddle_serving_server_gpu/web_service.py +++ b/python/paddle_serving_server_gpu/web_service.py @@ -151,7 +151,6 @@ class WebService(object): for p in server_pros: p.start() - def run_flask(self): app_instance = Flask(__name__) @app_instance.before_first_request @@ -164,10 +163,16 @@ class WebService(object): def run(): return self.get_prediction(request) - app_instance.run(host="0.0.0.0", - port=self.port, - threaded=False, - processes=4) + self.app_instance = app_instance + + def run_flask(self): + self.app_instance.run(host="0.0.0.0", + port=self.port, + threaded=False, + processes=1) + + def get_app_instance(self): + return app_instance def preprocess(self, feed=[], fetch=[]): return feed, fetch diff --git a/tools/Dockerfile.centos6.devel b/tools/Dockerfile.centos6.devel index dd5a2ef786ed8a9c239a99cabbcfe2d482e6341c..5223693d846bdbc90bdefe58c26db29d6a81359d 100644 --- a/tools/Dockerfile.centos6.devel +++ b/tools/Dockerfile.centos6.devel @@ -43,5 +43,5 @@ RUN yum -y install wget && \ source /root/.bashrc && \ cd .. 
diff --git a/tools/Dockerfile.centos6.devel b/tools/Dockerfile.centos6.devel
index dd5a2ef786ed8a9c239a99cabbcfe2d482e6341c..5223693d846bdbc90bdefe58c26db29d6a81359d 100644
--- a/tools/Dockerfile.centos6.devel
+++ b/tools/Dockerfile.centos6.devel
@@ -43,5 +43,5 @@ RUN yum -y install wget && \
    source /root/.bashrc && \
    cd .. && rm -rf Python-3.6.8* && \
    pip3 install google protobuf setuptools wheel flask numpy==1.16.4 && \
-   yum -y install epel-release && yum -y install patchelf && \
+   yum -y install epel-release && yum -y install patchelf libXext libSM libXrender && \
    yum clean all
diff --git a/tools/Dockerfile.centos6.gpu.devel b/tools/Dockerfile.centos6.gpu.devel
index c34780c151e960134af5f8b448e0465b8285e8b2..1432d49abe9a4aec3b558d855c9cfcf30efef461 100644
--- a/tools/Dockerfile.centos6.gpu.devel
+++ b/tools/Dockerfile.centos6.gpu.devel
@@ -43,5 +43,5 @@ RUN yum -y install wget && \
    source /root/.bashrc && \
    cd .. && rm -rf Python-3.6.8* && \
    pip3 install google protobuf setuptools wheel flask numpy==1.16.4 && \
-   yum -y install epel-release && yum -y install patchelf && \
+   yum -y install epel-release && yum -y install patchelf libXext libSM libXrender && \
    yum clean all
diff --git a/tools/Dockerfile.devel b/tools/Dockerfile.devel
index 6cb228f587054d5b579df0d85109d41c15c128e9..385e568273eab54f7dfa51a20bb7dcd89cfa98a8 100644
--- a/tools/Dockerfile.devel
+++ b/tools/Dockerfile.devel
@@ -20,5 +20,5 @@ RUN yum -y install wget >/dev/null \
    && rm get-pip.py \
    && yum install -y python3 python3-devel \
    && pip3 install google protobuf setuptools wheel flask \
-   && yum -y install epel-release && yum -y install patchelf \
+   && yum -y install epel-release && yum -y install patchelf libXext libSM libXrender \
    && yum clean all
diff --git a/tools/Dockerfile.gpu.devel b/tools/Dockerfile.gpu.devel
index 8cd7a6dbbddd5e1b60b7833086aa25cd849da519..2ffbe4601e1f7e9b05c87f9562b3e0ffc4b967ff 100644
--- a/tools/Dockerfile.gpu.devel
+++ b/tools/Dockerfile.gpu.devel
@@ -21,5 +21,5 @@ RUN yum -y install wget >/dev/null \
    && rm get-pip.py \
    && yum install -y python3 python3-devel \
    && pip3 install google protobuf setuptools wheel flask \
-   && yum -y install epel-release && yum -y install patchelf \
+   && yum -y install epel-release && yum -y install patchelf libXext libSM libXrender \
    && yum clean all
diff --git a/tools/serving_build.sh b/tools/serving_build.sh
index a522efe19cb9f4170341f291d8c30db0e6749ad1..43e55174ab30374d853ed1bb25aa4a9cc637afd5 100644
--- a/tools/serving_build.sh
+++ b/tools/serving_build.sh
@@ -343,7 +343,7 @@ function python_test_imdb() {
         sleep 5
         check_cmd "head test_data/part-0 | python test_client.py imdb_cnn_client_conf/serving_client_conf.prototxt imdb.vocab"
         # test batch predict
-        check_cmd "python benchmark_batch.py --thread 4 --batch_size 8 --model imdb_bow_client_conf/serving_client_conf.prototxt --request rpc --endpoint 127.0.0.1:9292"
+        check_cmd "python benchmark.py --thread 4 --batch_size 8 --model imdb_bow_client_conf/serving_client_conf.prototxt --request rpc --endpoint 127.0.0.1:9292"
         echo "imdb CPU RPC inference pass"
         kill_server_process
         rm -rf work_dir1
@@ -359,7 +359,7 @@ function python_test_imdb() {
            exit 1
        fi
        # test batch predict
-       check_cmd "python benchmark.py --thread 4 --batch_size 8 --model imdb_bow_client_conf/serving_client_conf.prototxt --request http --endpoint 127.0.0.1:9292"
+       check_cmd "python benchmark.py --thread 4 --batch_size 8 --model imdb_bow_client_conf/serving_client_conf.prototxt --request http --endpoint 127.0.0.1:9292"
        setproxy  # recover proxy state
        kill_server_process
        ps -ef | grep "text_classify_service.py" | grep -v grep | awk '{print $2}' | xargs kill