diff --git a/README.md b/README.md
index a95dd6a8697c77e47071a5b9ac6c86502aee98b2..3faf89429a506aa26c6311d457de5716242359a8 100644
--- a/README.md
+++ b/README.md
@@ -166,7 +166,7 @@ python image_classification_service_demo.py resnet50_serving_model
 ``` shell
-curl -H "Content-Type:application/json" -X POST -d '{"url": "https://paddle-serving.bj.bcebos.com/imagenet-example/daisy.jpg", "fetch": ["score"]}' http://127.0.0.1:9292/image/prediction
+curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"url": "https://paddle-serving.bj.bcebos.com/imagenet-example/daisy.jpg"}], "fetch": ["score"]}' http://127.0.0.1:9292/image/prediction
 ```
 - **Request result**:
 ``` shell
diff --git a/README_CN.md b/README_CN.md
index 2b91ab9e75bf6ffedc2df421b1cb40cc651bf8c7..266fca330d7597d6188fa0022e6376bc23149c74 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -171,7 +171,7 @@ python image_classification_service_demo.py resnet50_serving_model
 ``` shell
-curl -H "Content-Type:application/json" -X POST -d '{"url": "https://paddle-serving.bj.bcebos.com/imagenet-example/daisy.jpg", "fetch": ["score"]}' http://127.0.0.1:9292/image/prediction
+curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"url": "https://paddle-serving.bj.bcebos.com/imagenet-example/daisy.jpg"}], "fetch": ["score"]}' http://127.0.0.1:9292/image/prediction
 ```
 - **返回结果示例**:
 ``` shell
diff --git a/python/examples/senta/README.md b/python/examples/senta/README.md
index 307f4829407b2fb03b64035c94ac00c3d55c27f5..9d6c3a0221f924e5d8f1893e6c618e3b2f88a3e1 100644
--- a/python/examples/senta/README.md
+++ b/python/examples/senta/README.md
@@ -1,5 +1,5 @@
 # Chinese sentence sentiment classification
-
+([简体中文](./README_CN.md)|English)
 ## Get model files and sample data
 ```
 sh get_data.sh
@@ -12,5 +12,5 @@ In the Chinese sentiment classification task, the Chinese word segmentation need
 In this demo, the LAC task is placed in the preprocessing part of the HTTP prediction service of the sentiment classification task. The LAC prediction service is deployed on the CPU, and the sentiment classification task is deployed on the GPU, which can be changed according to the actual situation.
 ## Client prediction
 ```
-curl -H "Content-Type:application/json" -X POST -d '{"words": "天气不错 | 0", "fetch":["sentence_feature"]}' http://127.0.0.1:9292/senta/prediction
+curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"words": "天气不错"}], "fetch":["class_probs"]}' http://127.0.0.1:9292/senta/prediction
 ```
diff --git a/python/examples/senta/README_CN.md b/python/examples/senta/README_CN.md
index cd7cd8564242db8c30525978a4b806c866cd0d0f..bb1e706554a57b29fc784d064dd4b550846f6e76 100644
--- a/python/examples/senta/README_CN.md
+++ b/python/examples/senta/README_CN.md
@@ -1,5 +1,5 @@
 # 中文语句情感分类
-
+(简体中文|[English](./README.md))
 ## 获取模型文件和样例数据
 ```
 sh get_data.sh
@@ -13,5 +13,5 @@ python senta_web_service.py senta_bilstm_model/ workdir 9292
 ## 客户端预测
 ```
-curl -H "Content-Type:application/json" -X POST -d '{"words": "天气不错 | 0", "fetch":["sentence_feature"]}' http://127.0.0.1:9292/senta/prediction
+curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"words": "天气不错"}], "fetch":["class_probs"]}' http://127.0.0.1:9292/senta/prediction
 ```
diff --git a/python/examples/senta/get_data.sh b/python/examples/senta/get_data.sh
index 75bc8539721268aa212d5d6d726e1e9d600188b1..f1fb3844a703503177906a029bd42810e5fa3f33 100644
--- a/python/examples/senta/get_data.sh
+++ b/python/examples/senta/get_data.sh
@@ -1,5 +1,5 @@
-#wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/SentimentAnalysis/senta_bilstm.tar.gz --no-check-certificate
-#tar -xzvf senta_bilstm.tar.gz
+wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/SentimentAnalysis/senta_bilstm.tar.gz --no-check-certificate
+tar -xzvf senta_bilstm.tar.gz
 wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/LexicalAnalysis/lac_model.tar.gz --no-check-certificate
 tar -xzvf lac_model.tar.gz
 wget https://paddle-serving.bj.bcebos.com/reader/lac/lac_dict.tar.gz --no-check-certificate
diff --git a/python/examples/senta/senta_web_service.py b/python/examples/senta/senta_web_service.py
index 7077b84b7a97cac6387b8cb2e88e31c0b0e5d70e..35dab1911ec18af55ef19750b1b239b5aba2c8a9 100644
--- a/python/examples/senta/senta_web_service.py
+++ b/python/examples/senta/senta_web_service.py
@@ -69,11 +69,8 @@ class SentaService(WebService):
     def init_senta_reader(self):
         self.senta_reader = SentaReader(vocab_path=self.senta_dict_path)

-    def preprocess(self, feed={}, fetch={}):
-        if "words" not in feed:
-            raise ("feed data error!")
-        feed_data = self.lac_reader.process(feed["words"])
-        fetch = ["crf_decode"]
+    def preprocess(self, feed=[], fetch=[]):
+        feed_data = self.lac_reader.process(feed[0]["words"])
         if self.show:
             print("---- lac reader ----")
             print(feed_data)
@@ -81,7 +78,7 @@ class SentaService(WebService):
         if self.show:
             print("---- lac out ----")
             print(lac_result)
-        segs = self.lac_reader.parse_result(feed["words"],
+        segs = self.lac_reader.parse_result(feed[0]["words"],
                                             lac_result["crf_decode"])
         if self.show:
             print("---- lac parse ----")
@@ -90,7 +87,6 @@ class SentaService(WebService):
         if self.show:
             print("---- senta reader ----")
             print("feed_data", feed_data)
-        fetch = ["class_probs"]
         return {"words": feed_data}, fetch


@@ -107,31 +103,4 @@ senta_service.init_lac_reader()
 senta_service.init_senta_reader()
 senta_service.init_lac_service()
 senta_service.run_server()
-#senta_service.run_flask()
-
-from flask import Flask, request
-
-app_instance = Flask(__name__)
-
-
-@app_instance.before_first_request
-def init():
-    global uci_service
-    senta_service._launch_web_service()
-
-
-service_name = "/" + senta_service.name + "/prediction"
-
-
-@app_instance.route(service_name, methods=["POST"])
-def run():
-    print("---- run ----")
-    print(request.json)
-    return senta_service.get_prediction(request)
-
-
-if __name__ == "__main__":
-    app_instance.run(host="0.0.0.0",
-                     port=senta_service.port,
-                     threaded=False,
-                     processes=4)
+senta_service.run_flask()
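Below is a minimal sketch, not part of the patch itself, of how a Python client might call the senta HTTP service using the list-based `feed` request format shown in the updated curl examples above. It assumes the service started by `senta_web_service.py` is listening on `127.0.0.1:9292` and that the `requests` package is available.

```python
# Sketch only: exercise the new {"feed": [...], "fetch": [...]} request body.
# Assumes senta_web_service.py is running locally on port 9292 and that the
# `requests` package is installed.
import requests

payload = {
    "feed": [{"words": "天气不错"}],  # each sample is a dict inside the "feed" list
    "fetch": ["class_probs"],         # ask for the class probabilities
}

resp = requests.post(
    "http://127.0.0.1:9292/senta/prediction",
    json=payload,  # requests sets Content-Type: application/json automatically
)
print(resp.json())
```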