From 47c5b837c1dc409f04a80cbba65af35de4ba605e Mon Sep 17 00:00:00 2001
From: MRXLT
Date: Wed, 29 Apr 2020 15:17:24 +0800
Subject: [PATCH] fix senta

---
 python/examples/senta/README.md            |  2 +-
 python/examples/senta/README_CN.md         |  2 +-
 python/examples/senta/get_data.sh          |  4 +--
 python/examples/senta/senta_web_service.py | 35 ++--------------------
 4 files changed, 7 insertions(+), 36 deletions(-)

diff --git a/python/examples/senta/README.md b/python/examples/senta/README.md
index 307f4829..0a1ea9fa 100644
--- a/python/examples/senta/README.md
+++ b/python/examples/senta/README.md
@@ -12,5 +12,5 @@ In the Chinese sentiment classification task, the Chinese word segmentation need
 In this demo, the LAC task is placed in the preprocessing part of the HTTP prediction service of the sentiment classification task. The LAC prediction service is deployed on the CPU, and the sentiment classification task is deployed on the GPU, which can be changed according to the actual situation.
 ## Client prediction
 ```
-curl -H "Content-Type:application/json" -X POST -d '{"words": "天气不错 | 0", "fetch":["sentence_feature"]}' http://127.0.0.1:9292/senta/prediction
+curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"words": "天气不错"}], "fetch":["class_probs"]}' http://127.0.0.1:9292/senta/prediction
 ```
diff --git a/python/examples/senta/README_CN.md b/python/examples/senta/README_CN.md
index cd7cd856..24ee9b3e 100644
--- a/python/examples/senta/README_CN.md
+++ b/python/examples/senta/README_CN.md
@@ -13,5 +13,5 @@ python senta_web_service.py senta_bilstm_model/ workdir 9292
 ```
 ## 客户端预测
 ```
-curl -H "Content-Type:application/json" -X POST -d '{"words": "天气不错 | 0", "fetch":["sentence_feature"]}' http://127.0.0.1:9292/senta/prediction
+curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"words": "天气不错"}], "fetch":["class_probs"]}' http://127.0.0.1:9292/senta/prediction
 ```
diff --git a/python/examples/senta/get_data.sh b/python/examples/senta/get_data.sh
index 75bc8539..f1fb3844 100644
--- a/python/examples/senta/get_data.sh
+++ b/python/examples/senta/get_data.sh
@@ -1,5 +1,5 @@
-#wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/SentimentAnalysis/senta_bilstm.tar.gz --no-check-certificate
-#tar -xzvf senta_bilstm.tar.gz
+wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/SentimentAnalysis/senta_bilstm.tar.gz --no-check-certificate
+tar -xzvf senta_bilstm.tar.gz
 wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/LexicalAnalysis/lac_model.tar.gz --no-check-certificate
 tar -xzvf lac_model.tar.gz
 wget https://paddle-serving.bj.bcebos.com/reader/lac/lac_dict.tar.gz --no-check-certificate
diff --git a/python/examples/senta/senta_web_service.py b/python/examples/senta/senta_web_service.py
index 7077b84b..103c4c31 100644
--- a/python/examples/senta/senta_web_service.py
+++ b/python/examples/senta/senta_web_service.py
@@ -70,9 +70,7 @@ class SentaService(WebService):
         self.senta_reader = SentaReader(vocab_path=self.senta_dict_path)
 
     def preprocess(self, feed={}, fetch={}):
-        if "words" not in feed:
-            raise ("feed data error!")
-        feed_data = self.lac_reader.process(feed["words"])
+        feed_data = self.lac_reader.process(feed[0]["words"])
         fetch = ["crf_decode"]
         if self.show:
             print("---- lac reader ----")
@@ -81,7 +79,7 @@ class SentaService(WebService):
         if self.show:
             print("---- lac out ----")
             print(lac_result)
-        segs = self.lac_reader.parse_result(feed["words"],
+        segs = self.lac_reader.parse_result(feed[0]["words"],
                                             lac_result["crf_decode"])
         if self.show:
             print("---- lac parse ----")
@@ -107,31 +105,4 @@ senta_service.init_lac_reader()
 senta_service.init_senta_reader()
 senta_service.init_lac_service()
 senta_service.run_server()
-#senta_service.run_flask()
-
-from flask import Flask, request
-
-app_instance = Flask(__name__)
-
-
-@app_instance.before_first_request
-def init():
-    global uci_service
-    senta_service._launch_web_service()
-
-
-service_name = "/" + senta_service.name + "/prediction"
-
-
-@app_instance.route(service_name, methods=["POST"])
-def run():
-    print("---- run ----")
-    print(request.json)
-    return senta_service.get_prediction(request)
-
-
-if __name__ == "__main__":
-    app_instance.run(host="0.0.0.0",
-                     port=senta_service.port,
-                     threaded=False,
-                     processes=4)
+senta_service.run_flask()
--
GitLab