From f183345ca325c4ab9d992421457cd46630ed5ce7 Mon Sep 17 00:00:00 2001 From: MRXLT Date: Fri, 24 Apr 2020 17:45:45 +0800 Subject: [PATCH] add senta web service --- python/examples/senta/senta_web_service.py | 119 +++++++++++++++++++++ python/paddle_serving_server/serve.py | 1 + 2 files changed, 120 insertions(+) create mode 100644 python/examples/senta/senta_web_service.py diff --git a/python/examples/senta/senta_web_service.py b/python/examples/senta/senta_web_service.py new file mode 100644 index 00000000..7785c0d4 --- /dev/null +++ b/python/examples/senta/senta_web_service.py @@ -0,0 +1,119 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
from paddle_serving_server_gpu.web_service import WebService
from paddle_serving_client import Client
from lac_reader import LACReader
from senta_reader import SentaReader
import numpy as np
import os
import io
import sys
import subprocess
from multiprocessing import Process, Queue


class SentaService(WebService):
    """Sentiment-analysis web service that chains a LAC word-segmentation
    service in front of the Senta model: incoming text is first segmented
    by a locally launched LAC serving process, then the segments are fed
    to the Senta model hosted by this WebService.
    """

    def start_lac_service(self):
        """Launch the LAC serving process in the background.

        Changes into ./lac_serving so the LAC server uses that directory
        as its workdir, spawns it detached via a shell "&", then restores
        the current working directory.
        """
        print(" ---- start lac service ---- ")
        os.chdir('./lac_serving')
        # NOTE(review): os.popen with a trailing "&" detaches the server;
        # the pipe handle is deliberately discarded, so the child is never
        # waited on or terminated from here. subprocess.Popen would give
        # better lifecycle control — TODO confirm before changing.
        os.popen(
            "GLOG_v=2 python -m paddle_serving_server.serve --model ../../lac/jieba_server_model/ --port 9292 &"
        )
        os.chdir('..')

    def init_lac_service(self):
        """Start the LAC service launcher in a separate process."""
        # BUG FIX: the original passed target=self.start_lac_service()
        # (note the call parentheses), which executed the launcher
        # synchronously in the parent and handed Process a None target,
        # so the spawned process did nothing. Pass the bound method itself.
        ps = Process(target=self.start_lac_service)
        ps.start()
        #self.init_lac_client()

    def lac_predict(self, feed_data):
        """Run one LAC segmentation request.

        A fresh client is connected per call and released afterwards;
        returns the raw LAC fetch dict (contains "crf_decode").
        """
        self.init_lac_client()
        lac_result = self.lac_client.predict(
            feed={"words": feed_data}, fetch=["crf_decode"])
        self.lac_client.release()
        return lac_result

    def init_lac_client(self):
        """Create and connect a client for the local LAC service (port 9292)."""
        self.lac_client = Client()
        self.lac_client.load_client_config(
            "../lac/jieba_client_conf/serving_client_conf.prototxt")
        self.lac_client.connect(["127.0.0.1:9292"])

    def init_lac_reader(self):
        """Load the LAC pre/post-processing reader from its dict directory."""
        self.lac_reader = LACReader("../lac/lac_dict")

    def init_senta_reader(self):
        """Load the Senta reader with its vocabulary file."""
        self.senta_reader = SentaReader(vocab_path="./senta_data/word_dict.txt")

    def preprocess(self, feed=None, fetch=None):
        """Convert a raw request into Senta model input.

        Pipeline: raw text ("words") -> LAC reader -> LAC service
        (crf_decode) -> segment parsing -> Senta reader -> model feed.

        Args:
            feed: request dict; must contain a "words" entry with the
                raw text. (Default changed from a mutable ``{}`` to
                ``None`` to avoid the shared-mutable-default pitfall;
                behavior is unchanged.)
            fetch: ignored on input; the fetch list is always replaced.

        Returns:
            (feed_dict, fetch_list) for the Senta model.

        Raises:
            ValueError: if "words" is missing from ``feed``.
        """
        feed = {} if feed is None else feed
        print("---- preprocess ----")
        print(feed)
        if "words" not in feed:
            # BUG FIX: the original did `raise ("feed data error!")`,
            # which raises a str and therefore fails with
            # "TypeError: exceptions must derive from BaseException".
            raise ValueError("feed data error!")
        feed_data = self.lac_reader.process(feed["words"])
        fetch = ["crf_decode"]
        print("---- lac reader ----")
        print(feed_data)
        lac_result = self.lac_predict(feed_data)
        print("---- lac out ----")
        print(lac_result)
        segs = self.lac_reader.parse_result(feed["words"],
                                            lac_result["crf_decode"])
        print("---- lac parse ----")
        feed_data = self.senta_reader.process(segs)
        print("---- senta reader ----")
        print("feed_data", feed_data)
        fetch = ["sentence_feature"]
        return {"words": feed_data}, fetch


senta_service = SentaService(name="senta")
+senta_service.load_model_config(sys.argv[1]) +senta_service.init_lac_reader() +senta_service.init_senta_reader() +print("Init senta done") +senta_service.init_lac_service() +print("init lac service done") +senta_service.prepare_server( + workdir=sys.argv[2], port=int(sys.argv[3]), device="cpu") +senta_service.run_server() +#senta_service.run_flask() + +from flask import Flask, request + +app_instance = Flask(__name__) + + +@app_instance.before_first_request +def init(): + global uci_service + senta_service._launch_web_service() + + +service_name = "/" + senta_service.name + "/prediction" + + +@app_instance.route(service_name, methods=["POST"]) +def run(): + print("---- run ----") + print(request.json) + return senta_service.get_prediction(request) + + +if __name__ == "__main__": + app_instance.run(host="0.0.0.0", + port=senta_service.port, + threaded=False, + processes=4) diff --git a/python/paddle_serving_server/serve.py b/python/paddle_serving_server/serve.py index 396ed17c..395177a8 100644 --- a/python/paddle_serving_server/serve.py +++ b/python/paddle_serving_server/serve.py @@ -79,6 +79,7 @@ def start_standard_model(): # pylint: disable=doc-string-missing server.set_num_threads(thread_num) server.set_memory_optimize(mem_optim) server.set_max_body_size(max_body_size) + server.set_port(port) server.load_model_config(model) server.prepare_server(workdir=workdir, port=port, device=device) -- GitLab