diff --git a/python/examples/lac/README.md b/python/examples/lac/README.md index bc420186a09dfd0066c1abf0c0d95063e9cb0699..8d7adfb583f8e8e1fde0681a73f2bba65452fa87 100644 --- a/python/examples/lac/README.md +++ b/python/examples/lac/README.md @@ -2,28 +2,27 @@ ([简体中文](./README_CN.md)|English) -### Get model files and sample data +### Get Model ``` -sh get_data.sh +python -m paddle_serving_app.package --get_model lac +tar -xzvf lac.tar.gz ``` -the package downloaded contains lac model config along with lac dictionary. - #### Start RPC inference service ``` -python -m paddle_serving_server.serve --model jieba_server_model/ --port 9292 +python -m paddle_serving_server.serve --model lac_model/ --port 9292 ``` ### RPC Infer ``` -echo "我爱北京天安门" | python lac_client.py jieba_client_conf/serving_client_conf.prototxt lac_dict/ +echo "我爱北京天安门" | python lac_client.py lac_client/serving_client_conf.prototxt ``` -it will get the segmentation result +It will get the segmentation result. ### Start HTTP inference service ``` -python lac_web_service.py jieba_server_model/ lac_workdir 9292 +python lac_web_service.py lac_model/ lac_workdir 9292 ``` ### HTTP Infer diff --git a/python/examples/lac/README_CN.md b/python/examples/lac/README_CN.md index 449f474ca291053eb6880166c52814c9d4180f36..2379aa8ed69c026c6afd94b8b791774882eaf567 100644 --- a/python/examples/lac/README_CN.md +++ b/python/examples/lac/README_CN.md @@ -2,28 +2,27 @@ (简体中文|[English](./README.md)) -### 获取模型和字典文件 +### 获取模型 ``` -sh get_data.sh +python -m paddle_serving_app.package --get_model lac +tar -xzvf lac.tar.gz ``` -下载包里包含了lac模型和lac模型预测需要的字典文件 - #### 开启RPC预测服务 ``` -python -m paddle_serving_server.serve --model jieba_server_model/ --port 9292 +python -m paddle_serving_server.serve --model lac_model/ --port 9292 ``` ### 执行RPC预测 ``` -echo "我爱北京天安门" | python lac_client.py jieba_client_conf/serving_client_conf.prototxt lac_dict/ +echo "我爱北京天安门" | python lac_client.py lac_client/serving_client_conf.prototxt ``` 我们就能得到分词结果 
### 开启HTTP预测服务 ``` -python lac_web_service.py jieba_server_model/ lac_workdir 9292 +python lac_web_service.py lac_model/ lac_workdir 9292 ``` ### 执行HTTP预测 diff --git a/python/examples/lac/benchmark.py b/python/examples/lac/benchmark.py index 53d0881ed74e5e19104a70fb93d6872141d27afd..64e935a608477d5841df1b64abf7b6eb35dd1a4b 100644 --- a/python/examples/lac/benchmark.py +++ b/python/examples/lac/benchmark.py @@ -16,7 +16,7 @@ import sys import time import requests -from lac_reader import LACReader +from paddle_serving_app.reader import LACReader from paddle_serving_client import Client from paddle_serving_client.utils import MultiThreadRunner from paddle_serving_client.utils import benchmark_args @@ -25,7 +25,7 @@ args = benchmark_args() def single_func(idx, resource): - reader = LACReader("lac_dict") + reader = LACReader() start = time.time() if args.request == "rpc": client = Client() diff --git a/python/examples/lac/get_data.sh b/python/examples/lac/get_data.sh deleted file mode 100644 index 29e6a6b2b3e995f78c37e15baf2f9a3b627ca9ef..0000000000000000000000000000000000000000 --- a/python/examples/lac/get_data.sh +++ /dev/null @@ -1,2 +0,0 @@ -wget --no-check-certificate https://paddle-serving.bj.bcebos.com/lac/lac_model_jieba_web.tar.gz -tar -zxvf lac_model_jieba_web.tar.gz diff --git a/python/examples/lac/lac_client.py b/python/examples/lac/lac_client.py index 9c485a923e4d42b72af41f7b9ad45c5702ca93a1..ab9af730abb2f5b33f4d0292115b2f7bf682f278 100644 --- a/python/examples/lac/lac_client.py +++ b/python/examples/lac/lac_client.py @@ -15,7 +15,7 @@ # pylint: disable=doc-string-missing from paddle_serving_client import Client -from lac_reader import LACReader +from paddle_serving_app.reader import LACReader import sys import os import io @@ -24,7 +24,7 @@ client = Client() client.load_client_config(sys.argv[1]) client.connect(["127.0.0.1:9292"]) -reader = LACReader(sys.argv[2]) +reader = LACReader() for line in sys.stdin: if len(line) <= 0: continue @@ -32,4 +32,8 @@ 
for line in sys.stdin: if len(feed_data) <= 0: continue fetch_map = client.predict(feed={"words": feed_data}, fetch=["crf_decode"]) - print(fetch_map) + begin = fetch_map['crf_decode.lod'][0] + end = fetch_map['crf_decode.lod'][1] + segs = reader.parse_result(line, fetch_map["crf_decode"][begin:end]) + + print({"word_seg": "|".join(segs)}) diff --git a/python/examples/lac/lac_web_service.py b/python/examples/lac/lac_web_service.py index 62a7148b230029bc781fa550597df25471a7fc8d..9b1c6693b52393aee1294b521fe30fb1a9fd0d79 100644 --- a/python/examples/lac/lac_web_service.py +++ b/python/examples/lac/lac_web_service.py @@ -14,7 +14,7 @@ from paddle_serving_server.web_service import WebService import sys -from lac_reader import LACReader +from paddle_serving_app.reader import LACReader class LACService(WebService): diff --git a/python/paddle_serving_server/web_service.py b/python/paddle_serving_server/web_service.py index 7f37b10be05e84e29cf6cda3cd3cc3d939910027..b3fcc1b880fcbffa1da884e4b68350c1870997c1 100755 --- a/python/paddle_serving_server/web_service.py +++ b/python/paddle_serving_server/web_service.py @@ -86,7 +86,7 @@ class WebService(object): for key in fetch_map: fetch_map[key] = fetch_map[key].tolist() fetch_map = self.postprocess( - feed=feed, fetch=fetch, fetch_map=fetch_map) + feed=request.json["feed"], fetch=fetch, fetch_map=fetch_map) result = {"result": fetch_map} except ValueError: result = {"result": "Request Value Error"} diff --git a/tools/serving_build.sh b/tools/serving_build.sh index 7ac1ed31c22b0271e06266d9f905d94b77d1897c..989e48ead9864e717e573f7f0800a1afba2e934a 100644 --- a/tools/serving_build.sh +++ b/tools/serving_build.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash - +set -x function unsetproxy() { HTTP_PROXY_TEMP=$http_proxy HTTPS_PROXY_TEMP=$https_proxy @@ -455,15 +455,16 @@ function python_test_lac() { cd lac # pwd: /Serving/python/examples/lac case $TYPE in CPU) - sh get_data.sh - check_cmd "python -m paddle_serving_server.serve --model 
jieba_server_model/ --port 9292 &" + python -m paddle_serving_app.package --get_model lac + tar -xzvf lac.tar.gz + check_cmd "python -m paddle_serving_server.serve --model lac_model/ --port 9292 &" sleep 5 - check_cmd "echo \"我爱北京天安门\" | python lac_client.py jieba_client_conf/serving_client_conf.prototxt lac_dict/" + check_cmd "echo \"我爱北京天安门\" | python lac_client.py lac_client/serving_client_conf.prototxt" echo "lac CPU RPC inference pass" kill_server_process unsetproxy # maybe the proxy is used on iPipe, which makes web-test failed. - check_cmd "python lac_web_service.py jieba_server_model/ lac_workdir 9292 &" + check_cmd "python lac_web_service.py lac_model/ lac_workdir 9292 &" sleep 5 check_cmd "curl -H \"Content-Type:application/json\" -X POST -d '{\"feed\":[{\"words\": \"我爱北京天安门\"}], \"fetch\":[\"word_seg\"]}' http://127.0.0.1:9292/lac/prediction" # check http code