Unverified commit 1fde1258 authored by MissPenguin, committed by GitHub

Merge pull request #905 from wangjiawei04/ocr_1008

pdserving fetch var fix
......@@ -74,6 +74,7 @@ class TextClassifierHelper(TextClassifier):
prob_out = outputs[0]
label_out = outputs[1]
indices = args["indices"]
img_list = args["img_list"]
cls_res = [['', 0.0]] * len(label_out)
if len(label_out.shape) != 1:
prob_out, label_out = label_out, prob_out
......@@ -84,7 +85,7 @@ class TextClassifierHelper(TextClassifier):
cls_res[indices[rno]] = [label, score]
if '180' in label and score > self.cls_thresh:
img_list[indices[rno]] = cv2.rotate(img_list[indices[rno]], 1)
return args["img_list"], cls_res
return img_list, cls_res
class OCRService(WebService):
......
......@@ -79,6 +79,7 @@ class TextClassifierHelper(TextClassifier):
prob_out = outputs[0]
label_out = outputs[1]
indices = args["indices"]
img_list = args["img_list"]
cls_res = [['', 0.0]] * len(label_out)
if len(label_out.shape) != 1:
prob_out, label_out = label_out, prob_out
......@@ -89,7 +90,7 @@ class TextClassifierHelper(TextClassifier):
cls_res[indices[rno]] = [label, score]
if '180' in label and score > self.cls_thresh:
img_list[indices[rno]] = cv2.rotate(img_list[indices[rno]], 1)
return args["img_list"], cls_res
return img_list, cls_res
class OCRService(WebService):
......
......@@ -50,7 +50,7 @@ class TextSystemHelper(TextSystem):
self.det_client = Debugger()
self.det_client.load_model_config(
global_args.det_model_dir, gpu=True, profile=False)
self.fetch = ["ctc_greedy_decoder_0.tmp_0", "softmax_0.tmp_0"]
self.fetch = ["save_infer_model/scale_0.tmp_0", "save_infer_model/scale_1.tmp_0"]
def preprocess(self, img):
feed, fetch, self.tmp_args = self.text_detector.preprocess(img)
......
......@@ -43,14 +43,14 @@ class TextSystemHelper(TextSystem):
if self.use_angle_cls:
self.clas_client = Client()
self.clas_client.load_client_config(
"ocr_clas_client/serving_client_conf.prototxt")
"cls_infer_client/serving_client_conf.prototxt")
self.clas_client.connect(["127.0.0.1:9294"])
self.text_classifier = TextClassifierHelper(args)
self.det_client = Client()
self.det_client.load_client_config(
"det_db_client/serving_client_conf.prototxt")
"det_infer_client/serving_client_conf.prototxt")
self.det_client.connect(["127.0.0.1:9293"])
self.fetch = ["ctc_greedy_decoder_0.tmp_0", "softmax_0.tmp_0"]
self.fetch = ["save_infer_model/scale_0.tmp_0", "save_infer_model/scale_1.tmp_0"]
def preprocess(self, img):
feed, fetch, self.tmp_args = self.text_detector.preprocess(img)
......
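The hard-coded `self.fetch` list above has to match the variable names recorded when the inference model was converted for Serving. A quick way to double-check them (not part of this PR; it assumes the plain-text prototxt layout written by the conversion step) is to read the client config directly:

```python
# Minimal sketch, not part of this PR: print the feed/fetch variable names
# recorded in a converted client config so they can be compared against the
# hard-coded self.fetch list above. Assumes the plain-text prototxt layout
# written by the Serving model conversion step.
with open("det_infer_client/serving_client_conf.prototxt") as f:
    for line in f:
        line = line.strip()
        if line.startswith("name:") or line.startswith("alias_name:"):
            print(line)
```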
......@@ -16,17 +16,23 @@
**Operating system: Linux/Windows**
**Python version: 2.7/3.6/3.7**
**Python version: 2.7/3.5/3.6/3.7**
**Python instructions:**
Some of the Serving features used for OCR are still being tested, so here we provide the [Serving latest package](https://github.com/PaddlePaddle/Serving/blob/develop/doc/LATEST_PACKAGES.md)
Simply choose the whl packages that match your environment. Taking Python 3.5 as an example, run the following commands
```
#Choose either the CPU or GPU version
#GPU server
python -m pip install paddle_serving_server_gpu
#CUDA 9
python -m pip install -U https://paddle-serving.bj.bcebos.com/whl/paddle_serving_server_gpu-0.0.0.post9-py3-none-any.whl
#CUDA 10
python -m pip install -U https://paddle-serving.bj.bcebos.com/whl/paddle_serving_server_gpu-0.0.0.post10-py3-none-any.whl
#CPU server
python -m pip install paddle_serving_server
python -m pip install -U https://paddle-serving.bj.bcebos.com/whl/paddle_serving_server-0.0.0-py3-none-any.whl
#Client and App packages use the links below (the same for CPU and GPU)
python -m pip install paddle_serving_app paddle_serving_client
python -m pip install -U https://paddle-serving.bj.bcebos.com/whl/paddle_serving_client-0.0.0-cp36-none-any.whl https://paddle-serving.bj.bcebos.com/whl/paddle_serving_app-0.0.0-py3-none-any.whl
```
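A quick post-install check (a minimal sketch, not part of the original guide) is to import the installed packages:

```python
# Minimal post-install check, not part of the original guide: these imports
# should succeed once the wheels above are installed. Swap in
# paddle_serving_server or paddle_serving_server_gpu to check the server wheel.
import paddle_serving_client
import paddle_serving_app

print("paddle_serving_client and paddle_serving_app imported successfully")
```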
## 2. Convert the trained model to a Serving model
......@@ -214,12 +220,12 @@ python rec_web_client.py
```
#Standard version, for Linux users
#For GPU users
python -m paddle_serving_server_gpu.serve --model det_mv_server --port 9293 --gpu_id 0
python -m paddle_serving_server_gpu.serve --model ocr_cls_server --port 9294 --gpu_id 0
python -m paddle_serving_server_gpu.serve --model det_infer_server --port 9293 --gpu_id 0
python -m paddle_serving_server_gpu.serve --model cls_infer_server --port 9294 --gpu_id 0
python ocr_rpc_server.py
#For CPU users
python -m paddle_serving_server.serve --model det_mv_server --port 9293
python -m paddle_serving_server.serve --model ocr_cls_server --port 9294
python -m paddle_serving_server.serve --model det_infer_server --port 9293
python -m paddle_serving_server.serve --model cls_infer_server --port 9294
python ocr_rpc_server.py
#Quick version, for Windows/Linux users
......
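For reference, a minimal RPC client against the detection server started above could look like the sketch below. Only the client config path, the port, and a fetch variable name come from this PR; the feed name `image`, the test image, and the resize are illustrative assumptions, and the actual feed/fetch names for a given converted model should be read from its `serving_client_conf.prototxt`.

```python
# Minimal RPC client sketch; only the config path, port and fetch name come
# from this PR, everything else is an illustrative assumption.
import cv2
from paddle_serving_client import Client

client = Client()
client.load_client_config("det_infer_client/serving_client_conf.prototxt")
client.connect(["127.0.0.1:9293"])

# Assumed preprocessing: resize to a fixed shape and convert HWC -> CHW.
img = cv2.imread("test.jpg")
img = cv2.resize(img, (640, 640)).astype("float32").transpose((2, 0, 1))

# Verify the fetch name against the converted model's prototxt before use.
fetch = ["save_infer_model/scale_0.tmp_0"]
fetch_map = client.predict(feed={"image": img}, fetch=fetch)
print(list(fetch_map.keys()))
```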