Commit 0390de0f authored by barriery

update url

Parent 2bf2d9d0
......
@@ -24,7 +24,7 @@ def cv2_to_base64(image):
     return base64.b64encode(image).decode('utf8')


-url = "http://127.0.0.1:9999/prediction"
+url = "http://127.0.0.1:9999/ocr/prediction"
 test_img_dir = "imgs/"
 for img_file in os.listdir(test_img_dir):
     with open(os.path.join(test_img_dir, img_file), 'rb') as file:
......
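For reference, here is a self-contained sketch of the updated OCR client call against the renamed route. The URL, image directory, and request layout mirror the snippet above; the `"image"` key and the response printing are assumptions about the rest of the example client, which this diff truncates.

```
# Minimal sketch of calling the OCR pipeline over the new name-prefixed route.
import base64
import json
import os

import requests

url = "http://127.0.0.1:9999/ocr/prediction"   # route renamed by this commit
test_img_dir = "imgs/"

for img_file in os.listdir(test_img_dir):
    with open(os.path.join(test_img_dir, img_file), 'rb') as f:
        image = f.read()
    # "image" as the key is an assumption about the OCR example's convention.
    data = {"key": ["image"],
            "value": [base64.b64encode(image).decode('utf8')]}
    r = requests.post(url=url, data=json.dumps(data))
    print(r.text)
```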
......
@@ -15,5 +15,5 @@ python web_service.py &>log.txt &
 ## Http test

 ```
-curl -X POST -k http://localhost:18080/prediction -d '{"key": ["x"], "value": ["0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332"]}'
+curl -X POST -k http://localhost:18080/uci/prediction -d '{"key": ["x"], "value": ["0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332"]}'
 ```
......
@@ -15,5 +15,5 @@ python web_service.py &>log.txt &
 ## Test

 ```
-curl -X POST -k http://localhost:18080/prediction -d '{"key": ["x"], "value": ["0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332"]}'
+curl -X POST -k http://localhost:18080/uci/prediction -d '{"key": ["x"], "value": ["0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332"]}'
 ```
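The same call can also be made from Python; a minimal sketch follows. Only the URL and JSON payload are taken from the curl lines above; the timeout and the raw-body printing are illustrative, and the shape of the returned JSON is not shown in this diff.

```
import json

import requests

url = "http://localhost:18080/uci/prediction"
data = {
    "key": ["x"],
    "value": ["0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, "
              "-0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332"],
}
r = requests.post(url=url, data=json.dumps(data), timeout=10)
print(r.status_code, r.text)
```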
......
@@ -35,7 +35,7 @@ class UciOp(Op):
         return input_dict

     def postprocess(self, input_dicts, fetch_dict):
-        _LOGGER.info(fetch_dict)
+        # _LOGGER.info(fetch_dict)
         fetch_dict["price"] = str(fetch_dict["price"][0][0])
         return fetch_dict
......
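A side note on the hunk above: the uci model returns `price` as a batch-shaped array, and the pipeline's `Response.value` field is `repeated string`, which is why `postprocess` indexes `[0][0]` and wraps the result in `str()`. A standalone illustration, with a made-up fetch value:

```
import numpy as np

# Stand-in for the fetch_dict the predictor hands to postprocess; the price
# value here is fabricated purely for illustration.
fetch_dict = {"price": np.array([[18.901152]])}
fetch_dict["price"] = str(fetch_dict["price"][0][0])
print(fetch_dict)  # {'price': '18.901152'}
```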
......
@@ -29,7 +29,7 @@ class WebService(object):
     def __init__(self, name="default_service"):
         self.name = name
         # pipeline
-        self._server = pipeline.PipelineServer()
+        self._server = pipeline.PipelineServer(self.name)

     def get_pipeline_response(self, read_op):
         return None
......
......
@@ -32,7 +32,7 @@ class WebService(object):
     def __init__(self, name="default_service"):
         self.name = name
         # pipeline
-        self._server = pipeline.PipelineServer()
+        self._server = pipeline.PipelineServer(self.name)
         self.gpus = [] # deprecated
         self.rpc_service_list = [] # deprecated
......
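Both `WebService` constructors now forward `self.name` into `PipelineServer`, so the service's name doubles as its HTTP prefix. A rough usage sketch in the style of the pipeline examples is below; the `prepare_pipeline_config`/`run_service` method names, the `web_service` module name for `UciOp`, and the config file are assumptions about the surrounding API, of which this diff only shows a fragment.

```
# Rough usage sketch (not part of this commit). A service constructed with
# name="uci" answers HTTP at /uci/prediction, because WebService now passes
# self.name into pipeline.PipelineServer.
from paddle_serving_server.web_service import WebService
from web_service import UciOp  # the Op subclass shown earlier; module name assumed


class UciService(WebService):
    def get_pipeline_response(self, read_op):
        # read_op is the request-reading op handed in by WebService.
        return UciOp(name="uci", input_ops=[read_op])


if __name__ == "__main__":
    uci_service = UciService(name="uci")
    uci_service.prepare_pipeline_config("config.yml")  # assumed helper name
    uci_service.run_service()                          # assumed helper name
```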
......
@@ -40,7 +40,8 @@ class ChannelDataEcode(enum.Enum):
     RPC_PACKAGE_ERROR = 4
     CLIENT_ERROR = 5
     CLOSED_ERROR = 6
-    UNKNOW = 7
+    NO_SERVICE = 7
+    UNKNOW = 8


 class ChannelDataType(enum.Enum):
......
......
@@ -28,12 +28,13 @@ message Response {
 message Request {
   repeated string key = 1;
   repeated string value = 2;
+  string name = 3;
 }

 service PipelineService {
   rpc inference(Request) returns (Response) {
     option (google.api.http) = {
-      post : "/prediction"
+      post : "/{name=*}/prediction"
       body : "*"
     };
   }
......
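This `google.api.http` template is what renames every HTTP endpoint in the commit: grpc-gateway now captures the first path segment into `Request.name` before forwarding to `inference`. A bare `POST /prediction` no longer matches the template, which is why the clients and READMEs above switch to the `/<name>/prediction` form. A small illustration of the binding, with values borrowed from the uci example (the dicts simply mimic the proto fields):

```
# Illustration only: the first dict is the HTTP call grpc-gateway receives,
# the second is the gRPC Request it constructs under the template
# post: "/{name=*}/prediction".
http_call = {
    "method": "POST",
    "path": "/uci/prediction",                    # "{name=*}" captures "uci"
    "body": {"key": ["x"], "value": ["0.0137, -0.1136"]},  # truncated example
}

grpc_request = {                                  # fields of message Request
    "name": "uci",                                # filled from the URL path
    "key": ["x"],
    "value": ["0.0137, -0.1136"],                 # truncated example
}
```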
......
@@ -22,23 +22,31 @@ from contextlib import closing
 import multiprocessing
 import yaml

-from .proto import pipeline_service_pb2_grpc
+from .proto import pipeline_service_pb2_grpc, pipeline_service_pb2
 from . import operator
 from . import dag
 from . import util
 from . import channel

 _LOGGER = logging.getLogger(__name__)


 class PipelineServicer(pipeline_service_pb2_grpc.PipelineServiceServicer):
-    def __init__(self, response_op, dag_conf, worker_idx=-1):
+    def __init__(self, name, response_op, dag_conf, worker_idx=-1):
         super(PipelineServicer, self).__init__()
+        self._name = name
         # init dag executor
         self._dag_executor = dag.DAGExecutor(response_op, dag_conf, worker_idx)
         self._dag_executor.start()
         _LOGGER.info("[PipelineServicer] succ init")

     def inference(self, request, context):
+        if request.name != "" and request.name != self._name:
+            resp = pipeline_service_pb2.Response()
+            resp.ecode = channel.ChannelDataEcode.NO_SERVICE.value
+            resp.error_info = "Failed to inference: Service name error."
+            return resp
         resp = self._dag_executor.call(request)
         return resp
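In effect, `inference` now applies a simple acceptance rule before dispatching to the DAG. The helper below merely restates the guard above; it is not code from the commit.

```
# Restatement of the guard in inference(): an empty request.name (for example
# a gRPC client that never sets the new field) is still served; otherwise the
# name must match the one the servicer was built with, and mismatches get an
# ecode of NO_SERVICE (7) instead of being run through the DAG.
def accepts(servicer_name, request_name):
    return request_name == "" or request_name == servicer_name


assert accepts("ocr", "")         # old-style clients keep working
assert accepts("ocr", "ocr")      # prefix routed by the gateway matches
assert not accepts("ocr", "uci")  # wrong prefix -> NO_SERVICE response
```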
......
@@ -58,7 +66,8 @@ def _reserve_port(port):
 class PipelineServer(object):
-    def __init__(self):
+    def __init__(self, name=None):
+        self._name = name  # for grpc-gateway path
         self._rpc_port = None
         self._worker_num = None
         self._response_op = None
......
@@ -201,7 +210,8 @@ class PipelineServer(object):
             ('grpc.max_receive_message_length', 256 * 1024 * 1024)
         ])
         pipeline_service_pb2_grpc.add_PipelineServiceServicer_to_server(
-            PipelineServicer(self._response_op, self._conf), server)
+            PipelineServicer(self._name, self._response_op, self._conf),
+            server)
         server.add_insecure_port('[::]:{}'.format(self._rpc_port))
         server.start()
         self._run_grpc_gateway(
......
@@ -217,7 +227,8 @@ class PipelineServer(object):
             futures.ThreadPoolExecutor(
                 max_workers=1, ), options=options)
         pipeline_service_pb2_grpc.add_PipelineServiceServicer_to_server(
-            PipelineServicer(response_op, dag_conf, worker_idx), server)
+            PipelineServicer(self._name, response_op, dag_conf, worker_idx),
+            server)
         server.add_insecure_port(bind_address)
         server.start()
         server.wait_for_termination()
......
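For servers built on `PipelineServer` directly rather than through `WebService`, the new constructor argument is the only change. A rough sketch of how it would be used follows; `set_response_op`, `prepare_server`, and `run_server` are assumptions drawn from the pipeline examples, and `"ocr"`/`config.yml` are placeholders.

```
# Rough sketch, not from this commit: standing up a named PipelineServer
# directly. Only PipelineServer("ocr") reflects the change above; the other
# method names are assumptions about the existing PipelineServer API.
from paddle_serving_server import pipeline


def serve(response_op, yml_path="config.yml"):
    # response_op: the tail Op of an already-built DAG (construction not shown)
    server = pipeline.PipelineServer("ocr")  # "ocr" -> /ocr/prediction prefix
    server.set_response_op(response_op)
    server.prepare_server(yml_path)   # rpc/http ports read from the yaml (assumed)
    server.run_server()
```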
......
@@ -18,6 +18,7 @@ package baidu.paddle_serving.pipeline_serving;
 message Request {
   repeated string key = 1;
   repeated string value = 2;
+  optional string name = 3;
 };

 message Response {
......
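With the `name` field now in `Request`, a plain gRPC client can also address a specific pipeline without going through the HTTP gateway. A sketch follows; the stub module path and the 9998 rpc port are assumptions, and only the `key`/`value`/`name` fields come from this commit.

```
# Sketch of a direct gRPC call exercising the new Request.name field.
import grpc

from paddle_serving_server.pipeline.proto import pipeline_service_pb2
from paddle_serving_server.pipeline.proto import pipeline_service_pb2_grpc

channel = grpc.insecure_channel("127.0.0.1:9998")
stub = pipeline_service_pb2_grpc.PipelineServiceStub(channel)

req = pipeline_service_pb2.Request()
req.key.append("x")
req.value.append("0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, "
                 "-0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332")
req.name = "uci"  # must match the server's registered name, or be left empty
resp = stub.inference(req)
print(resp)
```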