Commit b3b257e6 authored by barrierye

change status_code to serving_status_code

Parent 682461fe
@@ -26,7 +26,7 @@ x = [
 for i in range(3):
     batch_feed = [{"x": x} for j in range(batch_size)]
     fetch_map = client.predict(feed=batch_feed, fetch=["price"])
-    if fetch_map["status_code"] == 0:
+    if fetch_map["serving_status_code"] == 0:
         print(fetch_map)
     else:
-        print(fetch_map["status_code"])
+        print(fetch_map["serving_status_code"])

@@ -24,7 +24,7 @@ x = [
 ]
 for i in range(3):
     fetch_map = client.predict(feed={"x": x}, fetch=["price"], is_python=False)
-    if fetch_map["status_code"] == 0:
+    if fetch_map["serving_status_code"] == 0:
         print(fetch_map)
     else:
-        print(fetch_map["status_code"])
+        print(fetch_map["serving_status_code"])

@@ -25,7 +25,7 @@ x = [
 ]
 for i in range(3):
     fetch_map = client.predict(feed={"x": np.array(x)}, fetch=["price"])
-    if fetch_map["status_code"] == 0:
+    if fetch_map["serving_status_code"] == 0:
         print(fetch_map)
     else:
-        print(fetch_map["status_code"])
+        print(fetch_map["serving_status_code"])

@@ -24,7 +24,7 @@ x = [
 ]
 for i in range(3):
     fetch_map = client.predict(feed={"x": x}, fetch=["price"])
-    if fetch_map["status_code"] == 0:
+    if fetch_map["serving_status_code"] == 0:
         print(fetch_map)
     else:
-        print(fetch_map["status_code"])
+        print(fetch_map["serving_status_code"])

@@ -26,9 +26,9 @@ x = [
 ]
 for i in range(3):
     fetch_map = client.predict(feed={"x": x}, fetch=["price"])
-    if fetch_map["status_code"] == 0:
+    if fetch_map["serving_status_code"] == 0:
         print(fetch_map)
-    elif fetch_map["status_code"] == grpc.StatusCode.DEADLINE_EXCEEDED:
+    elif fetch_map["serving_status_code"] == grpc.StatusCode.DEADLINE_EXCEEDED:
         print('timeout')
     else:
-        print(fetch_map["status_code"])
+        print(fetch_map["serving_status_code"])

@@ -569,7 +569,7 @@ class MultiLangClient(object):
             ret = list(multi_result_map.values())[0]
         else:
             ret = multi_result_map
-        ret["status_code"] = 0
+        ret["serving_status_code"] = 0
         return ret if not need_variant_tag else [ret, tag]

     def _done_callback_func(self, fetch, is_python, need_variant_tag):

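On the success path above, the dict handed back to user code now carries the fetched tensors alongside the renamed status key. A rough illustration of its shape, assuming a single fetch variable named "price" (the tensor value and dtype are made up for this sketch):

import numpy as np

# Illustrative shape only; the output value is invented.
fetch_map = {
    "price": np.array([[18.9]], dtype=np.float32),  # fetched output tensor
    "serving_status_code": 0,                       # 0 means the RPC and unpacking succeeded
}
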
@@ -598,7 +598,7 @@ class MultiLangClient(object):
                     is_python=is_python,
                     need_variant_tag=need_variant_tag)
             except grpc.RpcError as e:
-                return {"status_code": e.code()}
+                return {"serving_status_code": e.code()}
         else:
             call_future = self.stub_.Inference.future(
                 req, timeout=self.rpc_timeout_s_)

@@ -619,7 +619,7 @@ class MultiLangPredictFuture(object):
         try:
             resp = self.call_future_.result()
         except grpc.RpcError as e:
-            return {"status_code": e.code()}
+            return {"serving_status_code": e.code()}
         return self.callback_func_(resp)

     def add_done_callback(self, fn):
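The last two hunks cover the asynchronous path, where the same key is attached either by the done-callback or when the future's result is collected. A small usage sketch, assuming predict exposes an asyn flag that returns a MultiLangPredictFuture (the flag name is an assumption and is not shown in this diff):

# The `asyn` keyword is an assumption for illustration; the diff only shows the
# future-based branch inside the client.
future = client.predict(feed={"x": x}, fetch=["price"], asyn=True)
# ... do other work while the RPC is in flight ...
fetch_map = future.result()  # wraps MultiLangPredictFuture.result() from the hunk above
if fetch_map["serving_status_code"] != 0:
    print("RPC failed with gRPC status:", fetch_map["serving_status_code"])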