diff --git a/python/examples/grpc_impl_example/criteo_ctr_with_cube/test_client.py b/python/examples/grpc_impl_example/criteo_ctr_with_cube/test_client.py
index fe26e3d927b3ab7b73bcc06103419e485f305a81..ccbd61a4ad50a8a831d751b28dc24163003c55cf 100755
--- a/python/examples/grpc_impl_example/criteo_ctr_with_cube/test_client.py
+++ b/python/examples/grpc_impl_example/criteo_ctr_with_cube/test_client.py
@@ -39,13 +39,8 @@ for ei in range(10000):
     feed_dict['dense_input'] = data[0][0]
     for i in range(1, 27):
         feed_dict["embedding_{}.tmp_0".format(i - 1)] = data[0][i]
-    try:
-        fetch_map = client.predict(feed=feed_dict, fetch=["prob"])
-    except grpc.RpcError as e:
-        status_code = e.code()
-        if grpc.StatusCode.DEADLINE_EXCEEDED == status_code:
-            print('timeout')
-    else:
+    fetch_map = client.predict(feed=feed_dict, fetch=["prob"])
+    if fetch_map["status_code"] == 0:
         prob_list.append(fetch_map['prob'][0][1])
         label_list.append(data[0][-1][0])
 
diff --git a/python/examples/grpc_impl_example/fit_a_line/test_asyn_client.py b/python/examples/grpc_impl_example/fit_a_line/test_asyn_client.py
index ac851a37b4111126687c47c93816b55fdbb49397..b01a9372585bae42abca213fe8fb8a55505dfe57 100644
--- a/python/examples/grpc_impl_example/fit_a_line/test_asyn_client.py
+++ b/python/examples/grpc_impl_example/fit_a_line/test_asyn_client.py
@@ -14,7 +14,6 @@
 # pylint: disable=doc-string-missing
 
 from paddle_serving_client import MultiLangClient as Client
-import paddle
 import functools
 import time
 import threading
@@ -23,34 +22,30 @@ import grpc
 client = Client()
 client.connect(["127.0.0.1:9393"])
 
-test_reader = paddle.batch(
-    paddle.reader.shuffle(
-        paddle.dataset.uci_housing.test(), buf_size=500),
-    batch_size=1)
-
 complete_task_count = [0]
 lock = threading.Lock()
 
 
-def call_back(call_future, data):
-    fetch_map = call_future.result()
-    print("{} {}".format(fetch_map["price"][0], data[0][1][0]))
-    with lock:
-        complete_task_count[0] += 1
+def call_back(call_future):
+    try:
+        fetch_map = call_future.result()
+        print(fetch_map)
+    except grpc.RpcError as e:
+        print(e.code())
+    finally:
+        with lock:
+            complete_task_count[0] += 1
 
 
+x = [
+    0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283,
+    0.4919, 0.1856, 0.0795, -0.0332
+]
 task_count = 0
-for data in test_reader():
-    try:
-        future = client.predict(
-            feed={"x": data[0][0]}, fetch=["price"], asyn=True)
-    except grpc.RpcError as e:
-        status_code = e.code()
-        if grpc.StatusCode.DEADLINE_EXCEEDED == status_code:
-            print('timeout')
-    else:
-        task_count += 1
-        future.add_done_callback(functools.partial(call_back, data=data))
+for i in range(3):
+    future = client.predict(feed={"x": x}, fetch=["price"], asyn=True)
+    task_count += 1
+    future.add_done_callback(functools.partial(call_back))
 
 while complete_task_count[0] != task_count:
     time.sleep(0.1)
diff --git a/python/examples/grpc_impl_example/fit_a_line/test_batch_client.py b/python/examples/grpc_impl_example/fit_a_line/test_batch_client.py
index b4a19c716cb76656da1dd7c2f2481f7cac2ea682..3a01040e5ba557974b38ff7e067eaca15e7ebdd0 100644
--- a/python/examples/grpc_impl_example/fit_a_line/test_batch_client.py
+++ b/python/examples/grpc_impl_example/fit_a_line/test_batch_client.py
@@ -12,27 +12,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # pylint: disable=doc-string-missing
-
 from paddle_serving_client import MultiLangClient as Client
-import paddle
-import grpc
 
 client = Client()
 client.connect(["127.0.0.1:9393"])
 
 batch_size = 2
-test_reader = paddle.batch(
-    paddle.reader.shuffle(
-        paddle.dataset.uci_housing.test(), buf_size=500),
-    batch_size=batch_size)
+x = [
+    0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283,
+    0.4919, 0.1856, 0.0795, -0.0332
+]
 
-for data in test_reader():
-    batch_feed = [{"x": x[0]} for x in data]
-    try:
-        fetch_map = client.predict(feed=batch_feed, fetch=["price"])
-    except grpc.RpcError as e:
-        status_code = e.code()
-        if grpc.StatusCode.DEADLINE_EXCEEDED == status_code:
-            print('timeout')
-    else:
+for i in range(3):
+    batch_feed = [{"x": x} for j in range(batch_size)]
+    fetch_map = client.predict(feed=batch_feed, fetch=["price"])
+    if fetch_map["status_code"] == 0:
         print(fetch_map)
+    else:
+        print(fetch_map["status_code"])
diff --git a/python/examples/grpc_impl_example/fit_a_line/test_general_pb_client.py b/python/examples/grpc_impl_example/fit_a_line/test_general_pb_client.py
index 177d1c906d3dd79a55f0006f23520f0d88cf86a4..a5ad17e0315215432c2d058941a43996304342bb 100644
--- a/python/examples/grpc_impl_example/fit_a_line/test_general_pb_client.py
+++ b/python/examples/grpc_impl_example/fit_a_line/test_general_pb_client.py
@@ -14,24 +14,17 @@
 # pylint: disable=doc-string-missing
 
 from paddle_serving_client import MultiLangClient as Client
-import grpc
-import paddle
 
 client = Client()
 client.connect(["127.0.0.1:9393"])
 
-test_reader = paddle.batch(
-    paddle.reader.shuffle(
-        paddle.dataset.uci_housing.test(), buf_size=500),
-    batch_size=1)
-
-for data in test_reader():
-    try:
-        fetch_map = client.predict(
-            feed={"x": data[0][0]}, fetch=["price"], is_python=False)
-    except grpc.RpcError as e:
-        status_code = e.code()
-        if grpc.StatusCode.DEADLINE_EXCEEDED == status_code:
-            print('timeout')
+x = [
+    0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283,
+    0.4919, 0.1856, 0.0795, -0.0332
+]
+for i in range(3):
+    fetch_map = client.predict(feed={"x": x}, fetch=["price"], is_python=False)
+    if fetch_map["status_code"] == 0:
+        print(fetch_map)
     else:
-        print("{} {}".format(fetch_map["price"][0], data[0][1][0]))
+        print(fetch_map["status_code"])
diff --git a/python/examples/grpc_impl_example/fit_a_line/test_list_input_client.py b/python/examples/grpc_impl_example/fit_a_line/test_numpy_input_client.py
similarity index 60%
rename from python/examples/grpc_impl_example/fit_a_line/test_list_input_client.py
rename to python/examples/grpc_impl_example/fit_a_line/test_numpy_input_client.py
index 6c69ccfcce28cb674682174f867e85c72a351c0e..329eae6a656747f1f52f4c986acd1be7ed90ff4a 100644
--- a/python/examples/grpc_impl_example/fit_a_line/test_list_input_client.py
+++ b/python/examples/grpc_impl_example/fit_a_line/test_numpy_input_client.py
@@ -14,24 +14,18 @@
 # pylint: disable=doc-string-missing
 
 from paddle_serving_client import MultiLangClient as Client
-import grpc
-import paddle
+import numpy as np
 
 client = Client()
 client.connect(["127.0.0.1:9393"])
 
-test_reader = paddle.batch(
-    paddle.reader.shuffle(
-        paddle.dataset.uci_housing.test(), buf_size=500),
-    batch_size=1)
-
-for data in test_reader():
-    try:
-        fetch_map = client.predict(
-            feed={"x": data[0][0].tolist()}, fetch=["price"])
-    except grpc.RpcError as e:
-        status_code = e.code()
-        if grpc.StatusCode.DEADLINE_EXCEEDED == status_code:
-            print('timeout')
+x = [
+    0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283,
+    0.4919, 0.1856, 0.0795, -0.0332
+]
+for i in range(3):
+    fetch_map = client.predict(feed={"x": np.array(x)}, fetch=["price"])
+    if fetch_map["status_code"] == 0:
+        print(fetch_map)
     else:
-        print("{} {}".format(fetch_map["price"][0], data[0][1][0]))
+        print(fetch_map["status_code"])
diff --git a/python/examples/grpc_impl_example/fit_a_line/test_sync_client.py b/python/examples/grpc_impl_example/fit_a_line/test_sync_client.py
index 2a9d07f21a1a5b94c1a39c7a680d5a57767781f2..9f699846c70e16705004581f4ce2511986063942 100644
--- a/python/examples/grpc_impl_example/fit_a_line/test_sync_client.py
+++ b/python/examples/grpc_impl_example/fit_a_line/test_sync_client.py
@@ -14,23 +14,17 @@
 # pylint: disable=doc-string-missing
 
 from paddle_serving_client import MultiLangClient as Client
-import paddle
-import grpc
 
 client = Client()
 client.connect(["127.0.0.1:9393"])
 
-test_reader = paddle.batch(
-    paddle.reader.shuffle(
-        paddle.dataset.uci_housing.test(), buf_size=500),
-    batch_size=1)
-
-for data in test_reader():
-    try:
-        fetch_map = client.predict(feed={"x": data[0][0]}, fetch=["price"])
-    except grpc.RpcError as e:
-        status_code = e.code()
-        if grpc.StatusCode.DEADLINE_EXCEEDED == status_code:
-            print('timeout')
+x = [
+    0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283,
+    0.4919, 0.1856, 0.0795, -0.0332
+]
+for i in range(3):
+    fetch_map = client.predict(feed={"x": x}, fetch=["price"])
+    if fetch_map["status_code"] == 0:
+        print(fetch_map)
     else:
-        print("{} {}".format(fetch_map["price"][0], data[0][1][0]))
+        print(fetch_map["status_code"])
diff --git a/python/examples/grpc_impl_example/fit_a_line/test_timeout_client.py b/python/examples/grpc_impl_example/fit_a_line/test_timeout_client.py
index 14967681c2682278cec219e09d6d49c4144b4a60..4a2c2cff304e7fd04f293f90833e9d7dd5cd373a 100644
--- a/python/examples/grpc_impl_example/fit_a_line/test_timeout_client.py
+++ b/python/examples/grpc_impl_example/fit_a_line/test_timeout_client.py
@@ -14,24 +14,21 @@
 # pylint: disable=doc-string-missing
 
 from paddle_serving_client import MultiLangClient as Client
-import paddle
 import grpc
 
 client = Client()
 client.connect(["127.0.0.1:9393"])
 client.set_rpc_timeout_ms(1)
 
-test_reader = paddle.batch(
-    paddle.reader.shuffle(
-        paddle.dataset.uci_housing.test(), buf_size=500),
-    batch_size=1)
-
-for data in test_reader():
-    try:
-        fetch_map = client.predict(feed={"x": data[0][0]}, fetch=["price"])
-    except grpc.RpcError as e:
-        status_code = e.code()
-        if grpc.StatusCode.DEADLINE_EXCEEDED == status_code:
-            print('timeout')
+x = [
+    0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283,
+    0.4919, 0.1856, 0.0795, -0.0332
+]
+for i in range(3):
+    fetch_map = client.predict(feed={"x": x}, fetch=["price"])
+    if fetch_map["status_code"] == 0:
+        print(fetch_map)
+    elif fetch_map["status_code"] == grpc.StatusCode.DEADLINE_EXCEEDED:
+        print('timeout')
     else:
-        print("{} {}".format(fetch_map["price"][0], data[0][1][0]))
+        print(fetch_map["status_code"])
diff --git a/python/examples/imdb/test_multilang_ensemble_client.py b/python/examples/imdb/test_multilang_ensemble_client.py
index 6d745e8bb536235251bd2d95d65470aa09348445..6686d4c8c38d6a17cb9c5701abf7d76773031772 100644
--- a/python/examples/imdb/test_multilang_ensemble_client.py
+++ b/python/examples/imdb/test_multilang_ensemble_client.py
@@ -19,7 +19,6 @@ from imdb_reader import IMDBDataset
 client = MultiLangClient()
 # If you have more than one model, make sure that the input
 # and output of more than one model are the same.
-client.load_client_config('imdb_bow_client_conf/serving_client_conf.prototxt')
 client.connect(["127.0.0.1:9393"])
 
 # you can define any english sentence or dataset here
diff --git a/python/paddle_serving_client/__init__.py b/python/paddle_serving_client/__init__.py
index 864dc39cdbf64b0ccbc0e1ca4e25fdacc096c137..6eb92fa05767ed10d7ff719981cb4898fd7475d8 100644
--- a/python/paddle_serving_client/__init__.py
+++ b/python/paddle_serving_client/__init__.py
@@ -569,6 +569,7 @@ class MultiLangClient(object):
                 ret = multi_result_map.values()[0]
             else:
                 ret = multi_result_map
+        ret["status_code"] = 0
         return ret if not need_variant_tag else [ret, tag]
 
     def _done_callback_func(self, fetch, is_python, need_variant_tag):
@@ -589,12 +590,15 @@ class MultiLangClient(object):
                 is_python=True):
         req = self._pack_inference_request(feed, fetch, is_python=is_python)
         if not asyn:
-            resp = self.stub_.Inference(req, timeout=self.rpc_timeout_s_)
-            return self._unpack_inference_response(
-                resp,
-                fetch,
-                is_python=is_python,
-                need_variant_tag=need_variant_tag)
+            try:
+                resp = self.stub_.Inference(req, timeout=self.rpc_timeout_s_)
+                return self._unpack_inference_response(
+                    resp,
+                    fetch,
+                    is_python=is_python,
+                    need_variant_tag=need_variant_tag)
+            except grpc.RpcError as e:
+                return {"status_code": e.code()}
         else:
             call_future = self.stub_.Inference.future(
                 req, timeout=self.rpc_timeout_s_)
@@ -612,7 +616,10 @@ class MultiLangPredictFuture(object):
         self.callback_func_ = callback_func
 
     def result(self):
-        resp = self.call_future_.result()
+        try:
+            resp = self.call_future_.result()
+        except grpc.RpcError as e:
+            return {"status_code": e.code()}
         return self.callback_func_(resp)
 
     def add_done_callback(self, fn):
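
The example updates above all rely on the new return convention in MultiLangClient.predict: instead of letting grpc.RpcError propagate to the caller, the client now catches it and returns a dict whose "status_code" field carries the gRPC status code, while successful responses carry status_code == 0. As a caller-side reference, a minimal sketch of that convention follows; it is not part of the patch and assumes a fit_a_line model is already being served on 127.0.0.1:9393.

# Minimal sketch of the status_code convention (not part of the patch);
# assumes a fit_a_line model is served on 127.0.0.1:9393.
from paddle_serving_client import MultiLangClient as Client
import grpc

client = Client()
client.connect(["127.0.0.1:9393"])

x = [
    0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283,
    0.4919, 0.1856, 0.0795, -0.0332
]

# predict() no longer raises grpc.RpcError; it returns a dict whose
# "status_code" is 0 on success or a grpc.StatusCode on RPC failure.
fetch_map = client.predict(feed={"x": x}, fetch=["price"])
if fetch_map["status_code"] == 0:
    print(fetch_map)
elif fetch_map["status_code"] == grpc.StatusCode.DEADLINE_EXCEEDED:
    print("timeout")
else:
    print(fetch_map["status_code"])

The DEADLINE_EXCEEDED branch mirrors test_timeout_client.py; any other non-zero code is simply printed, as in the other fit_a_line examples.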