From f86a565be66f2037e779b2b818a84de0369c37a2 Mon Sep 17 00:00:00 2001
From: barrierye
Date: Mon, 23 Mar 2020 20:02:02 +0800
Subject: [PATCH] recover client.connect() function

---
 python/examples/bert/benchmark.py              |  4 +---
 python/examples/bert/benchmark_batch.py        |  4 +---
 python/examples/bert/bert_client.py            |  3 +--
 python/examples/criteo_ctr/benchmark.py        |  4 +---
 python/examples/criteo_ctr/benchmark_batch.py  |  4 +---
 python/examples/criteo_ctr/test_client.py      |  3 +--
 .../examples/criteo_ctr_with_cube/benchmark.py |  3 +--
 .../criteo_ctr_with_cube/benchmark_batch.py    |  3 +--
 .../criteo_ctr_with_cube/test_client.py        |  3 +--
 python/examples/fit_a_line/benchmark.py        |  3 +--
 python/examples/fit_a_line/test_client.py      |  3 +--
 python/examples/imagenet/benchmark.py          |  4 +---
 python/examples/imagenet/benchmark_batch.py    |  4 +---
 python/examples/imagenet/image_rpc_client.py   |  3 +--
 python/examples/imdb/benchmark_batch.py        |  3 +--
 python/examples/imdb/test_client.py            |  3 +--
 python/examples/imdb/test_client_batch.py      |  3 +--
 python/examples/lac/benchmark.py               |  3 +--
 python/examples/lac/lac_client.py              |  3 +--
 python/paddle_serving_client/__init__.py       | 18 ++++++++++++++++--
 python/paddle_serving_server/web_service.py    |  4 +---
 .../paddle_serving_server_gpu/web_service.py   |  6 ++----
 22 files changed, 38 insertions(+), 53 deletions(-)

diff --git a/python/examples/bert/benchmark.py b/python/examples/bert/benchmark.py
index baffb85d..e14c02fe 100644
--- a/python/examples/bert/benchmark.py
+++ b/python/examples/bert/benchmark.py
@@ -41,9 +41,7 @@ def single_func(idx, resource):
         fetch = ["pooled_output"]
         client = Client()
         client.load_client_config(args.model)
-        client.add_variant(
-            "var1", [resource["endpoint"][idx % len(resource["endpoint"])]], 50)
-        client.connect()
+        client.connect([resource["endpoint"][idx % len(resource["endpoint"])]])
 
         start = time.time()
         for i in range(1000):
diff --git a/python/examples/bert/benchmark_batch.py b/python/examples/bert/benchmark_batch.py
index a762842d..e0f67714 100644
--- a/python/examples/bert/benchmark_batch.py
+++ b/python/examples/bert/benchmark_batch.py
@@ -40,9 +40,7 @@ def single_func(idx, resource):
         fetch = ["pooled_output"]
         client = Client()
         client.load_client_config(args.model)
-        client.add_variant(
-            "var1", [resource["endpoint"][idx % len(resource["endpoint"])]], 50)
-        client.connect()
+        client.connect([resource["endpoint"][idx % len(resource["endpoint"])]])
         feed_batch = []
         for bi in range(args.batch_size):
             feed_batch.append(reader.process(dataset[bi]))
diff --git a/python/examples/bert/bert_client.py b/python/examples/bert/bert_client.py
index 53c3ed93..91323bc1 100644
--- a/python/examples/bert/bert_client.py
+++ b/python/examples/bert/bert_client.py
@@ -33,8 +33,7 @@ fetch = ["pooled_output"]
 endpoint_list = ["127.0.0.1:9494"]
 client = Client()
 client.load_client_config(args.model)
-client.add_variant("var1", endpoint_list, 50)
-client.connect()
+client.connect(endpoint_list)
 
 for line in fin:
     feed_dict = reader.process(line)
diff --git a/python/examples/criteo_ctr/benchmark.py b/python/examples/criteo_ctr/benchmark.py
index a1ba1193..8be7387d 100644
--- a/python/examples/criteo_ctr/benchmark.py
+++ b/python/examples/criteo_ctr/benchmark.py
@@ -43,9 +43,7 @@ def single_func(idx, resource):
         fetch = ["prob"]
         client = Client()
         client.load_client_config(args.model)
-        client.add_variant(
-            "var1", [resource["endpoint"][idx % len(resource["endpoint"])]], 50)
-        client.connect()
+        client.connect([resource["endpoint"][idx % len(resource["endpoint"])]])
 
         start = time.time()
         for i in range(1000):
diff --git a/python/examples/criteo_ctr/benchmark_batch.py b/python/examples/criteo_ctr/benchmark_batch.py
index ea706f5b..47b63a6a 100644
--- a/python/examples/criteo_ctr/benchmark_batch.py
+++ b/python/examples/criteo_ctr/benchmark_batch.py
@@ -43,9 +43,7 @@ def single_func(idx, resource):
         fetch = ["prob"]
         client = Client()
         client.load_client_config(args.model)
-        client.add_variant(
-            "var1", [resource["endpoint"][idx % len(resource["endpoint"])]], 50)
-        client.connect()
+        client.connect([resource["endpoint"][idx % len(resource["endpoint"])]])
 
         start = time.time()
         for i in range(1000):
diff --git a/python/examples/criteo_ctr/test_client.py b/python/examples/criteo_ctr/test_client.py
index 03e35575..9b3681c4 100644
--- a/python/examples/criteo_ctr/test_client.py
+++ b/python/examples/criteo_ctr/test_client.py
@@ -23,8 +23,7 @@ from paddle_serving_client.metric import auc
 
 client = Client()
 client.load_client_config(sys.argv[1])
-client.add_variant("var1", ["127.0.0.1:9292"], 50)
-client.connect()
+client.connect(["127.0.0.1:9292"])
 
 batch = 1
 buf_size = 100
diff --git a/python/examples/criteo_ctr_with_cube/benchmark.py b/python/examples/criteo_ctr_with_cube/benchmark.py
index 51e35289..e5bde9f9 100755
--- a/python/examples/criteo_ctr_with_cube/benchmark.py
+++ b/python/examples/criteo_ctr_with_cube/benchmark.py
@@ -30,8 +30,7 @@ args = benchmark_args()
 def single_func(idx, resource):
     client = Client()
     client.load_client_config('ctr_client_conf/serving_client_conf.prototxt')
-    client.add_variant("var1", ['127.0.0.1:9292'], 50)
-    client.connect()
+    client.connect(['127.0.0.1:9292'])
     batch = 1
     buf_size = 100
     dataset = criteo.CriteoDataset()
diff --git a/python/examples/criteo_ctr_with_cube/benchmark_batch.py b/python/examples/criteo_ctr_with_cube/benchmark_batch.py
index 4be1e0c5..b4b15892 100755
--- a/python/examples/criteo_ctr_with_cube/benchmark_batch.py
+++ b/python/examples/criteo_ctr_with_cube/benchmark_batch.py
@@ -31,8 +31,7 @@ def single_func(idx, resource):
     client = Client()
     print([resource["endpoint"][idx % len(resource["endpoint"])]])
     client.load_client_config('ctr_client_conf/serving_client_conf.prototxt')
-    client.add_variant("var1", ['127.0.0.1:9292'], 50)
-    client.connect()
+    client.connect(['127.0.0.1:9292'])
     batch = 1
     buf_size = 100
     dataset = criteo.CriteoDataset()
diff --git a/python/examples/criteo_ctr_with_cube/test_client.py b/python/examples/criteo_ctr_with_cube/test_client.py
index cdecaa58..bb667f88 100755
--- a/python/examples/criteo_ctr_with_cube/test_client.py
+++ b/python/examples/criteo_ctr_with_cube/test_client.py
@@ -22,8 +22,7 @@ from paddle_serving_client.metric import auc
 
 client = Client()
 client.load_client_config(sys.argv[1])
-client.add_variant("var1", ['127.0.0.1:9292'], 50)
-client.connect()
+client.connect(['127.0.0.1:9292'])
 
 batch = 1
 buf_size = 100
diff --git a/python/examples/fit_a_line/benchmark.py b/python/examples/fit_a_line/benchmark.py
index e5a5398a..0ddda2a0 100644
--- a/python/examples/fit_a_line/benchmark.py
+++ b/python/examples/fit_a_line/benchmark.py
@@ -28,8 +28,7 @@ def single_func(idx, resource):
     if args.request == "rpc":
         client = Client()
         client.load_client_config(args.model)
-        client.add_variant("var1", [args.endpoint], 50)
-        client.connect()
+        client.connect([args.endpoint])
         train_reader = paddle.batch(
             paddle.reader.shuffle(
                 paddle.dataset.uci_housing.train(), buf_size=500),
diff --git a/python/examples/fit_a_line/test_client.py b/python/examples/fit_a_line/test_client.py
index 3bdb9e82..442ed230 100644
--- a/python/examples/fit_a_line/test_client.py
+++ b/python/examples/fit_a_line/test_client.py
@@ -18,8 +18,7 @@ import sys
 
 client = Client()
 client.load_client_config(sys.argv[1])
-client.add_variant("var1", ["127.0.0.1:9393"], 50)
-client.connect()
+client.connect(["127.0.0.1:9393"])
 
 import paddle
 test_reader = paddle.batch(
diff --git a/python/examples/imagenet/benchmark.py b/python/examples/imagenet/benchmark.py
index 28ea5600..ece222f7 100644
--- a/python/examples/imagenet/benchmark.py
+++ b/python/examples/imagenet/benchmark.py
@@ -36,9 +36,7 @@ def single_func(idx, resource):
         fetch = ["score"]
         client = Client()
         client.load_client_config(args.model)
-        client.add_variant(
-            "var1", [resource["endpoint"][idx % len(resource["endpoint"])]], 50)
-        client.connect()
+        client.connect([resource["endpoint"][idx % len(resource["endpoint"])]])
 
         start = time.time()
         for i in range(1000):
diff --git a/python/examples/imagenet/benchmark_batch.py b/python/examples/imagenet/benchmark_batch.py
index eeee38d6..74771009 100644
--- a/python/examples/imagenet/benchmark_batch.py
+++ b/python/examples/imagenet/benchmark_batch.py
@@ -41,9 +41,7 @@ def single_func(idx, resource):
         fetch = ["score"]
         client = Client()
         client.load_client_config(args.model)
-        client.add_variant(
-            "var1", [resource["endpoint"][idx % len(resource["endpoint"])]], 50)
-        client.connect()
+        client.connect([resource["endpoint"][idx % len(resource["endpoint"])]])
         start = time.time()
         for i in range(1000):
             if args.batch_size >= 1:
diff --git a/python/examples/imagenet/image_rpc_client.py b/python/examples/imagenet/image_rpc_client.py
index c840e1b1..2367f509 100644
--- a/python/examples/imagenet/image_rpc_client.py
+++ b/python/examples/imagenet/image_rpc_client.py
@@ -19,8 +19,7 @@ import time
 
 client = Client()
 client.load_client_config(sys.argv[1])
-client.add_variant("var1", ["127.0.0.1:9295"], 50)
-client.connect()
+client.connect(["127.0.0.1:9295"])
 
 reader = ImageReader()
 start = time.time()
diff --git a/python/examples/imdb/benchmark_batch.py b/python/examples/imdb/benchmark_batch.py
index 17919bde..302d6335 100644
--- a/python/examples/imdb/benchmark_batch.py
+++ b/python/examples/imdb/benchmark_batch.py
@@ -35,8 +35,7 @@ def single_func(idx, resource):
     if args.request == "rpc":
         client = Client()
         client.load_client_config(args.model)
-        client.add_variant("var1", [args.endpoint], 50)
-        client.connect()
+        client.connect([args.endpoint])
         for i in range(1000):
             if args.batch_size >= 1:
                 feed_batch = []
diff --git a/python/examples/imdb/test_client.py b/python/examples/imdb/test_client.py
index 66d56afd..548a40e4 100644
--- a/python/examples/imdb/test_client.py
+++ b/python/examples/imdb/test_client.py
@@ -18,8 +18,7 @@ import sys
 
 client = Client()
 client.load_client_config(sys.argv[1])
-client.add_variant("var1", ["127.0.0.1:9292"], 50)
-client.connect()
+client.connect(["127.0.0.1:9292"])
 
 # you can define any english sentence or dataset here
 # This example reuses imdb reader in training, you
diff --git a/python/examples/imdb/test_client_batch.py b/python/examples/imdb/test_client_batch.py
index 13a12637..972b2c96 100644
--- a/python/examples/imdb/test_client_batch.py
+++ b/python/examples/imdb/test_client_batch.py
@@ -23,8 +23,7 @@ import time
 def batch_predict(batch_size=4):
     client = Client()
     client.load_client_config(conf_file)
-    client.add_variant("var1", ["127.0.0.1:9292"], 50)
-    client.connect()
+    client.connect(["127.0.0.1:9292"])
     fetch = ["acc", "cost", "prediction"]
     feed_batch = []
     for line in sys.stdin:
diff --git a/python/examples/lac/benchmark.py b/python/examples/lac/benchmark.py
index 2124257a..53d0881e 100644
--- a/python/examples/lac/benchmark.py
+++ b/python/examples/lac/benchmark.py
@@ -30,8 +30,7 @@ def single_func(idx, resource):
     if args.request == "rpc":
         client = Client()
         client.load_client_config(args.model)
-        client.add_variant("var1", [args.endpoint], 50)
-        client.connect()
+        client.connect([args.endpoint])
         fin = open("jieba_test.txt")
         for line in fin:
             feed_data = reader.process(line)
diff --git a/python/examples/lac/lac_client.py b/python/examples/lac/lac_client.py
index 2d6e250e..f2a8e858 100644
--- a/python/examples/lac/lac_client.py
+++ b/python/examples/lac/lac_client.py
@@ -22,8 +22,7 @@ import io
 
 client = Client()
 client.load_client_config(sys.argv[1])
-client.add_variant("var1", ["127.0.0.1:9280"], 50)
-client.connect()
+client.connect(["127.0.0.1:9280"])
 
 reader = LACReader(sys.argv[2])
 for line in sys.stdin:
diff --git a/python/paddle_serving_client/__init__.py b/python/paddle_serving_client/__init__.py
index 9c69a2d6..7cabdd2f 100644
--- a/python/paddle_serving_client/__init__.py
+++ b/python/paddle_serving_client/__init__.py
@@ -85,7 +85,7 @@ class Client(object):
         self.feed_names_to_idx_ = {}
         self.rpath()
         self.pid = os.getpid()
-        self.predictor_sdk_ = SDKConfig()
+        self.predictor_sdk_ = None
 
     def rpath(self):
         lib_path = os.path.dirname(paddle_serving_client.__file__)
@@ -138,13 +138,27 @@ class Client(object):
         return
 
     def add_variant(self, tag, cluster, variant_weight):
+        if self.predictor_sdk_ is None:
+            self.predictor_sdk_ = SDKConfig()
         self.predictor_sdk_.add_server_variant(tag, cluster,
                                                str(variant_weight))
 
-    def connect(self):
+    def connect(self, endpoints=None):
         # check whether current endpoint is available
         # init from client config
         # create predictor here
+        if endpoints is None:
+            if self.predictor_sdk_ is None:
+                raise SystemExit(
+                    "You must set the endpoints parameter or use add_variant function to create a variant."
+                )
+        else:
+            if self.predictor_sdk_ is None:
+                self.add_variant('var1', endpoints, 100)
+            else:
+                print(
+                    "endpoints({}) will not be enabled because you use the add_variant function.".
+                    format(endpoints))
         sdk_desc = self.predictor_sdk_.gen_desc()
         print(sdk_desc)
         self.client_handle_.create_predictor_by_desc(sdk_desc.SerializeToString(
diff --git a/python/paddle_serving_server/web_service.py b/python/paddle_serving_server/web_service.py
index 0957cbea..298e65e7 100755
--- a/python/paddle_serving_server/web_service.py
+++ b/python/paddle_serving_server/web_service.py
@@ -54,9 +54,7 @@ class WebService(object):
         client_service = Client()
         client_service.load_client_config(
             "{}/serving_server_conf.prototxt".format(self.model_config))
-        client_service.add_variant("var1",
-                                   ["0.0.0.0:{}".format(self.port + 1)], 100)
-        client_service.connect()
+        client_service.connect(["0.0.0.0:{}".format(self.port + 1)])
         service_name = "/" + self.name + "/prediction"
 
         @app_instance.route(service_name, methods=['POST'])
diff --git a/python/paddle_serving_server_gpu/web_service.py b/python/paddle_serving_server_gpu/web_service.py
index 32e59b76..22b534dd 100755
--- a/python/paddle_serving_server_gpu/web_service.py
+++ b/python/paddle_serving_server_gpu/web_service.py
@@ -91,8 +91,7 @@ class WebService(object):
         client = Client()
         client.load_client_config("{}/serving_server_conf.prototxt".format(
             self.model_config))
-        client.add_variant("var1", [endpoint], 100)
-        client.connect()
+        client.connect([endpoint])
         while True:
             request_json = inputqueue.get()
             feed, fetch = self.preprocess(request_json, request_json["fetch"])
@@ -135,8 +134,7 @@ class WebService(object):
         client = Client()
         client.load_client_config("{}/serving_server_conf.prototxt".format(
             self.model_config))
-        client.add_variant("var1", ["0.0.0.0:{}".format(self.port + 1)], 100)
-        client.connect()
+        client.connect(["0.0.0.0:{}".format(self.port + 1)])
         self.idx = 0
-- 
GitLab