From 540d336f413fc75ebb200b2baac3a2d1fbfb7ea0 Mon Sep 17 00:00:00 2001
From: gongweibao
Date: Mon, 15 Jun 2020 09:48:02 +0000
Subject: [PATCH] add fixtypo test=develop

---
 python/paddle_serving_server/__init__.py        | 11 ++++++-----
 python/paddle_serving_server/web_service.py     |  6 +++---
 python/paddle_serving_server_gpu/web_service.py |  4 ++--
 3 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/python/paddle_serving_server/__init__.py b/python/paddle_serving_server/__init__.py
index 3a5c0701..2d28e92d 100644
--- a/python/paddle_serving_server/__init__.py
+++ b/python/paddle_serving_server/__init__.py
@@ -231,6 +231,7 @@ class Server(object):
             self.infer_service_conf.services.extend([infer_service])
 
     def _prepare_resource(self, workdir):
+        self.workdir = workdir
         if self.resource_conf == None:
             with open("{}/{}".format(workdir, self.general_model_config_fn),
                       "w") as fout:
@@ -328,10 +329,10 @@ class Server(object):
         os.chdir(self.module_path)
         need_download = False
         device_version = self.get_device_version()
-        floder_name = device_version + serving_server_version
-        tar_name = floder_name + ".tar.gz"
+        folder_name = device_version + serving_server_version
+        tar_name = folder_name + ".tar.gz"
         bin_url = "https://paddle-serving.bj.bcebos.com/bin/" + tar_name
-        self.server_path = os.path.join(self.module_path, floder_name)
+        self.server_path = os.path.join(self.module_path, folder_name)
 
         #acquire lock
         version_file = open("{}/version.py".format(self.module_path), "r")
@@ -357,7 +358,7 @@ class Server(object):
                         os.remove(exe_path)
                     raise SystemExit(
                         'Decompressing failed, please check your permission of {} or disk space left.'.
-                        foemat(self.module_path))
+                        format(self.module_path))
                 finally:
                     os.remove(tar_name)
         #release lock
@@ -375,10 +376,10 @@ class Server(object):
 
         if not self.port_is_available(port):
             raise SystemExit("Prot {} is already used".format(port))
+        self.set_port(port)
         self._prepare_resource(workdir)
         self._prepare_engine(self.model_config_paths, device)
         self._prepare_infer_service(port)
-        self.port = port
         self.workdir = workdir
 
         infer_service_fn = "{}/{}".format(workdir, self.infer_service_fn)
diff --git a/python/paddle_serving_server/web_service.py b/python/paddle_serving_server/web_service.py
index b3fcc1b8..8f859c2c 100755
--- a/python/paddle_serving_server/web_service.py
+++ b/python/paddle_serving_server/web_service.py
@@ -85,9 +85,9 @@ class WebService(object):
                     fetch_map = self.client.predict(feed=feed, fetch=fetch)
                     for key in fetch_map:
                         fetch_map[key] = fetch_map[key].tolist()
-                    fetch_map = self.postprocess(
+                    result = self.postprocess(
                         feed=request.json["feed"], fetch=fetch, fetch_map=fetch_map)
-                    result = {"result": fetch_map}
+                    result = {"result": result}
             except ValueError:
                 result = {"result": "Request Value Error"}
             return result
@@ -122,7 +122,7 @@ class WebService(object):
             processes=1)
 
     def get_app_instance(self):
-        return self.app_instance
+        return app_instance
 
     def preprocess(self, feed=[], fetch=[]):
         return feed, fetch
diff --git a/python/paddle_serving_server_gpu/web_service.py b/python/paddle_serving_server_gpu/web_service.py
index 76721de8..4c887f0e 100644
--- a/python/paddle_serving_server_gpu/web_service.py
+++ b/python/paddle_serving_server_gpu/web_service.py
@@ -50,12 +50,12 @@ class WebService(object):
         general_infer_op = op_maker.create('general_infer')
         general_response_op = op_maker.create('general_response')
 
-        op_seq_maker = serving.OpSeqMaker()
+        op_seq_maker = OpSeqMaker()
         op_seq_maker.add_op(read_op)
         op_seq_maker.add_op(general_infer_op)
         op_seq_maker.add_op(general_response_op)
 
-        server = serving.Server()
+        server = Server()
         server.set_op_sequence(op_seq_maker.get_op_sequence())
         server.set_num_threads(thread_num)
 
-- 
GitLab
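
As context for the `web_service.py` hunk above: the old code reused `fetch_map` to hold the output of `postprocess`, so the raw prediction dict was overwritten before being wrapped into the HTTP response. Below is a minimal, self-contained sketch of the corrected flow; the standalone function names and the `fake_fetch_map` data are illustrative only and are not part of the patch or of Paddle Serving's API.

```python
# Illustrative sketch of the corrected get_prediction flow (not part of the patch).
# The point of the fix: keep the raw fetch_map intact and store the postprocessed
# value under a separate name, `result`, before building the response body.


def postprocess(feed, fetch, fetch_map):
    # WebService's default postprocess returns fetch_map unchanged;
    # user subclasses may reshape it here.
    return fetch_map


def get_prediction(feed, fetch, fetch_map):
    # In the real service, fetch_map comes from self.client.predict(feed=feed, fetch=fetch);
    # it is passed in directly here so the sketch stays runnable on its own.
    for key in fetch_map:
        fetch_map[key] = list(fetch_map[key])  # stand-in for numpy's .tolist()
    result = postprocess(feed=feed, fetch=fetch, fetch_map=fetch_map)
    return {"result": result}


if __name__ == "__main__":
    fake_fetch_map = {"score": (0.1, 0.9)}  # hypothetical model output
    print(get_prediction(feed={"x": [1.0, 2.0]}, fetch=["score"],
                         fetch_map=fake_fetch_map))
```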