Commit c223d381 authored by 走神的阿圆

update serving v2

Parent fcf87035
@@ -27,6 +27,8 @@ from paddlehub.commands.base_command import BaseCommand, ENTRY
from paddlehub.serving import app_single as app
from paddlehub.common.dir import CONF_HOME
from paddlehub.common.hub_server import CacheUpdater
from paddlehub.serving.model_service.base_model_service import cv_module_info
from paddlehub.serving.model_service.base_model_service import nlp_module_info
import multiprocessing
import time
import signal
@@ -105,6 +107,11 @@ class ServingCommand(BaseCommand):
        self.parser.add_argument("--gpu", "-i", nargs="?", default=0)
        self.parser.add_argument(
            "--use_singleprocess", action="store_true", default=False)
        self.parser.add_argument(
            "--modules_info", "-mi", default={}, type=json.loads)
        self.parser.add_argument(
            "--workers", "-w", nargs="?", default=number_of_workers())
        self.modules_info = {}

    def dump_pid_file(self):
        pid = os.getpid()
@@ -184,76 +191,59 @@ class ServingCommand(BaseCommand):
        except:
            return False

    def preinstall_modules(self):
        for key, value in self.modules_info.items():
            init_args = value["init_args"]
            CacheUpdater(
                "hub_serving_start",
                module=key,
                version=init_args.get("version", "0.0.0")).start()
            if "dir" not in init_args:
                init_args.update({"name": key})
            m = hub.Module(**init_args)
            method_name = m.serving_func_name
            if method_name is None:
                raise RuntimeError(
                    "{} cannot be used for predicting".format(key))
                exit(1)
            category = str(m.type).split("/")[0].upper()
            self.modules_info[key].update({
                "method_name": method_name,
                "code_version": m.code_version,
                "version": m.version,
                "category": category,
                "module": m,
                "name": m.name
            })

    def start_app_with_file(self):
        port = self.args.config.get("port", 8866)
        if ServingCommand.is_port_occupied("127.0.0.1", port) is True:
            print("Port %s is occupied, please change it." % port)
            return False
        self.modules_info = self.args.config.get("modules_info")
        self.preinstall_modules()
        options = {
            "bind": "0.0.0.0:%s" % port,
            "workers": self.args.workers,
            "pid": "./pid.txt"
        }
        self.dump_pid_file()
        StandaloneApplication(
            app.create_app(init_flag=False, configs=self.modules_info),
            options).run()

    def start_single_app_with_file(self):
        port = self.args.config.get("port", 8866)
        if ServingCommand.is_port_occupied("127.0.0.1", port) is True:
            print("Port %s is occupied, please change it." % port)
            return False
        self.modules_info = self.args.config.get("modules_info")
        self.preinstall_modules()
        self.dump_pid_file()
        app.run(configs=self.modules_info, port=port)

    @staticmethod
    def start_multi_app_with_file(configs):
@@ -270,23 +260,15 @@ class ServingCommand(BaseCommand):
    def start_app_with_args(self, workers):
        module = self.args.modules
        if module is not None:
            port = self.args.port
            if ServingCommand.is_port_occupied("127.0.0.1", port) is True:
                print("Port %s is occupied, please change it." % port)
                return False
            self.preinstall_modules()
            options = {"bind": "0.0.0.0:%s" % port, "workers": workers}
            self.dump_pid_file()
            StandaloneApplication(
                app.create_app(init_flag=False, configs=self.modules_info),
                options).run()
        else:
            print("Lack of necessary parameters!")
@@ -294,41 +276,27 @@ class ServingCommand(BaseCommand):
    def start_single_app_with_args(self):
        module = self.args.modules
        if module is not None:
            port = self.args.port
            if ServingCommand.is_port_occupied("127.0.0.1", port) is True:
                print("Port %s is occupied, please change it." % port)
                return False
            self.preinstall_modules()
            self.dump_pid_file()
            app.run(configs=self.modules_info, port=port)
        else:
            print("Lack of necessary parameters!")

    def start_multi_app_with_args(self):
        module = self.args.modules
        if module is not None:
            port = self.args.port
            workers = number_of_workers()
            if ServingCommand.is_port_occupied("127.0.0.1", port) is True:
                print("Port %s is occupied, please change it." % port)
                return False
            self.preinstall_modules()
            options = {"bind": "0.0.0.0:%s" % port, "workers": workers}
            configs = {"modules_info": self.modules_info}
            StandaloneApplication(
                app.create_app(init_flag=False, configs=configs),
                options).run()
@@ -336,31 +304,51 @@ class ServingCommand(BaseCommand):
        else:
            print("Lack of necessary parameters!")

    def link_module_info(self):
        if self.args.config:
            if os.path.exists(self.args.config):
                with open(self.args.config, "r") as fp:
                    self.args.config = json.load(fp)
                    self.modules_info = self.args.config["modules_info"]
            else:
                raise RuntimeError(
                    "{} does not exist.".format(self.args.config))
                exit(1)
        else:
            for item in self.args.modules:
                version = None
                if "==" in item:
                    module = item.split("==")[0]
                    version = item.split("==")[1]
                else:
                    module = item
                self.modules_info.update({
                    module: {
                        "init_args": {
                            "version": version
                        },
                        "predict_args": {
                            "use_gpu": self.args.use_gpu
                        }
                    }
                })

    def start_serving(self):
        single_mode = self.args.use_singleprocess
        if self.args.config is not None:
            self.args.workers = self.args.config.get("workers",
                                                     number_of_workers())
            use_multiprocess = self.args.config.get("use_multiprocess", False)
            if use_multiprocess is False:
                self.start_single_app_with_file()
            elif platform.system() == "Windows":
                print(
                    "Warning: Windows cannot use multiprocess working "
                    "mode, PaddleHub Serving will switch to single process mode"
                )
                self.start_single_app_with_file()
            else:
                self.start_app_with_file()
        else:
            if single_mode is True:
                self.start_single_app_with_args()
@@ -372,7 +360,7 @@ class ServingCommand(BaseCommand):
                self.start_single_app_with_args()
            else:
                if self.args.use_multiprocess is True:
                    self.start_app_with_args(self.args.workers)
                else:
                    self.start_app_with_args(1)
@@ -393,10 +381,10 @@ class ServingCommand(BaseCommand):
        str += "\tPre-install modules via the parameter list.\n"
        str += "--port/-p XXXX\n"
        str += "\tUse port XXXX for serving.\n"
        str += "--use_multiprocess\n"
        str += "\tChoose multiprocess mode, cannot be used on Windows.\n"
        str += "--modules_info\n"
        str += "\tSet module config in PaddleHub Serving.\n"
        str += "--config/-c file_path\n"
        str += "\tUse configs in file to start PaddleHub Serving. "
        str += "Other parameters will be ignored if you specify the parameter.\n"
@@ -422,6 +410,7 @@ class ServingCommand(BaseCommand):
        except:
            ServingCommand.show_help()
            return False
        self.link_module_info()
        if self.args.sub_command == "start":
            if self.args.bert_service == "bert_service":
                ServingCommand.start_bert_serving(self.args)
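For reference, link_module_info above normalizes both command-line input and config files into one dictionary keyed by module name. A hedged sketch of that shape (the module name and version below are placeholders, not part of this commit):

    # Hypothetical shape of self.modules_info after link_module_info,
    # whether it came from "-m lac==2.1.0" or from --modules_info/-c JSON.
    modules_info = {
        "lac": {
            "init_args": {
                "version": "2.1.0"
            },
            "predict_args": {
                "use_gpu": False
            }
        }
    }

preinstall_modules then loads each entry with hub.Module(**init_args) and attaches method_name, category, and the module object to the same dictionary.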
@@ -65,10 +65,14 @@ def base64s_to_cvmats(base64s):
    return base64s


def handle_mask_results(results, data_len):
    result = []
    if len(results) <= 0 and data_len != 0:
        return [{
            "data": "No face.",
            "id": i,
            "path": ""
        } for i in range(1, data_len + 1)]
    _id = results[0]["id"]
    _item = {
        "data": [],
@@ -87,6 +91,15 @@ def handle_mask_results(results):
            "id": item.get("id", _id)
        }
        result.append(_item)
    for index in range(1, data_len + 1):
        if index > len(result):
            result.append({"data": "No face.", "id": index, "path": ""})
        elif result[index - 1]["id"] != index:
            result.insert(index - 1, {
                "data": "No face.",
                "id": index,
                "path": ""
            })
    return result
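A small worked example of the padding behavior added here (hedged; the file names are made up): with three input images but detections only for the first and third, the missing slot is filled so the response stays aligned with the request order.

    # Hypothetical grouped results for images 1 and 3 (no face in image 2).
    results = [
        {"data": ["face_a"], "path": "1_a.jpg", "id": 1},
        {"data": ["face_c"], "path": "3_c.jpg", "id": 3},
    ]
    # handle_mask_results(results, data_len=3) inserts a placeholder for id 2:
    # [{... "id": 1 ...},
    #  {"data": "No face.", "id": 2, "path": ""},
    #  {... "id": 3 ...}]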
@@ -134,6 +134,19 @@ def runnable(func):
    return _wrapper


_module_serving_func = {}


def serving(func):
    mod = func.__module__ + "." + inspect.stack()[1][3]
    _module_serving_func[mod] = func.__name__

    def _wrapper(*args, **kwargs):
        return func(*args, **kwargs)

    return _wrapper


class Module(object):
    _record = {}
@@ -184,6 +197,7 @@ class Module(object):
            self._run_func = getattr(self, _run_func_name)
        else:
            self._run_func = None
        self._serving_func_name = _module_serving_func.get(mod, None)
        self._code_version = "v2"
        self._directory = directory
        self.module_desc_path = os.path.join(self.directory, MODULE_DESC_PBNAME)
@@ -292,6 +306,10 @@ class Module(object):
    def is_runnable(self):
        return self._run_func != None

    @property
    def serving_func_name(self):
        return self._serving_func_name

    def _initialize(self):
        pass
@@ -353,6 +371,11 @@ class ModuleV1(Module):
        self._restore_parameter(self.program)
        self._recover_variable_info(self.program)

    @property
    def serving_func_name(self):
        serving_func_name = self.desc.attr.map.data['default_signature'].s
        return serving_func_name if serving_func_name != "" else None

    def _dump_processor(self):
        import inspect
        pymodule = inspect.getmodule(self.processor)
@@ -576,6 +599,10 @@ class ModuleV1(Module):
    def is_runnable(self):
        return self.default_signature != None

    @property
    def code_version(self):
        return self._code_version

    def context(self,
                sign_name=None,
                for_test=False,
@@ -12,62 +12,47 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
from flask import Flask, request, render_template
from paddlehub.serving.model_service.base_model_service import cv_module_info
from paddlehub.serving.model_service.base_model_service import nlp_module_info
from paddlehub.common import utils
import functools
import time
import os
import base64
import logging
import glob


def predict_v2(module_info, input):
    serving_method_name = module_info["method_name"]
    serving_method = getattr(module_info["module"], serving_method_name)
    predict_args = module_info["predict_args"]
    predict_args.update({"data": input})

    for item in serving_method.__code__.co_varnames:
        if item in module_info.keys():
            predict_args.update({item: module_info[item]})
    output = serving_method(**predict_args)
    return {"results": output}


def predict_nlp(module_info, input_text, req_id, extra=None):
    method_name = module_info["method_name"]
    predict_method = getattr(module_info["module"], method_name)

    predict_args = {"data": input_text}
    if isinstance(predict_method, functools.partial):
        predict_method = predict_method.func
        predict_args.update({"sign_name": method_name})

    for item in predict_method.__code__.co_varnames:
        if item in module_info.keys():
            predict_args.update({item: module_info[item]})

    if module_info["name"] == "lac" and extra.get("user_dict", []) != []:
        predict_args.update({"user_dict": extra.get("user_dict", [])[0]})

    try:
        res = predict_method(**predict_args)
    except Exception as err:
        curr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
        print(curr, " - ", err)
@@ -80,35 +65,45 @@ def predict_nlp(module, input_text, req_id, batch_size, extra=None):
    return {"results": res}


def predict_classification(module_info, input_img, id, extra={}):
    method_name = module_info["method_name"]
    module = module_info["module"]
    predict_method = getattr(module, method_name)
    predict_args = {"data": {"image": input_img}}
    if isinstance(predict_method, functools.partial):
        predict_method = predict_method.func
        predict_args.update({"sign_name": method_name})
    for item in predict_method.__code__.co_varnames:
        if item in module_info.keys():
            predict_args.update({item: module_info[item]})
    try:
        results = predict_method(**predict_args)
    except Exception as err:
        curr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
        print(curr, " - ", err)
        return {"result": "Please check data format!"}
    finally:
        for item in input_img:
            if os.path.exists(item):
                os.remove(item)
    return results
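The co_varnames loop above (repeated in the other predict_* helpers and in predict_v2) is what replaces the old global use_gpu / batch_size plumbing: any key stored in module_info that matches a name known to the serving method is forwarded as a keyword argument. A rough, self-contained sketch of the idea, with made-up names:

    # Hedged sketch of the argument-forwarding pattern used above.
    def forward_matching_args(method, module_info, predict_args):
        # copy every module_info entry whose key is also a parameter
        # (or local variable) name of `method` into the call arguments
        for item in method.__code__.co_varnames:
            if item in module_info.keys():
                predict_args.update({item: module_info[item]})
        return predict_args

    # e.g. a hypothetical "batch_size" entry at the top level of module_info
    # would be passed through only if the method declares that name.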
def predict_gan(module_info, input_img, id, extra={}):
    method_name = module_info["method_name"]
    module = module_info["module"]
    predict_method = getattr(module, method_name)
    predict_args = {"data": {"image": input_img}}
    predict_args["data"].update(extra)
    if isinstance(predict_method, functools.partial):
        predict_method = predict_method.func
        predict_args.update({"sign_name": method_name})
    for item in predict_method.__code__.co_varnames:
        if item in module_info.keys():
            predict_args.update({item: module_info[item]})
    results = predict_method(**predict_args)
    try:
        pass
    except Exception as err:
        curr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
        print(curr, " - ", err)
@@ -116,7 +111,6 @@ def predict_gan(module, input_img, id, batch_size, extra={}):
    finally:
        base64_list = []
        results_pack = []
        for index in range(len(input_img)):
            item = input_img[index]
            output_file = results[index].split(" ")[-1]
@@ -135,22 +129,29 @@
    return results_pack


def predict_mask(module_info, input_img, id, extra=None, r_img=True):
    output_folder = "detection_result"
    method_name = module_info["method_name"]
    module = module_info["module"]
    predict_method = getattr(module, method_name)
    data_len = len(input_img) if input_img is not None else 0
    data = {}
    if input_img is not None:
        input_img = {"image": input_img}
        data.update(input_img)
    if extra is not None:
        data.update(extra)
        r_img = True if "r_img" in extra.keys() else False
    predict_args = {"data": data}
    if isinstance(predict_method, functools.partial):
        predict_method = predict_method.func
        predict_args.update({"sign_name": method_name})
    for item in predict_method.__code__.co_varnames:
        if item in module_info.keys():
            predict_args.update({item: module_info[item]})
    try:
        results = predict_method(**predict_args)
        results = utils.handle_mask_results(results, data_len)
    except Exception as err:
        curr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
        print(curr, " - ", err)
@@ -160,43 +161,59 @@ def predict_mask(module, input_img, id, batch_size, extra=None, r_img=False):
        results_pack = []
        if input_img is not None:
            if r_img is False:
                for index in range(len(results)):
                    results[index]["path"] = ""
                results_pack = results
                str_id = id + "*"
                files_deleted = glob.glob(str_id)
                for path in files_deleted:
                    if os.path.exists(path):
                        os.remove(path)
            else:
                input_img = input_img.get("image", [])
                for index in range(len(input_img)):
                    item = input_img[index]
                    file_path = os.path.join(output_folder, item)
                    if not os.path.exists(file_path):
                        results_pack.append(results[index])
                        os.remove(item)
                    else:
                        with open(file_path, "rb") as fp:
                            b_head = "data:image/" + item.split(
                                ".")[-1] + ";base64"
                            b_body = base64.b64encode(fp.read())
                            b_body = str(b_body).replace("b'", "").replace(
                                "'", "")
                            b_img = b_head + "," + b_body
                            base64_list.append(b_img)
                            results[index]["path"] = results[index][
                                "path"].replace(id + "_", "") \
                                if results[index]["path"] != "" else ""
                            results[index].update({"base64": b_img})
                            results_pack.append(results[index])
                        os.remove(item)
                        os.remove(os.path.join(output_folder, item))
        else:
            results_pack = results
    return results_pack
def predict_object_detection(module_info, input_img, id, extra={}):
    output_folder = "detection_result"
    method_name = module_info["method_name"]
    module = module_info["module"]
    predict_method = getattr(module, method_name)
    predict_args = {"data": {"image": input_img}}
    if isinstance(predict_method, functools.partial):
        predict_method = predict_method.func
        predict_args.update({"sign_name": method_name})
    for item in predict_method.__code__.co_varnames:
        if item in module_info.keys():
            predict_args.update({item: module_info[item]})
    try:
        results = predict_method(**predict_args)
    except Exception as err:
        curr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
        print(curr, " - ", err)
@@ -204,7 +221,6 @@ def predict_object_detection(module, input_img, id, batch_size, extra={}):
    finally:
        base64_list = []
        results_pack = []
        for index in range(len(input_img)):
            item = input_img[index]
            with open(os.path.join(output_folder, item), "rb") as fp:
@@ -222,15 +238,19 @@ def predict_object_detection(module, input_img, id, batch_size, extra={}):
    return results_pack


def predict_semantic_segmentation(module_info, input_img, id, extra={}):
    method_name = module_info["method_name"]
    module = module_info["module"]
    predict_method = getattr(module, method_name)
    predict_args = {"data": {"image": input_img}}
    if isinstance(predict_method, functools.partial):
        predict_method = predict_method.func
        predict_args.update({"sign_name": method_name})
    for item in predict_method.__code__.co_varnames:
        if item in module_info.keys():
            predict_args.update({item: module_info[item]})
    try:
        results = predict_method(**predict_args)
    except Exception as err:
        curr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
        print(curr, " - ", err)
@@ -238,11 +258,8 @@ def predict_semantic_segmentation(module, input_img, id, batch_size, extra={}):
    finally:
        base64_list = []
        results_pack = []
        for index in range(len(input_img)):
            item = input_img[index]
            with open(results[index]["processed"], "rb") as fp:
                b_head = "data:image/png;base64"
                b_body = base64.b64encode(fp.read())
@@ -266,13 +283,11 @@ def create_app(init_flag=False, configs=None):
    if init_flag is False:
        if configs is None:
            raise RuntimeError("Lack of necessary configs.")
        config_with_file(configs)

    app_instance = Flask(__name__)
    app_instance.config["JSON_AS_ASCII"] = False
    logging.basicConfig()
    gunicorn_logger = logging.getLogger('gunicorn.error')
    app_instance.logger.handlers = gunicorn_logger.handlers
    app_instance.logger.setLevel(gunicorn_logger.level)
@@ -287,24 +302,22 @@ def create_app(init_flag=False, configs=None):
    @app_instance.route("/get/modules", methods=["GET", "POST"])
    def get_modules_info():
        module_info = {}
        if len(nlp_module_info.nlp_modules) > 0:
            module_info.update({"nlp_module": [{"Choose...": "Choose..."}]})
            for item in nlp_module_info.nlp_modules:
                module_info["nlp_module"].append({item: item})
        if len(cv_module_info.cv_modules) > 0:
            module_info.update({"cv_module": [{"Choose...": "Choose..."}]})
            for item in cv_module_info.cv_modules:
                module_info["cv_module"].append({item: item})
        return {"module_info": module_info}

    @app_instance.route("/predict/image/<module_name>", methods=["POST"])
    def predict_image(module_name):
        if request.path.split("/")[-1] not in cv_module_info.modules_info:
            return {"error": "Module {} is not available.".format(module_name)}
        req_id = request.data.get("id")
        img_base64 = request.form.getlist("image")
        extra_info = {}
        for item in list(request.form.keys()):
@@ -337,26 +350,28 @@ def create_app(init_flag=False, configs=None):
                file_name = req_id + "_" + item.filename
                item.save(file_name)
                file_name_list.append(file_name)
        # module = default_module_manager.get_module(module_name)
        # predict_func_name = cv_module_info.get_module_info(module_name)[
        #     "method_name"]
        module_info = cv_module_info.get_module_info(module_name)
        module = module_info["module"]
        predict_func_name = cv_module_info.cv_module_method.get(module_name, "")
        if predict_func_name != "":
            predict_func = eval(predict_func_name)
        else:
            module_type = module.type.split("/")[-1].replace("-", "_").lower()
            predict_func = eval("predict_" + module_type)
        if file_name_list == []:
            file_name_list = None
        if extra_info == {}:
            extra_info = None
        results = predict_func(module_info, file_name_list, req_id, extra_info)
        r = {"results": str(results)}
        return r
@app_instance.route("/predict/text/<module_name>", methods=["POST"]) @app_instance.route("/predict/text/<module_name>", methods=["POST"])
def predict_text(module_name): def predict_text(module_name):
if request.path.split("/")[-1] not in nlp_module: if request.path.split("/")[-1] not in nlp_module_info.nlp_modules:
return {"error": "Module {} is not available.".format(module_name)} return {"error": "Module {} is not available.".format(module_name)}
req_id = request.data.get("id") req_id = request.data.get("id")
inputs = {} inputs = {}
...@@ -369,37 +384,31 @@ def create_app(init_flag=False, configs=None): ...@@ -369,37 +384,31 @@ def create_app(init_flag=False, configs=None):
file_name = req_id + "_" + file.filename file_name = req_id + "_" + file.filename
files[file_key].append(file_name) files[file_key].append(file_name)
file.save(file_name) file.save(file_name)
module = default_module_manager.get_module(module_name) module_info = nlp_module_info.get_module_info(module_name)
results = predict_nlp(
module=module, if module_info["code_version"] == "v2":
input_text=inputs, results = predict_v2(module_info, inputs)
req_id=req_id, else:
batch_size=batch_size_dict.get(module_name, 1), results = predict_nlp(
extra=files) module_info=module_info,
input_text=inputs,
req_id=req_id,
extra=files)
return results return results
return app_instance return app_instance

def config_with_file(configs):
    for key, value in configs.items():
        if "CV" == value["category"]:
            cv_module_info.add_module(key, {key: value})
        elif "NLP" == value["category"]:
            nlp_module_info.add_module(key, {key: value})
        print(key, "==", value["version"])


def run(configs=None, port=8866):
    if configs is not None:
        config_with_file(configs)
    else:
@@ -16,6 +16,92 @@ import six
import abc


class BaseModuleInfo(object):
    def __init__(self):
        self._modules_info = {}
        self._modules = []

    def set_modules_info(self, modules_info):
        # dict of modules info.
        self._modules_info = modules_info
        # list of modules name.
        self._modules = list(self._modules_info.keys())

    def get_module_info(self, module_name):
        return self._modules_info[module_name]

    def add_module(self, module_name, module_info):
        self._modules_info.update(module_info)
        self._modules.append(module_name)

    def get_module(self, module_name):
        return self.get_module_info(module_name).get("module", None)

    @property
    def modules_info(self):
        return self._modules_info


class CVModuleInfo(BaseModuleInfo):
    def __init__(self):
        self.cv_module_method = {
            "vgg19_imagenet": "predict_classification",
            "vgg16_imagenet": "predict_classification",
            "vgg13_imagenet": "predict_classification",
            "vgg11_imagenet": "predict_classification",
            "shufflenet_v2_imagenet": "predict_classification",
            "se_resnext50_32x4d_imagenet": "predict_classification",
            "se_resnext101_32x4d_imagenet": "predict_classification",
            "resnet_v2_50_imagenet": "predict_classification",
            "resnet_v2_34_imagenet": "predict_classification",
            "resnet_v2_18_imagenet": "predict_classification",
            "resnet_v2_152_imagenet": "predict_classification",
            "resnet_v2_101_imagenet": "predict_classification",
            "pnasnet_imagenet": "predict_classification",
            "nasnet_imagenet": "predict_classification",
            "mobilenet_v2_imagenet": "predict_classification",
            "googlenet_imagenet": "predict_classification",
            "alexnet_imagenet": "predict_classification",
            "yolov3_coco2017": "predict_object_detection",
            "ultra_light_fast_generic_face_detector_1mb_640":
            "predict_object_detection",
            "ultra_light_fast_generic_face_detector_1mb_320":
            "predict_object_detection",
            "ssd_mobilenet_v1_pascal": "predict_object_detection",
            "pyramidbox_face_detection": "predict_object_detection",
            "faster_rcnn_coco2017": "predict_object_detection",
            "cyclegan_cityscapes": "predict_gan",
            "deeplabv3p_xception65_humanseg": "predict_semantic_segmentation",
            "ace2p": "predict_semantic_segmentation",
            "pyramidbox_lite_server_mask": "predict_mask",
            "pyramidbox_lite_mobile_mask": "predict_mask"
        }
        super(CVModuleInfo, self).__init__()

    @property
    def cv_modules(self):
        return self._modules

    def add_module(self, module_name, module_info):
        if "CV" == module_info[module_name].get("category", ""):
            self._modules_info.update(module_info)
            self._modules.append(module_name)


class NLPModuleInfo(BaseModuleInfo):
    def __init__(self):
        super(NLPModuleInfo, self).__init__()

    @property
    def nlp_modules(self):
        return self._modules

    def add_module(self, module_name, module_info):
        if "NLP" == module_info[module_name].get("category", ""):
            self._modules_info.update(module_info)
            self._modules.append(module_name)


class BaseModelService(object):
    def _initialize(self):
        pass
@@ -31,3 +117,7 @@ class BaseModelService(object):
    @abc.abstractmethod
    def _post_processing(self, data):
        pass


cv_module_info = CVModuleInfo()
nlp_module_info = NLPModuleInfo()
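For orientation, a hedged sketch of how these singletons are used by app_single above; the module name, method name, and module object below are placeholders:

    # Hypothetical registration, mirroring config_with_file() in app_single.py.
    info = {
        "category": "CV",
        "method_name": "segmentation",
        "version": "1.0.0",
        "module": None,  # would be the loaded hub.Module instance
    }
    cv_module_info.add_module("ace2p", {"ace2p": info})
    print(cv_module_info.cv_modules)                    # ["ace2p"]
    print(cv_module_info.get_module_info("ace2p")["method_name"])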
{
    "modules_info": {
        "yolov3_darknet53_coco2017": {
            "init_args": {
                "version": "1.0.0"
            },
            "predict_args": {
                "batch_size": 1,
                "use_gpu": false
            }
        },
        "lac-v2": {
            "init_args": {
                "version": "2.1.0",
                "user_dict": "./dict.txt"
            },
            "predict_args": {
                "batch_size": 1,
                "use_gpu": false
            }
        }
    },
    "port": 8866,
    "use_multiprocess": false,
    "workers": 2
}
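Assuming this JSON is saved as config.json (a placeholder path), serving would then be started from it with the --config option described in the help text above:

    hub serving start --config config.json

start_serving reads port, use_multiprocess, and workers from the file, and preinstall_modules loads each entry under modules_info before the app is created.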