# coding:utf-8
# Copyright (c) 2019  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
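
"""Implementation of the PaddleHub serving command."""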

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import argparse
import os
import platform
import socket
import json
import paddlehub as hub
from paddlehub.commands.base_command import BaseCommand, ENTRY
from paddlehub.serving import app_single as app
from paddlehub.common.dir import CONF_HOME
from paddlehub.common.hub_server import CacheUpdater
import multiprocessing
import time
import signal

if platform.system() == "Windows":

    class StandaloneApplication(object):
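        """Placeholder used on Windows, where gunicorn is not available."""
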
        def __init__(self):
            pass

        def load_config(self):
            pass

        def load(self):
            pass
else:
    import gunicorn.app.base

    class StandaloneApplication(gunicorn.app.base.BaseApplication):
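        """Gunicorn application wrapper for serving the given WSGI app."""
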
        def __init__(self, app, options=None):
            self.options = options or {}
            self.application = app
            super(StandaloneApplication, self).__init__()

        def load_config(self):
            config = {
                key: value
                for key, value in self.options.items()
                if key in self.cfg.settings and value is not None
            }
            for key, value in config.items():
                self.cfg.set(key.lower(), value)

        def load(self):
            return self.application


def number_of_workers():
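    """Worker count recommended by gunicorn: (2 * CPU cores) + 1."""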
    return (multiprocessing.cpu_count() * 2) + 1


def pid_is_exist(pid):
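    """Check whether a process with the given pid exists via signal 0."""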
    try:
        os.kill(pid, 0)
    except OSError:
        return False
    else:
        return True


class ServingCommand(BaseCommand):
    name = "serving"
    module_list = []

    def __init__(self, name):
        super(ServingCommand, self).__init__(name)
        self.show_in_help = True
        self.description = "Start Module Serving or Bert Service for online prediction."
        self.parser = argparse.ArgumentParser(
            description=self.__class__.__doc__,
            prog='%s %s [COMMAND]' % (ENTRY, name),
            usage='%(prog)s',
            add_help=True)
        self.parser.add_argument("command")
        self.parser.add_argument("sub_command")
        self.parser.add_argument("bert_service", nargs="?")
        self.sub_parse = self.parser.add_mutually_exclusive_group(
            required=False)
        self.parser.add_argument(
            "--use_gpu", action="store_true", default=False)
        self.parser.add_argument(
            "--use_multiprocess", action="store_true", default=False)
        self.parser.add_argument("--modules", "-m", nargs="+")
        self.parser.add_argument("--config", "-c", nargs="?")
        self.parser.add_argument("--port", "-p", nargs="?", default=8866)
        self.parser.add_argument("--gpu", "-i", nargs="?", default=0)

    def dump_pid_file(self):
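        """Record pid, modules and start time under CONF_HOME."""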
        pid = os.getpid()
        filepath = os.path.join(CONF_HOME,
                                "serving_" + str(self.args.port) + ".json")
        if os.path.exists(filepath):
            os.remove(filepath)
        with open(filepath, "w") as fp:
            info = {
                "pid": pid,
                "module": self.args.modules,
                "start_time": time.time()
            }
            json.dump(info, fp)

    @staticmethod
    def load_pid_file(filepath, port=None):
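        """Load the pid record for a port, or return False if missing."""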
        if port is None:
            port = os.path.basename(filepath).split(".")[0].split("_")[1]
        if not os.path.exists(filepath):
            print(
                "PaddleHub Serving config file does not exist, "
                "please confirm the port [%s] you specified is correct." % port)
            return False
        with open(filepath, "r") as fp:
            info = json.load(fp)
            return info

    def stop_serving(self, port):
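        """Stop the serving process recorded for the given port."""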
        filepath = os.path.join(CONF_HOME, "serving_" + str(port) + ".json")
        info = self.load_pid_file(filepath, port)
        if info is False:
            return
        pid = info["pid"]
        module = info["module"]
        start_time = info["start_time"]
        if os.path.exists(filepath):
            os.remove(filepath)

        if not pid_is_exist(pid):
            print("PaddleHub Serving has been stopped.")
            return
        print("PaddleHub Serving will stop.")
        CacheUpdater(
            "hub_serving_stop",
            module=module,
            addition={
                "period_time": time.time() - start_time
            }).start()
        if platform.system() == "Windows":
            os.kill(pid, signal.SIGTERM)
        else:
            os.killpg(pid, signal.SIGTERM)

    @staticmethod
    def start_bert_serving(args):
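        """Start Bert Service with paddle_gpu_serving (Linux only)."""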
        if platform.system() != "Linux":
            print("Error. Bert Service only supports Linux.")
            return False

        if ServingCommand.is_port_occupied("127.0.0.1", args.port) is True:
            print("Port %s is occupied, please change it." % args.port)
            return False

        from paddle_gpu_serving.run import BertServer
        bs = BertServer(with_gpu=args.use_gpu)
        bs.with_model(model_name=args.modules[0])
        bs.run(gpu_index=args.gpu, port=int(args.port))

    @staticmethod
    def is_port_occupied(ip, port):
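        """Check port availability by attempting a TCP connection."""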
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.connect((ip, int(port)))
            s.shutdown(2)
            return True
        except socket.error:
            return False

    @staticmethod
    def preinstall_modules(modules):
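        """Install 'name==version' modules and collect serving configs."""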
        configs = []
        module_exist = {}
        if modules is not None:
            for module in modules:
                module_name = module if "==" not in module else \
                    module.split("==")[0]
                module_version = None if "==" not in module else \
                    module.split("==")[1]
                if module_exist.get(module_name, "") != "":
                    print(module_name, "==", module_exist.get(module_name),
                          "will be ignored because a new version is specified.")
                    configs.pop()
                module_exist.update({module_name: module_version})
                try:
                    CacheUpdater(
                        "hub_serving_start",
                        module=module_name,
                        version=module_version).start()
                    m = hub.Module(name=module_name, version=module_version)
                    method_name = m.desc.attr.map.data['default_signature'].s
                    if method_name == "":
                        raise RuntimeError("{} cannot be used for "
                                           "predicting".format(module_name))
                    configs.append({
                        "module": module_name,
                        "version": m.version,
                        "category": str(m.type).split("/")[0].upper()
                    })
                except Exception as err:
                    print(err, ", PaddleHub Serving failed to start.")
                    exit(1)
            return configs

    def start_app_with_file(self, configs, workers):
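        """Start serving with gunicorn using settings from a config file."""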
        port = configs.get("port", 8866)
        if ServingCommand.is_port_occupied("127.0.0.1", port) is True:
            print("Port %s is occupied, please change it." % port)
            return False

        modules = configs.get("modules_info")
        module = [str(i["module"]) + "==" + str(i["version"]) for i in modules]
        module_info = ServingCommand.preinstall_modules(module)
        for index in range(len(module_info)):
            modules[index].update(module_info[index])
        options = {
            "bind": "0.0.0.0:%s" % port,
            "workers": workers,
            "pid": "./pid.txt"
        }

        configs["modules_info"] = modules
        self.dump_pid_file()
        StandaloneApplication(
            app.create_app(init_flag=False, configs=configs), options).run()

    def start_single_app_with_file(self, configs):
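        """Start single-process serving using settings from a config file."""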
        use_gpu = configs.get("use_gpu", False)
        port = configs.get("port", 8866)
        if ServingCommand.is_port_occupied("127.0.0.1", port) is True:
            print("Port %s is occupied, please change it." % port)
            return False
        configs = configs.get("modules_info")
        module = [str(i["module"]) + "==" + str(i["version"]) for i in configs]
        module_info = ServingCommand.preinstall_modules(module)
        for index in range(len(module_info)):
            configs[index].update(module_info[index])
        self.dump_pid_file()
        app.run(use_gpu, configs=configs, port=port)

    @staticmethod
    def start_multi_app_with_file(configs):
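        """Start multi-worker gunicorn serving from a config file."""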
        port = configs.get("port", 8866)
        if ServingCommand.is_port_occupied("127.0.0.1", port) is True:
            print("Port %s is occupied, please change it." % port)
            return False
        workers = configs.get("workers", number_of_workers())
        options = {"bind": "0.0.0.0:%s" % port, "workers": workers}
        StandaloneApplication(
            app.create_app(init_flag=False, configs=configs), options).run()
        print("PaddleHub Serving has been stopped.")

    def start_app_with_args(self, workers):
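        """Start gunicorn serving configured from command-line arguments."""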
        module = self.args.modules
        if module is not None:
            use_gpu = self.args.use_gpu
            port = self.args.port
            if ServingCommand.is_port_occupied("127.0.0.1", port) is True:
                print("Port %s is occupied, please change it." % port)
                return False
            module_info = ServingCommand.preinstall_modules(module)
            for item in module_info:
                item.update({"batch_size": 1, "queue_size": 20})
            options = {"bind": "0.0.0.0:%s" % port, "workers": workers}
            configs = {"use_gpu": use_gpu, "modules_info": module_info}
            self.dump_pid_file()
            StandaloneApplication(
                app.create_app(init_flag=False, configs=configs),
                options).run()
        else:
            print("Missing necessary parameters! Please specify --modules/-m.")

    def start_single_app_with_args(self):
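        """Start single-process serving from command-line arguments."""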
        module = self.args.modules
        if module is not None:
            use_gpu = self.args.use_gpu
            port = self.args.port
            if ServingCommand.is_port_occupied("127.0.0.1", port) is True:
                print("Port %s is occupied, please change it." % port)
                return False
            module_info = ServingCommand.preinstall_modules(module)
            for item in module_info:
                item.update({"batch_size": 1, "queue_size": 20})
            self.dump_pid_file()
            app.run(use_gpu, configs=module_info, port=port)
        else:
            print("Missing necessary parameters! Please specify --modules/-m.")

    def start_multi_app_with_args(self):
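        """Start multi-worker gunicorn serving from command-line arguments."""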
        module = self.args.modules
        if module is not None:
            use_gpu = self.args.use_gpu
            port = self.args.port
            workers = number_of_workers()
            if ServingCommand.is_port_occupied("127.0.0.1", port) is True:
                print("Port %s is occupied, please change it." % port)
                return False
            module_info = ServingCommand.preinstall_modules(module)
            for item in module_info:
                item.update({"batch_size": 1, "queue_size": 20})
            options = {"bind": "0.0.0.0:%s" % port, "workers": workers}
            configs = {"use_gpu": use_gpu, "modules_info": module_info}
            StandaloneApplication(
                app.create_app(init_flag=False, configs=configs),
                options).run()
            print("PaddleHub Serving has been stopped.")
        else:
            print("Missing necessary parameters! Please specify --modules/-m.")

    def start_serving(self):
        config_file = self.args.config
        if config_file is not None:
            if os.path.exists(config_file):
                with open(config_file, "r") as fp:
                    configs = json.load(fp)
                    use_multiprocess = configs.get("use_multiprocess", False)
                    if platform.system() == "Windows":
                        print(
                            "Warning: Windows cannot use multiprocess working "
                            "mode, PaddleHub Serving will switch to single process mode"
                        )
                        self.start_single_app_with_file(configs)
                    else:
                        if use_multiprocess is True:
                            self.start_app_with_file(configs,
                                                     number_of_workers())
                        else:
                            self.start_app_with_file(configs, 1)

            else:
                print("config_file", config_file, "does not exist.")
        else:
            if platform.system() == "Windows":
                print(
                    "Warning: Windows cannot use multiprocess working "
                    "mode, PaddleHub Serving will switch to single process mode"
                )
                self.start_single_app_with_args()
            else:
                if self.args.use_multiprocess is True:
                    self.start_app_with_args(number_of_workers())
                else:
                    self.start_app_with_args(1)

    @staticmethod
    def show_help():
        str = "serving <option>\n"
        str += "\tManage PaddleHub Serving.\n"
        str += "sub command:\n"
        str += "1. start\n"
        str += "\tStart PaddleHub Serving.\n"
        str += "2. stop\n"
        str += "\tStop PaddleHub Serving.\n"
        str += "3. start bert_service\n"
        str += "\tStart Bert Service.\n"
        str += "\n"
        str += "[start] option:\n"
        str += "--modules/-m [module1==version, module2==version...]\n"
        str += "\tPre-install modules via the parameter list.\n"
        str += "--port/-p XXXX\n"
        str += "\tUse port XXXX for serving.\n"
        str += "--use_gpu\n"
        str += "\tUse gpu for predicting if you specify the parameter.\n"
        str += "--use_multiprocess\n"
        str += "\tChoose multiprocess mode, which cannot be used on Windows.\n"
        str += "--config/-c file_path\n"
        str += "\tUse configs in file to start PaddleHub Serving. "
        str += "Other parameters will be ignored if you specify the parameter.\n"
        str += "\n"
        str += "[stop] option:\n"
        str += "--port/-p XXXX\n"
        str += "\tStop PaddleHub Serving on port XXXX safely.\n"
        str += "\n"
        str += "[start bert_service] option:\n"
        str += "--modules/-m\n"
        str += "\tPre-install modules via the parameter.\n"
        str += "--port/-p XXXX\n"
        str += "\tUse port XXXX for serving.\n"
        str += "--use_gpu\n"
        str += "\tUse gpu for predicting if you specify the parameter.\n"
        str += "--gpu\n"
        str += "\tSpecify the GPU devices to use.\n"
        print(str)

    def execute(self, argv):
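        """Parse command-line arguments and dispatch start/stop actions."""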
        try:
            self.args = self.parser.parse_args()
        except:
            ServingCommand.show_help()
            return False
        if self.args.sub_command == "start":
            if self.args.bert_service == "bert_service":
                ServingCommand.start_bert_serving(self.args)
            elif self.args.bert_service is None:
                self.start_serving()
            else:
                ServingCommand.show_help()
        elif self.args.sub_command == "stop":
            if self.args.bert_service == "bert_service":
                print("Please stop Bert Service by killing its process yourself.")
            elif self.args.bert_service is None:
                self.stop_serving(port=self.args.port)
        else:
            ServingCommand.show_help()


command = ServingCommand.instance()