// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

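// pdserving.cpp: entry point of the Paddle Serving predictor binary. It
// parses command-line flags (optionally loading defaults from a gflags file),
// sets up glog, initializes the resource, workflow and infer-service
// managers, registers a per-worker bthread init hook, and then starts the
// server and blocks until it exits.
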
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>

#include <bthread/unstable.h>  // bthread_set_worker_startfn
#include <fstream>
#include <iostream>
#include "butil/logging.h"
#include "predictor/common/constant.h"
#include "predictor/common/inner_common.h"
#include "predictor/framework/manager.h"
#include "predictor/framework/resource.h"
#include "predictor/framework/server.h"
#include "predictor/framework/service.h"
#include "predictor/framework/workflow.h"

using baidu::paddle_serving::predictor::ServerManager;
using baidu::paddle_serving::predictor::WorkflowManager;
using baidu::paddle_serving::predictor::InferServiceManager;
using baidu::paddle_serving::predictor::Resource;
using baidu::paddle_serving::predictor::FLAGS_workflow_path;
using baidu::paddle_serving::predictor::FLAGS_workflow_file;
using baidu::paddle_serving::predictor::FLAGS_inferservice_path;
using baidu::paddle_serving::predictor::FLAGS_inferservice_file;
using baidu::paddle_serving::predictor::FLAGS_logger_path;
using baidu::paddle_serving::predictor::FLAGS_logger_file;
using baidu::paddle_serving::predictor::FLAGS_resource_path;
using baidu::paddle_serving::predictor::FLAGS_resource_file;
using baidu::paddle_serving::predictor::FLAGS_reload_interval_s;
using baidu::paddle_serving::predictor::FLAGS_port;

using baidu::paddle_serving::configure::InferServiceConf;
using baidu::paddle_serving::configure::read_proto_conf;

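// Writes the compiled-in revision (and, when available, the build timestamp)
// to the given stream; also used below as the bvar::PassiveStatus callback.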
void print_revision(std::ostream& os, void*) {
#if defined(PDSERVING_VERSION)
  os << PDSERVING_VERSION;
#else
  os << "undefined";
#endif
#if defined(PDSERVING_BUILDTIME)
  os << ", BuildAt: " << PDSERVING_BUILDTIME;
#endif
}

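// Expose the revision string as a read-only bvar named "predictor_revision",
// so it can be inspected at runtime (e.g. through brpc's built-in /vars page).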
static bvar::PassiveStatus<std::string> s_predictor_revision(
    "predictor_revision", print_revision, NULL);

DEFINE_bool(V, false, "print version and exit");
DEFINE_bool(g, false,
            "a user-specified gflags file is provided; skip loading the "
            "default conf/gflags.conf");
DECLARE_string(flagfile);

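// Per-worker start function registered via bthread_set_worker_startfn() in
// main(); gives every bthread worker thread its own thread-local resources.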
void pthread_worker_start_fn() { Resource::instance().thread_initialize(); }

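// Reads the infer-service configuration and, if it specifies a non-zero port,
// overrides FLAGS_port so the configured value takes precedence over the flag.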
static void g_change_server_port() {
  InferServiceConf conf;
  if (read_proto_conf(FLAGS_inferservice_path.c_str(),
                      FLAGS_inferservice_file.c_str(),
                      &conf) != 0) {
    LOG(WARNING) << "failed to load configure[" << FLAGS_inferservice_path
                 << "," << FLAGS_inferservice_file << "].";
    return;
  }
  uint32_t port = conf.port();
  if (port != 0) {
    FLAGS_port = port;
    LOG(INFO) << "use configure[" << FLAGS_inferservice_path << "/"
              << FLAGS_inferservice_file << "] port[" << port
              << "] instead of flags";
  }
  return;
}

#ifdef UNIT_TEST
int ut_main(int argc, char** argv) {
#else
int main(int argc, char** argv) {
#endif
  google::ParseCommandLineFlags(&argc, &argv, true);

  if (FLAGS_V) {
    print_revision(std::cout, NULL);
    std::cout << std::flush;
    return 0;
  }

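  // Unless -g was passed, fall back to the default gflags file
  // conf/gflags.conf; the follow-up ParseCommandLineFlags() picks it up.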
  if (!FLAGS_g) {
    google::SetCommandLineOption("flagfile", "conf/gflags.conf");
  }

  google::ParseCommandLineFlags(&argc, &argv, true);

  g_change_server_port();

  // initialize logger instance
  if (FLAGS_log_dir == "") {
    FLAGS_log_dir = "./log";
  }

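  // Ensure the log directory exists; try to create it (mode 0777) if the
  // initial stat() fails, and bail out if it still cannot be found.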
  struct stat st_buf;
  int ret = 0;
  if ((ret = stat(FLAGS_log_dir.c_str(), &st_buf)) != 0) {
    mkdir(FLAGS_log_dir.c_str(), 0777);
    ret = stat(FLAGS_log_dir.c_str(), &st_buf);
    if (ret != 0) {
      LOG(WARNING) << "Log path " << FLAGS_log_dir
                   << " not exist, and create fail";
      return -1;
    }
  }
  google::InitGoogleLogging(strdup(argv[0]));

  LOG(INFO) << "Succ initialize logger";

  // initialize resource manager
  if (Resource::instance().initialize(FLAGS_resource_path,
                                      FLAGS_resource_file) != 0) {
    LOG(ERROR) << "Failed initialize resource, conf:" << FLAGS_resource_path
               << "/" << FLAGS_resource_file;
    return -1;
  }
  LOG(INFO) << "Succ initialize resource";

  // initialize workflow manager
  if (WorkflowManager::instance().initialize(FLAGS_workflow_path,
                                             FLAGS_workflow_file) != 0) {
    LOG(ERROR) << "Failed initialize workflow manager, conf:"
               << FLAGS_workflow_path << "/" << FLAGS_workflow_file;
    return -1;
  }
  LOG(INFO) << "Succ initialize workflow";

  // initialize service manager
  if (InferServiceManager::instance().initialize(
          FLAGS_inferservice_path, FLAGS_inferservice_file) != 0) {
    LOG(ERROR) << "Failed initialize infer service manager, conf:"
               << FLAGS_inferservice_path << "/" << FLAGS_inferservice_file;
    return -1;
  }
  LOG(INFO) << "Succ initialize inferservice";

  int errcode = bthread_set_worker_startfn(pthread_worker_start_fn);
  if (errcode != 0) {
    LOG(ERROR) << "Failed call pthread worker start function, error_code["
               << errcode << "]";
    return -1;
  }
  LOG(INFO) << "Succ call pthread worker start function";

  if (ServerManager::instance().start_and_wait() != 0) {
    LOG(ERROR) << "Failed start server and wait!";
    return -1;
  }
  LOG(INFO) << "Succ start service manager";

  if (InferServiceManager::instance().finalize() != 0) {
    LOG(ERROR) << "Failed finalize infer service manager.";
  }

  if (WorkflowManager::instance().finalize() != 0) {
    LOG(ERROR) << "Failed finalize workflow manager";
  }

  if (Resource::instance().finalize() != 0) {
    LOG(ERROR) << "Failed finalize resource manager";
  }

  LOG(INFO) << "Paddle Inference Server exited successfully!";
  google::ShutdownGoogleLogging();
  return 0;
}