// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "core/predictor/framework/resource.h"
#include <sstream>
#include <string>
#include "core/predictor/common/inner_common.h"
#include "core/predictor/framework/kv_manager.h"
namespace baidu {
namespace paddle_serving {
namespace predictor {

using configure::ResourceConf;
using configure::GeneralModelConfig;
using configure::Shape;
using rec::mcube::CubeAPI;
// __thread bool p_thread_initialized = false;

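// Destructor for the thread-local DynamicResource; registered with
// THREAD_KEY_CREATE in Resource::initialize().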
static void dynamic_resource_deleter(void* d) {
#if 1
  LOG(INFO) << "dynamic_resource_delete on " << bthread_self();
#endif
  delete static_cast<DynamicResource*>(d);
}

DynamicResource::DynamicResource() {}

DynamicResource::~DynamicResource() {}

int DynamicResource::initialize() { return 0; }

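// Process-wide RocksDB handle; created in Resource::initialize().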
std::shared_ptr<RocksDBWrapper> Resource::getDB() { return db; }

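// Returns the general model config parsed in general_model_initialize().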
std::shared_ptr<PaddleGeneralModelConfig> Resource::get_general_model_config() {
  return _config;
}

void Resource::print_general_model_config(
    const std::shared_ptr<PaddleGeneralModelConfig>& config) {
  if (config == nullptr) {
    LOG(INFO) << "paddle general model config is not set";
    return;
  }
  LOG(INFO) << "Number of Feed Tensor: " << config->_feed_name.size();
  std::ostringstream oss;
  LOG(INFO) << "Feed Name Info";
  for (auto& feed_name : config->_feed_name) {
    oss << feed_name << " ";
  }
  LOG(INFO) << oss.str();
  oss.clear();
  oss.str("");
  LOG(INFO) << "Feed Type Info";
  for (auto& feed_type : config->_feed_type) {
    oss << feed_type << " ";
  }
  LOG(INFO) << oss.str();
  oss.clear();
  oss.str("");
  LOG(INFO) << "Lod Type Info";

  for (auto is_lod : config->_is_lod_feed) {
    oss << is_lod << " ";
  }

  LOG(INFO) << oss.str();
  oss.clear();
  oss.str("");
  LOG(INFO) << "Capacity Info";
  for (auto& cap : config->_capacity) {
    oss << cap << " ";
  }
  LOG(INFO) << oss.str();
  oss.clear();
  oss.str("");
  LOG(INFO) << "Feed Shape Info";
  int tensor_idx = 0;
  for (auto& shape : config->_feed_shape) {
    for (auto& dim : shape) {
      oss << dim << " ";
    }
    LOG(INFO) << "Tensor[" << tensor_idx++ << "].shape: " << oss.str();
    oss.clear();
    oss.str("");
  }
}

int DynamicResource::clear() { return 0; }

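// Process-level initialization: load ResourceConf from path/file, then set up
// the mempool, InferManager and KVManager (when model toolkit is enabled), the
// RocksDB instance, and the TLS key for per-thread DynamicResource.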
int Resource::initialize(const std::string& path, const std::string& file) {
  ResourceConf resource_conf;
  if (configure::read_proto_conf(path, file, &resource_conf) != 0) {
    LOG(ERROR) << "Failed initialize resource from: " << path << "/" << file;
    return -1;
  }

  // mempool
  if (MempoolWrapper::instance().initialize() != 0) {
    LOG(ERROR) << "Failed proc initialized mempool wrapper";
    return -1;
  }
  LOG(WARNING) << "Successfully proc initialized mempool wrapper";

  if (FLAGS_enable_model_toolkit) {
    int err = 0;
    std::string model_toolkit_path = resource_conf.model_toolkit_path();
    if (err != 0) {
      LOG(ERROR) << "read model_toolkit_path failed, path[" << path
                 << "], file[" << file << "]";
      return -1;
    }
    std::string model_toolkit_file = resource_conf.model_toolkit_file();
    if (err != 0) {
      LOG(ERROR) << "read model_toolkit_file failed, path[" << path
                 << "], file[" << file << "]";
      return -1;
    }
    if (InferManager::instance().proc_initialize(
            model_toolkit_path.c_str(), model_toolkit_file.c_str()) != 0) {
      LOG(ERROR) << "failed proc initialize modeltoolkit, config: "
                 << model_toolkit_path << "/" << model_toolkit_file;
      return -1;
    }

    if (KVManager::instance().proc_initialize(
            model_toolkit_path.c_str(), model_toolkit_file.c_str()) != 0) {
      LOG(ERROR) << "Failed proc initialize kvmanager, config: "
                 << model_toolkit_path << "/" << model_toolkit_file;
    }
  }

  if (THREAD_KEY_CREATE(&_tls_bspec_key, dynamic_resource_deleter) != 0) {
    LOG(ERROR) << "unable to create tls_bthread_key of thrd_data";
    return -1;
  }
  // init RocksDB instance
  if (db.get() == nullptr) {
    db = RocksDBWrapper::RocksDBWrapperFactory("kvdb");
  }

  THREAD_SETSPECIFIC(_tls_bspec_key, NULL);
  return 0;
}

// Model config: parse GeneralModelConfig (feed/fetch variable metadata) into
// the in-memory PaddleGeneralModelConfig held by this Resource.
int Resource::general_model_initialize(const std::string& path,
                                       const std::string& file) {
  if (!FLAGS_enable_general_model) {
    return 0;
  }
  ResourceConf resource_conf;
  if (configure::read_proto_conf(path, file, &resource_conf) != 0) {
    LOG(ERROR) << "Failed initialize resource from: " << path << "/" << file;
    return -1;
  }
  int err = 0;
  std::string general_model_path = resource_conf.general_model_path();
  std::string general_model_file = resource_conf.general_model_file();
  if (err != 0) {
    LOG(ERROR) << "read general_model_path failed, path[" << path << "], file["
               << file << "]";
    return -1;
  }

  GeneralModelConfig model_config;
  if (configure::read_proto_conf(general_model_path.c_str(),
                                 general_model_file.c_str(),
                                 &model_config) != 0) {
    LOG(ERROR) << "Failed initialize model config from: " << general_model_path
               << "/" << general_model_file;
    return -1;
  }

  _config.reset(new PaddleGeneralModelConfig());
186 187 188 189 190 191 192 193 194 195
  int feed_var_num = model_config.feed_var_size();
  _config->_feed_name.resize(feed_var_num);
  _config->_feed_type.resize(feed_var_num);
  _config->_is_lod_feed.resize(feed_var_num);
  _config->_capacity.resize(feed_var_num);
  _config->_feed_shape.resize(feed_var_num);
  for (int i = 0; i < feed_var_num; ++i) {
    _config->_feed_name[i] = model_config.feed_var(i).name();
    _config->_feed_type[i] = model_config.feed_var(i).feed_type();
    if (model_config.feed_var(i).is_lod_tensor()) {
      _config->_feed_shape[i] = {-1};
      _config->_is_lod_feed[i] = true;
    } else {
      _config->_capacity[i] = 1;
      _config->_is_lod_feed[i] = false;
      for (int j = 0; j < model_config.feed_var(i).feed_shape().shape_size();
           ++j) {
        int32_t dim = model_config.feed_var(i).feed_shape().shape(j);
        _config->_feed_shape[i].push_back(dim);
        _config->_capacity[i] *= dim;
      }
    }
  }

  int fetch_var_num = model_config.fetch_var_size();
  _config->_fetch_name.resize(fetch_var_num);
  _config->_fetch_shape.resize(fetch_var_num);
  for (int i = 0; i < fetch_var_num; ++i) {
    _config->_fetch_name[i] = model_config.fetch_var(i).name();
    for (int j = 0; j < model_config.fetch_var(i).fetch_shape().shape_size();
         ++j) {
      int dim = model_config.fetch_var(i).fetch_shape().shape(j);
      _config->_fetch_shape[i].push_back(dim);
    }
  }
  return 0;
}

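// Initializes the Cube client from cube_config_file; a no-op unless
// FLAGS_enable_cube is set.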
int Resource::cube_initialize(const std::string& path,
                              const std::string& file) {
  // cube
  if (!FLAGS_enable_cube) {
    return 0;
  }

  ResourceConf resource_conf;
  if (configure::read_proto_conf(path, file, &resource_conf) != 0) {
    LOG(ERROR) << "Failed initialize resource from: " << path << "/" << file;
    return -1;
  }

  int err = 0;
  std::string cube_config_file = resource_conf.cube_config_file();
  if (err != 0) {
    LOG(ERROR) << "reade cube_config_file failed, path[" << path << "], file["
               << cube_config_file << "]";
    return -1;
  }
  err = CubeAPI::instance()->init(cube_config_file.c_str());
  if (err != 0) {
    LOG(ERROR) << "failed initialize cube, config: " << cube_config_file
               << " error code : " << err;
    return -1;
  }

  LOG(INFO) << "Successfully initialize cube";

  return 0;
}

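// Per-thread initialization: thread-local mempool, InferManager thread state,
// and a DynamicResource bound to _tls_bspec_key.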
int Resource::thread_initialize() {
  // mempool
  if (MempoolWrapper::instance().thread_initialize() != 0) {
    LOG(ERROR) << "Failed thread initialized mempool wrapper";
    return -1;
  }
  LOG(WARNING) << "Successfully thread initialized mempool wrapper";

  // infer manager
  if (FLAGS_enable_model_toolkit &&
      InferManager::instance().thrd_initialize() != 0) {
    LOG(ERROR) << "Failed thrd initialized infer manager";
    return -1;
  }

  DynamicResource* p_dynamic_resource =
      reinterpret_cast<DynamicResource*>(THREAD_GETSPECIFIC(_tls_bspec_key));
  if (p_dynamic_resource == NULL) {
    p_dynamic_resource = new (std::nothrow) DynamicResource;
    if (p_dynamic_resource == NULL) {
      LOG(ERROR) << "failed to create tls DynamicResource";
      return -1;
    }
    if (p_dynamic_resource->initialize() != 0) {
      LOG(ERROR) << "DynamicResource initialize failed.";
      delete p_dynamic_resource;
      p_dynamic_resource = NULL;
      return -1;
    }

    if (THREAD_SETSPECIFIC(_tls_bspec_key, p_dynamic_resource) != 0) {
      LOG(ERROR) << "unable to set tls DynamicResource";
      delete p_dynamic_resource;
      p_dynamic_resource = NULL;
      return -1;
    }
  }
#if 0
    LOG(INFO) << "Successfully thread initialized dynamic resource";
#else
  LOG(INFO) << bthread_self()
            << ": Successfully thread initialized dynamic resource "
            << p_dynamic_resource;

#endif
  return 0;
}

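// Per-thread cleanup: clears the thread-local mempool, InferManager state, and
// the DynamicResource stored under _tls_bspec_key.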
int Resource::thread_clear() {
  // mempool
  if (MempoolWrapper::instance().thread_clear() != 0) {
    LOG(ERROR) << "Failed thread clear mempool wrapper";
    return -1;
  }

  // infer manager
  if (FLAGS_enable_model_toolkit &&
      InferManager::instance().thrd_clear() != 0) {
    LOG(ERROR) << "Failed thrd clear infer manager";
    return -1;
  }

  DynamicResource* p_dynamic_resource =
      reinterpret_cast<DynamicResource*>(THREAD_GETSPECIFIC(_tls_bspec_key));
  if (p_dynamic_resource == NULL) {
#if 0
    LOG(ERROR) << "tls dynamic resource shouldn't be null after "
        << "thread_initialize";
#else
    LOG(ERROR)
        << bthread_self()
        << ": tls dynamic resource shouldn't be null after thread_initialize";
#endif
    return -1;
  }
  if (p_dynamic_resource->clear() != 0) {
    LOG(ERROR) << "Failed to invoke dynamic resource clear";
    return -1;
  }

  // ...
  return 0;
}

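// Reloads models through InferManager when the model toolkit is enabled.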
int Resource::reload() {
  if (FLAGS_enable_model_toolkit && InferManager::instance().reload() != 0) {
    LOG(ERROR) << "Failed reload infer manager";
    return -1;
  }

  // other resource reload here...
  return 0;
}

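// Process-level teardown: finalize InferManager, destroy the Cube client, and
// delete the TLS key.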
int Resource::finalize() {
  if (FLAGS_enable_model_toolkit &&
      InferManager::instance().proc_finalize() != 0) {
    LOG(ERROR) << "Failed proc finalize infer manager";
    return -1;
  }
  if (CubeAPI::instance()->destroy() != 0) {
    LOG(ERROR) << "Destory cube api failed ";
    return -1;
  }
  THREAD_KEY_DELETE(_tls_bspec_key);

  return 0;
}

}  // namespace predictor
}  // namespace paddle_serving
}  // namespace baidu