resource.cpp 12.9 KB
Newer Older
W
wangguibao 已提交
1 2 3 4 5 6 7 8 9 10 11 12 13 14
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

G
guru4elephant 已提交
15
#include "core/predictor/framework/resource.h"
G
guru4elephant 已提交
16
#include <sstream>
W
wangguibao 已提交
17
#include <string>
G
guru4elephant 已提交
18 19
#include "core/predictor/common/inner_common.h"
#include "core/predictor/framework/kv_manager.h"
W
wangguibao 已提交
20 21 22 23
namespace baidu {
namespace paddle_serving {
namespace predictor {

W
wangguibao 已提交
24
using configure::ResourceConf;
G
guru4elephant 已提交
25
using configure::GeneralModelConfig;
X
xulongteng 已提交
26
using rec::mcube::CubeAPI;
W
wangguibao 已提交
27 28 29
// __thread bool p_thread_initialized = false;

static void dynamic_resource_deleter(void* d) {
W
sdk-cpp  
wangguibao 已提交
30
#if 1
W
wangguibao 已提交
31
  LOG(INFO) << "dynamic_resource_delete on " << bthread_self();
W
sdk-cpp  
wangguibao 已提交
32
#endif
W
wangguibao 已提交
33
  delete static_cast<DynamicResource*>(d);
W
wangguibao 已提交
34 35 36 37 38 39
}

DynamicResource::DynamicResource() {}

DynamicResource::~DynamicResource() {}

40
int DynamicResource::initialize() { return 0; }
W
wangjiawei04 已提交
41

42
std::shared_ptr<RocksDBWrapper> Resource::getDB() { return db; }
W
wangguibao 已提交
43

G
guru4elephant 已提交
44 45 46 47 48
// Returns the parsed general-model configuration; null until
// general_model_initialize() has successfully populated it.
std::shared_ptr<PaddleGeneralModelConfig> Resource::get_general_model_config() {
  return _config;
}

void Resource::print_general_model_config(
49
    const std::shared_ptr<PaddleGeneralModelConfig>& config) {
G
guru4elephant 已提交
50 51 52 53
  if (config == nullptr) {
    LOG(INFO) << "paddle general model config is not set";
    return;
  }
54
  LOG(INFO) << "Number of Feed Tensor: " << config->_feed_name.size();
G
guru4elephant 已提交
55
  std::ostringstream oss;
56 57 58 59 60 61 62
  LOG(INFO) << "Feed Name Info";
  for (auto& feed_name : config->_feed_name) {
    oss << feed_name << " ";
  }
  LOG(INFO) << oss.str();
  oss.clear();
  oss.str("");
G
guru4elephant 已提交
63
  LOG(INFO) << "Feed Type Info";
64
  for (auto& feed_type : config->_feed_type) {
G
guru4elephant 已提交
65 66 67 68 69 70 71 72 73 74 75 76 77 78 79
    oss << feed_type << " ";
  }
  LOG(INFO) << oss.str();
  oss.clear();
  oss.str("");
  LOG(INFO) << "Lod Type Info";

  for (auto is_lod : config->_is_lod_feed) {
    oss << is_lod << " ";
  }

  LOG(INFO) << oss.str();
  oss.clear();
  oss.str("");
  LOG(INFO) << "Capacity Info";
80
  for (auto& cap : config->_capacity) {
G
guru4elephant 已提交
81 82 83 84 85 86 87
    oss << cap << " ";
  }
  LOG(INFO) << oss.str();
  oss.clear();
  oss.str("");
  LOG(INFO) << "Feed Shape Info";
  int tensor_idx = 0;
88 89
  for (auto& shape : config->_feed_shape) {
    for (auto& dim : shape) {
G
guru4elephant 已提交
90 91 92 93 94 95 96 97
      oss << dim << " ";
    }
    LOG(INFO) << "Tensor[" << tensor_idx++ << "].shape: " << oss.str();
    oss.clear();
    oss.str("");
  }
}

W
wangguibao 已提交
98
int DynamicResource::clear() { return 0; }
W
wangguibao 已提交
99 100

int Resource::initialize(const std::string& path, const std::string& file) {
W
wangguibao 已提交
101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120
  ResourceConf resource_conf;
  if (configure::read_proto_conf(path, file, &resource_conf) != 0) {
    LOG(ERROR) << "Failed initialize resource from: " << path << "/" << file;
    return -1;
  }

  // mempool
  if (MempoolWrapper::instance().initialize() != 0) {
    LOG(ERROR) << "Failed proc initialized mempool wrapper";
    return -1;
  }
  LOG(WARNING) << "Successfully proc initialized mempool wrapper";

  if (FLAGS_enable_model_toolkit) {
    int err = 0;
    std::string model_toolkit_path = resource_conf.model_toolkit_path();
    if (err != 0) {
      LOG(ERROR) << "read model_toolkit_path failed, path[" << path
                 << "], file[" << file << "]";
      return -1;
W
wangguibao 已提交
121
    }
W
wangguibao 已提交
122 123 124 125 126
    std::string model_toolkit_file = resource_conf.model_toolkit_file();
    if (err != 0) {
      LOG(ERROR) << "read model_toolkit_file failed, path[" << path
                 << "], file[" << file << "]";
      return -1;
W
wangguibao 已提交
127
    }
W
wangguibao 已提交
128 129 130 131 132
    if (InferManager::instance().proc_initialize(
            model_toolkit_path.c_str(), model_toolkit_file.c_str()) != 0) {
      LOG(ERROR) << "failed proc initialize modeltoolkit, config: "
                 << model_toolkit_path << "/" << model_toolkit_file;
      return -1;
W
wangguibao 已提交
133
    }
W
wangguibao 已提交
134 135 136 137 138 139

    if (KVManager::instance().proc_initialize(
            model_toolkit_path.c_str(), model_toolkit_file.c_str()) != 0) {
      LOG(ERROR) << "Failed proc initialize kvmanager, config: "
                 << model_toolkit_path << "/" << model_toolkit_file;
    }
W
wangguibao 已提交
140 141 142 143 144 145
  }

  if (THREAD_KEY_CREATE(&_tls_bspec_key, dynamic_resource_deleter) != 0) {
    LOG(ERROR) << "unable to create tls_bthread_key of thrd_data";
    return -1;
  }
W
wangjiawei04 已提交
146
  // init rocksDB or cube instance
W
wangjiawei04 已提交
147 148
  if (resource_conf.has_cube_config_file() &&
      resource_conf.has_cube_config_path()) {
W
wangjiawei04 已提交
149
    LOG(INFO) << "init cube client, path[ " << resource_conf.cube_config_path()
W
wangjiawei04 已提交
150 151 152 153 154
              << " ], config file [ " << resource_conf.cube_config_file()
              << " ].";
    rec::mcube::CubeAPI* cube = rec::mcube::CubeAPI::instance();
    std::string cube_config_fullpath = "./" + resource_conf.cube_config_path() +
                                       "/" + resource_conf.cube_config_file();
W
wangjiawei04 已提交
155 156
    this->cube_config_fullpath = cube_config_fullpath;
  }
W
wangjiawei04 已提交
157

W
wangjiawei04 已提交
158 159 160 161
  if (db.get() == nullptr) {
    db = RocksDBWrapper::RocksDBWrapperFactory("kvdb");
  }

W
wangguibao 已提交
162 163
  THREAD_SETSPECIFIC(_tls_bspec_key, NULL);
  return 0;
W
wangguibao 已提交
164 165
}

166 167 168
// model config
int Resource::general_model_initialize(const std::string& path,
                                       const std::string& file) {
W
wangjiawei04 已提交
169 170 171
  // TODO: add serving dist op detection, if true, add cube instance init.
  if (this->cube_config_fullpath.size() != 0) {
    LOG(INFO) << "init cube by config file : " << this->cube_config_fullpath;
W
wangjiawei04 已提交
172
    rec::mcube::CubeAPI* cube = rec::mcube::CubeAPI::instance();
W
wangjiawei04 已提交
173 174
    cube->init(this->cube_config_fullpath.c_str());
  }
175 176
  VLOG(2) << "general model path: " << path;
  VLOG(2) << "general model file: " << file;
G
guru4elephant 已提交
177
  if (!FLAGS_enable_general_model) {
178 179
    LOG(ERROR) << "general model is not enabled";
    return -1;
G
guru4elephant 已提交
180
  }
181 182 183 184 185 186 187 188 189 190 191 192 193
  ResourceConf resource_conf;
  if (configure::read_proto_conf(path, file, &resource_conf) != 0) {
    LOG(ERROR) << "Failed initialize resource from: " << path << "/" << file;
    return -1;
  }
  int err = 0;
  std::string general_model_path = resource_conf.general_model_path();
  std::string general_model_file = resource_conf.general_model_file();
  if (err != 0) {
    LOG(ERROR) << "read general_model_path failed, path[" << path << "], file["
               << file << "]";
    return -1;
  }
G
guru4elephant 已提交
194

G
guru4elephant 已提交
195
  GeneralModelConfig model_config;
196 197 198 199 200
  if (configure::read_proto_conf(general_model_path.c_str(),
                                 general_model_file.c_str(),
                                 &model_config) != 0) {
    LOG(ERROR) << "Failed initialize model config from: " << general_model_path
               << "/" << general_model_file;
G
guru4elephant 已提交
201 202 203 204
    return -1;
  }

  _config.reset(new PaddleGeneralModelConfig());
205
  int feed_var_num = model_config.feed_var_size();
206 207
  VLOG(2) << "load general model config";
  VLOG(2) << "feed var num: " << feed_var_num;
208
  _config->_feed_name.resize(feed_var_num);
209
  _config->_feed_alias_name.resize(feed_var_num);
210 211 212 213 214 215
  _config->_feed_type.resize(feed_var_num);
  _config->_is_lod_feed.resize(feed_var_num);
  _config->_capacity.resize(feed_var_num);
  _config->_feed_shape.resize(feed_var_num);
  for (int i = 0; i < feed_var_num; ++i) {
    _config->_feed_name[i] = model_config.feed_var(i).name();
216
    _config->_feed_alias_name[i] = model_config.feed_var(i).alias_name();
W
wangjiawei04 已提交
217 218
    VLOG(2) << "feed var[" << i << "]: " << _config->_feed_name[i];
    VLOG(2) << "feed var[" << i << "]: " << _config->_feed_alias_name[i];
219
    _config->_feed_type[i] = model_config.feed_var(i).feed_type();
W
wangjiawei04 已提交
220
    VLOG(2) << "feed type[" << i << "]: " << _config->_feed_type[i];
221

222
    if (model_config.feed_var(i).is_lod_tensor()) {
223
      VLOG(2) << "var[" << i << "] is lod tensor";
G
guru4elephant 已提交
224 225 226
      _config->_feed_shape[i] = {-1};
      _config->_is_lod_feed[i] = true;
    } else {
227
      VLOG(2) << "var[" << i << "] is tensor";
G
guru4elephant 已提交
228
      _config->_capacity[i] = 1;
G
guru4elephant 已提交
229
      _config->_is_lod_feed[i] = false;
M
MRXLT 已提交
230 231
      for (int j = 0; j < model_config.feed_var(i).shape_size(); ++j) {
        int32_t dim = model_config.feed_var(i).shape(j);
232
        VLOG(2) << "var[" << i << "].shape[" << i << "]: " << dim;
G
guru4elephant 已提交
233
        _config->_feed_shape[i].push_back(dim);
G
guru4elephant 已提交
234
        _config->_capacity[i] *= dim;
G
guru4elephant 已提交
235 236 237
      }
    }
  }
238 239

  int fetch_var_num = model_config.fetch_var_size();
240
  _config->_is_lod_fetch.resize(fetch_var_num);
241
  _config->_fetch_name.resize(fetch_var_num);
242
  _config->_fetch_alias_name.resize(fetch_var_num);
243 244 245
  _config->_fetch_shape.resize(fetch_var_num);
  for (int i = 0; i < fetch_var_num; ++i) {
    _config->_fetch_name[i] = model_config.fetch_var(i).name();
246 247 248 249 250 251 252 253 254 255 256 257 258
    _config->_fetch_alias_name[i] = model_config.fetch_var(i).alias_name();
    _config->_fetch_name_to_index[_config->_fetch_name[i]] = i;
    _config->_fetch_alias_name_to_index[_config->_fetch_alias_name[i]] = i;
    if (model_config.fetch_var(i).is_lod_tensor()) {
      VLOG(2) << "fetch var[" << i << "] is lod tensor";
      _config->_fetch_shape[i] = {-1};
      _config->_is_lod_fetch[i] = true;
    } else {
      _config->_is_lod_fetch[i] = false;
      for (int j = 0; j < model_config.fetch_var(i).shape_size(); ++j) {
        int dim = model_config.fetch_var(i).shape(j);
        _config->_fetch_shape[i].push_back(dim);
      }
259 260
    }
  }
G
guru4elephant 已提交
261
  return 0;
G
guru4elephant 已提交
262 263
}

X
xulongteng 已提交
264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283
int Resource::cube_initialize(const std::string& path,
                              const std::string& file) {
  // cube
  if (!FLAGS_enable_cube) {
    return 0;
  }

  ResourceConf resource_conf;
  if (configure::read_proto_conf(path, file, &resource_conf) != 0) {
    LOG(ERROR) << "Failed initialize resource from: " << path << "/" << file;
    return -1;
  }

  int err = 0;
  std::string cube_config_file = resource_conf.cube_config_file();
  if (err != 0) {
    LOG(ERROR) << "reade cube_config_file failed, path[" << path << "], file["
               << cube_config_file << "]";
    return -1;
  }
X
xulongteng 已提交
284
  err = CubeAPI::instance()->init(cube_config_file.c_str());
X
xulongteng 已提交
285
  if (err != 0) {
286 287
    LOG(ERROR) << "failed initialize cube, config: " << cube_config_file
               << " error code : " << err;
X
xulongteng 已提交
288 289 290 291 292 293 294 295
    return -1;
  }

  LOG(INFO) << "Successfully initialize cube";

  return 0;
}

W
wangguibao 已提交
296
int Resource::thread_initialize() {
W
wangguibao 已提交
297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317
  // mempool
  if (MempoolWrapper::instance().thread_initialize() != 0) {
    LOG(ERROR) << "Failed thread initialized mempool wrapper";
    return -1;
  }
  LOG(WARNING) << "Successfully thread initialized mempool wrapper";

  // infer manager
  if (FLAGS_enable_model_toolkit &&
      InferManager::instance().thrd_initialize() != 0) {
    LOG(ERROR) << "Failed thrd initialized infer manager";
    return -1;
  }

  DynamicResource* p_dynamic_resource =
      reinterpret_cast<DynamicResource*>(THREAD_GETSPECIFIC(_tls_bspec_key));
  if (p_dynamic_resource == NULL) {
    p_dynamic_resource = new (std::nothrow) DynamicResource;
    if (p_dynamic_resource == NULL) {
      LOG(ERROR) << "failed to create tls DynamicResource";
      return -1;
W
wangguibao 已提交
318
    }
W
wangguibao 已提交
319 320 321 322 323
    if (p_dynamic_resource->initialize() != 0) {
      LOG(ERROR) << "DynamicResource initialize failed.";
      delete p_dynamic_resource;
      p_dynamic_resource = NULL;
      return -1;
W
wangguibao 已提交
324 325
    }

W
wangguibao 已提交
326 327 328 329 330
    if (THREAD_SETSPECIFIC(_tls_bspec_key, p_dynamic_resource) != 0) {
      LOG(ERROR) << "unable to set tls DynamicResource";
      delete p_dynamic_resource;
      p_dynamic_resource = NULL;
      return -1;
W
wangguibao 已提交
331
    }
W
wangguibao 已提交
332
  }
W
sdk-cpp  
wangguibao 已提交
333
#if 0
W
wangguibao 已提交
334
    LOG(INFO) << "Successfully thread initialized dynamic resource";
W
sdk-cpp  
wangguibao 已提交
335
#else
W
wangguibao 已提交
336 337 338
  LOG(INFO) << bthread_self()
            << ": Successfully thread initialized dynamic resource "
            << p_dynamic_resource;
W
wangguibao 已提交
339

W
sdk-cpp  
wangguibao 已提交
340
#endif
W
wangguibao 已提交
341
  return 0;
W
wangguibao 已提交
342 343 344
}

int Resource::thread_clear() {
W
wangguibao 已提交
345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360
  // mempool
  if (MempoolWrapper::instance().thread_clear() != 0) {
    LOG(ERROR) << "Failed thread clear mempool wrapper";
    return -1;
  }

  // infer manager
  if (FLAGS_enable_model_toolkit &&
      InferManager::instance().thrd_clear() != 0) {
    LOG(ERROR) << "Failed thrd clear infer manager";
    return -1;
  }

  DynamicResource* p_dynamic_resource =
      reinterpret_cast<DynamicResource*>(THREAD_GETSPECIFIC(_tls_bspec_key));
  if (p_dynamic_resource == NULL) {
W
sdk-cpp  
wangguibao 已提交
361
#if 0
W
wangguibao 已提交
362 363
    LOG(ERROR) << "tls dynamic resource shouldn't be null after "
        << "thread_initialize";
W
sdk-cpp  
wangguibao 已提交
364
#else
W
wangguibao 已提交
365 366 367
    LOG(ERROR)
        << bthread_self()
        << ": tls dynamic resource shouldn't be null after thread_initialize";
W
sdk-cpp  
wangguibao 已提交
368
#endif
W
wangguibao 已提交
369 370 371 372 373 374 375 376 377
    return -1;
  }
  if (p_dynamic_resource->clear() != 0) {
    LOG(ERROR) << "Failed to invoke dynamic resource clear";
    return -1;
  }

  // ...
  return 0;
W
wangguibao 已提交
378 379 380
}

int Resource::reload() {
W
wangguibao 已提交
381 382 383 384 385 386 387
  if (FLAGS_enable_model_toolkit && InferManager::instance().reload() != 0) {
    LOG(ERROR) << "Failed reload infer manager";
    return -1;
  }

  // other resource reload here...
  return 0;
W
wangguibao 已提交
388 389 390
}

int Resource::finalize() {
W
wangguibao 已提交
391 392 393 394 395
  if (FLAGS_enable_model_toolkit &&
      InferManager::instance().proc_finalize() != 0) {
    LOG(ERROR) << "Failed proc finalize infer manager";
    return -1;
  }
X
xulongteng 已提交
396 397 398 399
  if (CubeAPI::instance()->destroy() != 0) {
    LOG(ERROR) << "Destory cube api failed ";
    return -1;
  }
W
wangguibao 已提交
400
  THREAD_KEY_DELETE(_tls_bspec_key);
W
wangguibao 已提交
401

W
wangguibao 已提交
402
  return 0;
W
wangguibao 已提交
403 404
}

W
wangguibao 已提交
405 406 407
}  // namespace predictor
}  // namespace paddle_serving
}  // namespace baidu