// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <pthread.h>
#include <fstream>
#include <map>
#include <string>
#include <vector>
#include "core/configure/include/configure_parser.h"
#include "core/configure/inferencer_configure.pb.h"
#include "core/predictor/framework/infer.h"
#include "paddle_inference_api.h"  // NOLINT

namespace baidu {
namespace paddle_serving {
namespace fluid_cpu {

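// RAII scoped lock over a raw pthread mutex: locks in the constructor,
// unlocks in the destructor.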
class AutoLock {
 public:
  explicit AutoLock(pthread_mutex_t& mutex) : _mut(mutex) {
    pthread_mutex_lock(&mutex);
  }

  ~AutoLock() { pthread_mutex_unlock(&_mut); }

 private:
  pthread_mutex_t& _mut;
};

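// Process-wide singleton mutex, used below to serialize CreatePredictor
// calls across threads.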
class GlobalPaddleCreateMutex {
 public:
  pthread_mutex_t& mutex() { return _mut; }

  static pthread_mutex_t& instance() {
    static GlobalPaddleCreateMutex gmutex;
    return gmutex.mutex();
  }

 private:
  GlobalPaddleCreateMutex() { pthread_mutex_init(&_mut, NULL); }

  pthread_mutex_t _mut;
};

using paddle_infer::Config;
using paddle_infer::Predictor;
using paddle_infer::Tensor;
using paddle_infer::CreatePredictor;

// Common interface over a paddle_infer::Predictor: tensor handles, Run(),
// and the create()/clone() hooks used by the serving framework.
class FluidFamilyCore {
 public:
  virtual ~FluidFamilyCore() {}
  virtual std::vector<std::string> GetInputNames() {
    return _core->GetInputNames();
  }

  virtual std::unique_ptr<Tensor> GetInputHandle(const std::string& name) {
    return _core->GetInputHandle(name);
  }

  virtual std::vector<std::string> GetOutputNames() {
    return _core->GetOutputNames();
  }

  virtual std::unique_ptr<Tensor> GetOutputHandle(const std::string& name) {
    return _core->GetOutputHandle(name);
  }

  virtual bool Run() {
    if (!_core->Run()) {
      LOG(ERROR) << "Failed to call Run with paddle predictor";
      return false;
    }
    return true;
  }

  virtual int create(const predictor::InferEngineCreationParams& params) = 0;

  virtual int clone(void* origin_core) {
    if (origin_core == NULL) {
      LOG(ERROR) << "origin paddle Predictor is null.";
      return -1;
    }
    Predictor* p_predictor = static_cast<Predictor*>(origin_core);
    _core = p_predictor->Clone();
    if (_core.get() == NULL) {
      LOG(ERROR) << "failed to clone paddle predictor: " << origin_core;
      return -1;
    }
    return 0;
  }

  virtual void* get() { return _core.get(); }

 protected:
  std::shared_ptr<Predictor> _core;
};

// Engine for models packaged as single __model__ / __params__ files.
class FluidCpuAnalysisCore : public FluidFamilyCore {
 public:
  int create(const predictor::InferEngineCreationParams& params) {
    std::string data_path = params.get_path();
    if (access(data_path.c_str(), F_OK) == -1) {
      LOG(ERROR) << "create paddle predictor failed, path does not exist: "
                 << data_path;
      return -1;
    }

    Config config;
    config.SetParamsFile(data_path + "/__params__");
    config.SetProgFile(data_path + "/__model__");
    config.DisableGpu();
    config.SetCpuMathLibraryNumThreads(1);

    if (params.enable_memory_optimization()) {
      config.EnableMemoryOptim();
    }

    config.SwitchSpecifyInputNames(true);
    AutoLock lock(GlobalPaddleCreateMutex::instance());
    _core = CreatePredictor(config);
    if (NULL == _core.get()) {
      LOG(ERROR) << "create paddle predictor failed, path: " << data_path;
      return -1;
    }

    VLOG(2) << "create paddle predictor success, path: " << data_path;
    return 0;
  }
};

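// Engine for models stored as a directory (loaded via Config::SetModel),
// with IR optimization toggled by the creation params.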
class FluidCpuAnalysisDirCore : public FluidFamilyCore {
 public:
  int create(const predictor::InferEngineCreationParams& params) {
    std::string data_path = params.get_path();
    if (access(data_path.c_str(), F_OK) == -1) {
      LOG(ERROR) << "create paddle predictor failed, path does not exist: "
                 << data_path;
      return -1;
    }

    Config config;
    config.SetModel(data_path);
    config.DisableGpu();
    config.SwitchSpecifyInputNames(true);
    config.SetCpuMathLibraryNumThreads(1);

    if (params.enable_memory_optimization()) {
      config.EnableMemoryOptim();
    }

    if (params.enable_ir_optimization()) {
      config.SwitchIrOptim(true);
    } else {
      config.SwitchIrOptim(false);
    }

    AutoLock lock(GlobalPaddleCreateMutex::instance());
    _core = CreatePredictor(config);
    if (NULL == _core.get()) {
      LOG(ERROR) << "create paddle predictor failed, path: " << data_path;
      return -1;
    }

    VLOG(2) << "create paddle predictor success, path: " << data_path;
    return 0;
  }
};
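
// Usage sketch (illustrative only; set_path() is a hypothetical setter,
// as this header only consumes params.get_path()):
//
//   predictor::InferEngineCreationParams params;
//   params.set_path("./model_dir");  // hypothetical setter
//   FluidCpuAnalysisDirCore core;
//   if (core.create(params) == 0) {
//     auto in = core.GetInputHandle(core.GetInputNames()[0]);
//     // ... fill the input tensor, then:
//     core.Run();
//     auto out = core.GetOutputHandle(core.GetOutputNames()[0]);
//   }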

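// Standalone helper that loads a row x col float matrix from a binary
// parameter file (a 16-byte header followed by raw floats).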
class Parameter {
 public:
  Parameter() : _row(0), _col(0), _params(NULL) {}
  ~Parameter() {
    VLOG(2) << "before destroy Parameter, file_name[" << _file_name << "]";
    destroy();
  }

  int init(int row, int col, const char* file_name) {
    destroy();
    _file_name = file_name;
    _row = row;
    _col = col;
    _params = reinterpret_cast<float*>(malloc(_row * _col * sizeof(float)));
    if (_params == NULL) {
      LOG(ERROR) << "Load " << _file_name << " malloc error.";
      return -1;
    }
    VLOG(2) << "Load parameter file[" << _file_name << "] success.";
    return 0;
  }

  void destroy() {
    _row = 0;
    _col = 0;
    if (_params != NULL) {
      free(_params);
      _params = NULL;
    }
  }

  int load() {
    if (_params == NULL || _row <= 0 || _col <= 0) {
      LOG(ERROR) << "load parameter error [not inited].";
      return -1;
    }

    FILE* fs = fopen(_file_name.c_str(), "rb");
    if (fs == NULL) {
      LOG(ERROR) << "load " << _file_name << " fopen error.";
      return -1;
    }
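    // Skip the 16-byte file header that precedes the raw float matrix.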
    static const uint32_t MODEL_FILE_HEAD_LEN = 16;
    char head[MODEL_FILE_HEAD_LEN] = {0};
    if (fread(head, 1, MODEL_FILE_HEAD_LEN, fs) != MODEL_FILE_HEAD_LEN) {
      destroy();
      LOG(ERROR) << "Load " << _file_name << " read head error.";
      if (fs != NULL) {
        fclose(fs);
        fs = NULL;
      }
      return -1;
    }

    uint32_t matrix_size = _row * _col;
    if (matrix_size == fread(_params, sizeof(float), matrix_size, fs)) {
      if (fs != NULL) {
        fclose(fs);
        fs = NULL;
      }
      VLOG(2) << "load " << _file_name << " read ok.";
      return 0;
    } else {
      LOG(ERROR) << "load " << _file_name << " read error.";
      destroy();
      if (fs != NULL) {
        fclose(fs);
        fs = NULL;
      }
      return -1;
    }
    return 0;
  }

 public:
  std::string _file_name;
  int _row;
  int _col;
  float* _params;
};

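// Engine for encrypted models: reads encrypt_model, encrypt_params and key
// from the model path and decrypts them in memory before creating the
// predictor.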
class FluidCpuAnalysisEncryptCore : public FluidFamilyCore {
 public:
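  // Read an entire binary file into *contents. No error checking: the file
  // is assumed to exist and be readable.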
  void ReadBinaryFile(const std::string& filename, std::string* contents) {
    std::ifstream fin(filename, std::ios::in | std::ios::binary);
    fin.seekg(0, std::ios::end);
    contents->clear();
    contents->resize(fin.tellg());
    fin.seekg(0, std::ios::beg);
    fin.read(&(contents->at(0)), contents->size());
    fin.close();
  }

  int create(const predictor::InferEngineCreationParams& params) {
    std::string data_path = params.get_path();
    if (access(data_path.c_str(), F_OK) == -1) {
      LOG(ERROR) << "create paddle predictor failed, path does not exist: "
                 << data_path;
      return -1;
    }

    std::string model_buffer, params_buffer, key_buffer;
    ReadBinaryFile(data_path + "encrypt_model", &model_buffer);
    ReadBinaryFile(data_path + "encrypt_params", &params_buffer);
    ReadBinaryFile(data_path + "key", &key_buffer);

    VLOG(2) << "preparing encrypted model";

    auto cipher = paddle::MakeCipher("");
    std::string real_model_buffer = cipher->Decrypt(model_buffer, key_buffer);
    std::string real_params_buffer = cipher->Decrypt(params_buffer, key_buffer);

    Config analysis_config;
    analysis_config.SetModelBuffer(&real_model_buffer[0],
                                   real_model_buffer.size(),
                                   &real_params_buffer[0],
                                   real_params_buffer.size());
    analysis_config.DisableGpu();
    analysis_config.SetCpuMathLibraryNumThreads(1);
    if (params.enable_memory_optimization()) {
      analysis_config.EnableMemoryOptim();
    }
    analysis_config.SwitchSpecifyInputNames(true);
    AutoLock lock(GlobalPaddleCreateMutex::instance());
    VLOG(2) << "decrypt model file success";
    _core = CreatePredictor(analysis_config);
    if (NULL == _core.get()) {
      LOG(ERROR) << "create paddle predictor failed, path: " << data_path;
      return -1;
    }
    VLOG(2) << "create paddle predictor success, path: " << data_path;
    return 0;
  }
};

}  // namespace fluid_cpu
}  // namespace paddle_serving
}  // namespace baidu