general_model.h
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once

#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>

#include <pybind11/numpy.h>
#include <algorithm>
#include <fstream>
#include <map>
#include <string>
#include <utility>  // move
#include <vector>
#include "core/sdk-cpp/builtin_format.pb.h"
#include "core/sdk-cpp/general_model_service.pb.h"
#include "core/sdk-cpp/include/common.h"
#include "core/sdk-cpp/include/predictor_sdk.h"
using baidu::paddle_serving::sdk_cpp::Predictor;
using baidu::paddle_serving::sdk_cpp::PredictorApi;

DECLARE_bool(profile_client);
DECLARE_bool(profile_server);

// Given some input data, pack it into a protobuf request and send it to the server.
namespace py = pybind11;
namespace baidu {
namespace paddle_serving {
namespace general_model {

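// ModelRes holds the fetch results returned by a single model (engine):
// int64 and float tensor values plus their shapes and LoD info, all keyed
// by fetch variable name.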
class ModelRes {
 public:
  ModelRes() {}
  ModelRes(const ModelRes& res) {
    _engine_name = res._engine_name;
    _int64_value_map.insert(res._int64_value_map.begin(),
                            res._int64_value_map.end());
    _float_value_map.insert(res._float_value_map.begin(),
                            res._float_value_map.end());
    _shape_map.insert(res._shape_map.begin(), res._shape_map.end());
    _lod_map.insert(res._lod_map.begin(), res._lod_map.end());
  }
  ModelRes(ModelRes&& res) {
    _engine_name = std::move(res._engine_name);
    _int64_value_map.insert(
        std::make_move_iterator(std::begin(res._int64_value_map)),
        std::make_move_iterator(std::end(res._int64_value_map)));
    _float_value_map.insert(
        std::make_move_iterator(std::begin(res._float_value_map)),
        std::make_move_iterator(std::end(res._float_value_map)));
    _shape_map.insert(std::make_move_iterator(std::begin(res._shape_map)),
                      std::make_move_iterator(std::end(res._shape_map)));
    _lod_map.insert(std::make_move_iterator(std::begin(res._lod_map)),
                    std::make_move_iterator(std::end(res._lod_map)));
  }
  ~ModelRes() {}
  const std::vector<int64_t>& get_int64_by_name(const std::string& name) {
    return _int64_value_map[name];
  }
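  // The *_with_rv accessors return an rvalue reference so the caller can
  // move the stored vector out without copying; the map entry is left in a
  // moved-from state afterwards.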
  std::vector<int64_t>&& get_int64_by_name_with_rv(const std::string& name) {
    return std::move(_int64_value_map[name]);
  }
  const std::vector<float>& get_float_by_name(const std::string& name) {
    return _float_value_map[name];
  }
  std::vector<float>&& get_float_by_name_with_rv(const std::string& name) {
    return std::move(_float_value_map[name]);
  }
  const std::vector<int>& get_shape(const std::string& name) {
    return _shape_map[name];
  }
  const std::vector<int>& get_lod(const std::string& name) {
    return _lod_map[name];
  }
  void set_engine_name(const std::string& engine_name) {
    _engine_name = engine_name;
  }
  const std::string& engine_name() { return _engine_name; }
  ModelRes& operator=(ModelRes&& res) {
    if (this != &res) {
      _engine_name = std::move(res._engine_name);
      _int64_value_map.insert(
          std::make_move_iterator(std::begin(res._int64_value_map)),
          std::make_move_iterator(std::end(res._int64_value_map)));
      _float_value_map.insert(
          std::make_move_iterator(std::begin(res._float_value_map)),
          std::make_move_iterator(std::end(res._float_value_map)));
      _shape_map.insert(std::make_move_iterator(std::begin(res._shape_map)),
                        std::make_move_iterator(std::end(res._shape_map)));
      _lod_map.insert(std::make_move_iterator(std::begin(res._lod_map)),
                      std::make_move_iterator(std::end(res._lod_map)));
    }
    return *this;
  }

 public:
  std::string _engine_name;
  std::map<std::string, std::vector<int64_t>> _int64_value_map;
  std::map<std::string, std::vector<float>> _float_value_map;
  std::map<std::string, std::vector<int>> _shape_map;
  std::map<std::string, std::vector<int>> _lod_map;
};

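// PredictorRes collects the ModelRes of every model that produced output for
// one request; results are addressed by model index plus fetch name.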
class PredictorRes {
 public:
  PredictorRes() {}
  ~PredictorRes() {}

 public:
  void clear() {
    _models.clear();
    _engine_names.clear();
  }
  const std::vector<int64_t>& get_int64_by_name(const int model_idx,
                                                const std::string& name) {
    return _models[model_idx].get_int64_by_name(name);
  }
  std::vector<int64_t>&& get_int64_by_name_with_rv(const int model_idx,
                                                   const std::string& name) {
    return std::move(_models[model_idx].get_int64_by_name_with_rv(name));
  }
  const std::vector<float>& get_float_by_name(const int model_idx,
                                              const std::string& name) {
    return _models[model_idx].get_float_by_name(name);
  }
  std::vector<float>&& get_float_by_name_with_rv(const int model_idx,
                                                 const std::string& name) {
    return std::move(_models[model_idx].get_float_by_name_with_rv(name));
  }
  const std::vector<int>& get_shape(const int model_idx,
                                    const std::string& name) {
    return _models[model_idx].get_shape(name);
  }
  const std::vector<int>& get_lod(const int model_idx,
                                  const std::string& name) {
    return _models[model_idx].get_lod(name);
  }
  void add_model_res(ModelRes&& res) {
    _engine_names.push_back(res.engine_name());
    _models.emplace_back(std::move(res));
  }
  void set_variant_tag(const std::string& variant_tag) {
    _variant_tag = variant_tag;
  }
  const std::string& variant_tag() { return _variant_tag; }
  const std::vector<std::string>& get_engine_names() { return _engine_names; }

 private:
  std::vector<ModelRes> _models;
  std::string _variant_tag;
  std::vector<std::string> _engine_names;
};

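// PredictorClient is the client-side entry point: it loads the client and SDK
// configuration, creates a Predictor through PredictorApi, and issues batched
// prediction requests (exposed to Python, hence the py::array_t overload).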
class PredictorClient {
 public:
  PredictorClient() {}
  ~PredictorClient() {}

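  // Forward command-line style arguments to gflags (e.g. the profiling
  // switches declared above).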
  void init_gflags(std::vector<std::string> argv);

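  // Parse the client-side model config and build the feed/fetch lookup
  // tables (name -> index/type) kept as members below.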
  int init(const std::string& client_conf);

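  // Record the directory and file name of the SDK (predictor) configuration.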
  void set_predictor_conf(const std::string& conf_path,
                          const std::string& conf_file);

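  // Create the predictor from an in-memory SDK description string.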
  int create_predictor_by_desc(const std::string& sdk_desc);

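  // Create the predictor from the configuration set via set_predictor_conf().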
  int create_predictor();

  int destroy_predictor();

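  // Run one prediction over a batch. Float and int64 feeds are passed as
  // nested vectors (batch x feed x values) together with their names and
  // shapes; fetched outputs are written into predict_res_batch.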
  int batch_predict(
      const std::vector<std::vector<std::vector<float>>>& float_feed_batch,
      const std::vector<std::string>& float_feed_name,
      const std::vector<std::vector<int>>& float_shape,
      const std::vector<std::vector<std::vector<int64_t>>>& int_feed_batch,
      const std::vector<std::string>& int_feed_name,
      const std::vector<std::vector<int>>& int_shape,
      const std::vector<std::string>& fetch_name,
      PredictorRes& predict_res_batch,  // NOLINT
      const int& pid);

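  // Same as batch_predict, but feed data is passed as numpy arrays
  // (py::array_t) so it can be consumed directly from the Python client.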
  int numpy_predict(
      const std::vector<std::vector<py::array_t<float>>>& float_feed_batch,
      const std::vector<std::string>& float_feed_name,
      const std::vector<std::vector<int>>& float_shape,
      const std::vector<std::vector<py::array_t<int64_t>>>& int_feed_batch,
      const std::vector<std::string>& int_feed_name,
      const std::vector<std::vector<int>>& int_shape,
      const std::vector<std::string>& fetch_name,
      PredictorRes& predict_res_batch,  // NOLINT
      const int& pid);

 private:
  PredictorApi _api;
  Predictor* _predictor;
  std::string _predictor_conf;
  std::string _predictor_path;
  std::string _conf_file;
  std::map<std::string, int> _feed_name_to_idx;
  std::map<std::string, int> _fetch_name_to_idx;
  std::map<std::string, std::string> _fetch_name_to_var_name;
  std::map<std::string, int> _fetch_name_to_type;
  std::vector<std::vector<int>> _shape;
  std::vector<int> _type;
  std::vector<int64_t> _last_request_ts;
};

}  // namespace general_model
}  // namespace paddle_serving
}  // namespace baidu
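
// A minimal sketch of the intended call sequence, assuming illustrative file
// and variable names (not part of this header):
//
//   baidu::paddle_serving::general_model::PredictorClient client;
//   client.init_gflags({"serving_client"});
//   client.init("serving_client_conf.prototxt");
//   client.set_predictor_conf("./", "predictor.conf");
//   client.create_predictor();
//
//   baidu::paddle_serving::general_model::PredictorRes res;
//   // fill feed values, names and shapes, then call batch_predict() or
//   // numpy_predict(); read outputs back via res.get_float_by_name(idx, name)
//   // or res.get_int64_by_name(idx, name)
//   client.destroy_predictor();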

/* vim: set expandtab ts=4 sw=4 sts=4 tw=100: */