// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <Python.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include <unordered_map>
#include "core/general-client/include/general_model.h"

namespace py = pybind11;

using baidu::paddle_serving::general_model::FetchedMap;

namespace baidu {
namespace paddle_serving {
namespace general_model {

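// Expose the C++ PredictorClient (declared in
// core/general-client/include/general_model.h) to Python as the
// `serving_client` extension module; each .def below wraps one client method.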
PYBIND11_MODULE(serving_client, m) {
  m.doc() = R"pddoc(this is a practice
       )pddoc";
  py::class_<PredictorClient>(m, "PredictorClient", py::buffer_protocol())
      .def(py::init())
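      // Configuration bindings: forward command-line style arguments,
      // initialize from a client configuration file, and set the predictor
      // configuration path and file.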
      .def("init_gflags",
           [](PredictorClient &self, std::vector<std::string> argv) {
             self.init_gflags(argv);
           })
      .def("init",
           [](PredictorClient &self, const std::string &conf) {
             return self.init(conf);
           })
      .def("set_predictor_conf",
           [](PredictorClient &self,
              const std::string &conf_path,
              const std::string &conf_file) {
             self.set_predictor_conf(conf_path, conf_file);
           })
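      // Predictor lifecycle: create a predictor either from an sdk_desc
      // string or via create_predictor(), and release it with
      // destroy_predictor().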
      .def("create_predictor_by_desc",
           [](PredictorClient &self, const std::string & sdk_desc) {
             self.create_predictor_by_desc(sdk_desc); })
      .def("create_predictor",
           [](PredictorClient &self) { self.create_predictor(); })
      .def("destroy_predictor",
           [](PredictorClient &self) { self.destroy_predictor(); })
      .def("predict",
           [](PredictorClient &self,
              const std::vector<std::vector<float>> &float_feed,
              const std::vector<std::string> &float_feed_name,
              const std::vector<std::vector<int64_t>> &int_feed,
              const std::vector<std::string> &int_feed_name,
              const std::vector<std::string> &fetch_name) {
             return self.predict(float_feed,
                                 float_feed_name,
                                 int_feed,
                                 int_feed_name,
                                 fetch_name);
           })

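      // Batched inference: same layout as predict(), except each feed holds
      // one vector of values per sample in the batch.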
      .def("batch_predict",
           [](PredictorClient &self,
              const std::vector<std::vector<std::vector<float>>>
                  &float_feed_batch,
              const std::vector<std::string> &float_feed_name,
              const std::vector<std::vector<std::vector<int64_t>>>
                  &int_feed_batch,
              const std::vector<std::string> &int_feed_name,
              const std::vector<std::string> &fetch_name) {
             return self.batch_predict(float_feed_batch,
                                       float_feed_name,
                                       int_feed_batch,
                                       int_feed_name,
                                       fetch_name);
           });
}

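// A minimal usage sketch from Python, assuming the built extension is
// importable as `serving_client`; the file names and feed/fetch variables
// below are illustrative, not part of this binding:
//
//   from serving_client import PredictorClient
//
//   client = PredictorClient()
//   client.init("serving_client_conf.prototxt")
//   client.set_predictor_conf("./conf", "predictor.prototxt")
//   client.create_predictor()
//   fetch = client.predict(float_feed, float_feed_name,
//                          int_feed, int_feed_name, fetch_name)
//   client.destroy_predictor()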
}  // namespace general_model
}  // namespace paddle_serving
}  // namespace baidu