// pybind_general_model.cpp
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <Python.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include <unordered_map>
#include "core/general-client/include/general_model.h"

namespace py = pybind11;

using baidu::paddle_serving::general_model::FetchedMap;

namespace baidu {
namespace paddle_serving {
namespace general_model {

// serving_client: Python extension module wrapping PredictorClient.
PYBIND11_MODULE(serving_client, m) {
  m.doc() = R"pddoc(Python bindings for the Paddle Serving general model client.)pddoc";
  py::class_<PredictorClient>(m, "PredictorClient", py::buffer_protocol())
      .def(py::init())
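      // init: initialize the client from a configuration file.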
      .def("init",
           [](PredictorClient &self, const std::string &conf) {
             self.init(conf);
           })
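      // set_predictor_conf: record the directory and file name of the
      // predictor configuration.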
      .def("set_predictor_conf",
           [](PredictorClient &self,
              const std::string &conf_path,
              const std::string &conf_file) {
             self.set_predictor_conf(conf_path, conf_file);
           })
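      // create_predictor: build the underlying predictor using the
      // configuration set above.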
      .def("create_predictor",
           [](PredictorClient &self) { self.create_predictor(); })
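      // predict: run inference on a single sample; float and int64 feeds are
      // paired with their variable names, and fetch_name selects the outputs.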
      .def("predict",
           [](PredictorClient &self,
              const std::vector<std::vector<float>> &float_feed,
              const std::vector<std::string> &float_feed_name,
              const std::vector<std::vector<int64_t>> &int_feed,
              const std::vector<std::string> &int_feed_name,
              const std::vector<std::string> &fetch_name) {
             return self.predict(float_feed,
                                 float_feed_name,
                                 int_feed,
                                 int_feed_name,
                                 fetch_name);
           })

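      // predict_for_batch: batched variant; each feed is a vector of
      // per-sample tensors, with batch_size giving the number of samples.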
      .def("predict_for_batch",
           [](PredictorClient &self,
              const std::vector<std::vector<std::vector<float>>>
                  &float_feed_batch,
              const std::vector<std::string> &float_feed_name,
              const std::vector<std::vector<std::vector<int64_t>>>
                  &int_feed_batch,
              const std::vector<std::string> &int_feed_name,
              const std::vector<std::string> &fetch_name,
              const int64_t &batch_size) {
             return self.predict_for_batch(float_feed_batch,
                                           float_feed_name,
                                           int_feed_batch,
                                           int_feed_name,
                                           fetch_name,
                                           batch_size);
           });
}

}  // namespace general_model
}  // namespace paddle_serving
}  // namespace baidu
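
// Minimal usage sketch (hypothetical): once this extension is built and
// importable as `serving_client`, the bindings above could be exercised from
// Python roughly as follows. File names and variable names are placeholders.
//
//   from serving_client import PredictorClient
//
//   client = PredictorClient()
//   client.init("serving_client_conf.prototxt")
//   client.set_predictor_conf("./conf", "predictor.conf")
//   client.create_predictor()
//   fetched = client.predict(
//       [[0.1, 0.2, 0.3]], ["float_input"],   # float feeds and their names
//       [[1, 2, 3]], ["int_input"],           # int64 feeds and their names
//       ["prob"])                             # output variables to fetch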