// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "paddle/fluid/lite/core/op_executor.h"
#include "paddle/fluid/lite/core/op_lite.h"
#include "paddle/fluid/lite/core/optimizer.h"
#include "paddle/fluid/lite/core/program.h"
#include "paddle/fluid/lite/core/types.h"
#include "paddle/fluid/lite/model_parser/model_parser.h"

namespace paddle {
namespace lite {

struct Config {};
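
// A minimal usage sketch (illustrative only; model_dir, prefer_place and
// valid_places stand for caller-supplied values, not names defined in this
// header):
//
//   lite::Predictor predictor;
//   predictor.Build(model_dir, prefer_place, valid_places);
//   lite::Tensor* x = predictor.GetInput(0);
//   // ... resize x and fill it with the input data ...
//   predictor.Run();
//   const lite::Tensor* y = predictor.GetOutput(0);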

class Predictor {
 public:
  Predictor() { scope_ = std::make_shared<Scope>(); }

  void Build(const std::string& model_path, const Place& prefer_place,
             const std::vector<Place>& valid_places) {
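    // Load the model into scope_, translate it into a lite Program, run the
    // optimizer for the preferred/valid places, and keep the resulting
    // runtime program for Run().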
    framework::proto::ProgramDesc prog;
    LoadModel(model_path, scope_.get(), &prog);
    framework::ProgramDesc prog_desc(prog);

    Program program(prog_desc, scope_, valid_places);

    Optimizer optimizer;
    optimizer.KernelPickPreferPlace(prefer_place);
    core::KernelPickFactor factor;
    factor.ConsiderTarget();
    optimizer.Run(std::move(program), valid_places, factor);
    program_ = optimizer.GenRuntimeProgram();
  }
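
  // Inputs and outputs are exchanged through the "feed" and "fetch"
  // variables of the execution scope (fluid's feed/fetch convention).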

  // Get the offset-th input tensor of the feed list, growing the list if
  // needed.
  Tensor* GetInput(size_t offset) {
    auto* _feed_list = program_->exec_scope()->FindVar("feed");
    CHECK(_feed_list) << "no feed variable in exec_scope";
    auto* feed_list = _feed_list->GetMutable<std::vector<Tensor>>();
    if (offset >= feed_list->size()) {
      feed_list->resize(offset + 1);
    }
    return &feed_list->at(offset);
  }

  // Get the offset-th output tensor of the fetch list.
  const Tensor* GetOutput(size_t offset) {
    auto* _fetch_list = program_->exec_scope()->FindVar("fetch");
    CHECK(_fetch_list) << "no fetch variable in exec_scope";
    auto& fetch_list = *_fetch_list->GetMutable<std::vector<lite::Tensor>>();
    CHECK_LT(offset, fetch_list.size()) << "offset " << offset << " overflow";
    return &fetch_list.at(offset);
  }

  void Run() { program_->Run(); }

 private:
  // Holds the variables of the loaded model (weights plus feed/fetch lists).
  std::shared_ptr<Scope> scope_;
  // The optimized runtime program produced by Build().
  std::unique_ptr<RuntimeProgram> program_;
};

}  // namespace lite
}  // namespace paddle