/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/inference/io.h"

#include <algorithm>
#include <fstream>
#include <memory>
#include <sstream>
#include <string>
#include <vector>
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/feed_fetch_type.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/platform/cpu_helper.h"
#include "paddle/fluid/pybind/pybind.h"

DEFINE_string(devices, "", "The devices to be used which is joined by comma.");
DEFINE_bool(init_p2p, false, "Whether to init p2p.");
DEFINE_int32(math_num_threads, 1,
             "Number of threads used to run math functions.");

namespace paddle {
namespace inference {

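// Initialize the inference runtime: forward argv to gflags, cap the number of
// math threads, and bring up the devices listed in the --devices flag.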
void Init(const std::vector<std::string> argv) {
  framework::InitGflags(argv);
  platform::SetNumThreads(FLAGS_math_num_threads);
  // init devices: parse the comma-separated device ids in FLAGS_devices
  std::vector<int> devices;
  std::string token;
  std::istringstream tokenStream(FLAGS_devices);
  while (std::getline(tokenStream, token, ',')) {
    devices.push_back(std::stoi(token));
  }
  framework::InitDevices(FLAGS_init_p2p, devices);
}

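// Read the whole binary file `filename` into `contents`; fails with
// PADDLE_ENFORCE if the file cannot be opened.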
void ReadBinaryFile(const std::string& filename, std::string* contents) {
  std::ifstream fin(filename, std::ios::in | std::ios::binary);
  PADDLE_ENFORCE(static_cast<bool>(fin), "Cannot open file %s", filename);
  fin.seekg(0, std::ios::end);
  contents->clear();
  contents->resize(fin.tellg());
  fin.seekg(0, std::ios::beg);
  if (!contents->empty()) {
    fin.read(&(contents->at(0)), contents->size());
  }
  fin.close();
}

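// A variable counts as persistable (i.e. a parameter that must be loaded)
// unless it is one of the program's feed/fetch bookkeeping variables.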
bool IsPersistable(const framework::VarDesc* var) {
  if (var->Persistable() &&
      var->GetType() != framework::proto::VarType::FEED_MINIBATCH &&
      var->GetType() != framework::proto::VarType::FETCH_LIST) {
    return true;
  }
  return false;
}

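// Build a temporary program holding one "load" op per persistable variable
// (or a single "load_combine" op when param_filename is given) and run it so
// that all parameters are materialized in `scope`.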
void LoadPersistables(framework::Executor* executor, framework::Scope* scope,
                      const framework::ProgramDesc& main_program,
                      const std::string& dirname,
                      const std::string& param_filename) {
  const framework::BlockDesc& global_block = main_program.Block(0);

  framework::ProgramDesc* load_program = new framework::ProgramDesc();
  framework::BlockDesc* load_block = load_program->MutableBlock(0);
  std::vector<std::string> paramlist;

  for (auto* var : global_block.AllVars()) {
    if (IsPersistable(var)) {
      VLOG(3) << "persistable variable's name: " << var->Name();

      framework::VarDesc* new_var = load_block->Var(var->Name());
      new_var->SetShape(var->GetShape());
      new_var->SetDataType(var->GetDataType());
      new_var->SetType(var->GetType());
      new_var->SetLoDLevel(var->GetLoDLevel());
      new_var->SetPersistable(true);

      if (!param_filename.empty()) {
        paramlist.push_back(new_var->Name());
      } else {
        // append one "load" op per parameter, each reading its own file
        framework::OpDesc* op = load_block->AppendOp();
        op->SetType("load");
        op->SetOutput("Out", {new_var->Name()});
        op->SetAttr("file_path", {dirname + "/" + new_var->Name()});
        op->CheckAttrs();
      }
    }
  }

  if (!param_filename.empty()) {
    // sort paramlist to have consistent ordering
    std::sort(paramlist.begin(), paramlist.end());
    // append just the load_combine op
    framework::OpDesc* op = load_block->AppendOp();
    op->SetType("load_combine");
    op->SetOutput("Out", paramlist);
    op->SetAttr("file_path", {param_filename});
    op->CheckAttrs();
  }

  executor->Run(*load_program, scope, 0, true, true);

  delete load_program;
}

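// Load a model whose ProgramDesc is stored in `dirname + "/__model__"` and
// whose parameters are stored as separate files under `dirname`.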
std::unique_ptr<framework::ProgramDesc> Load(framework::Executor* executor,
                                             framework::Scope* scope,
                                             const std::string& dirname) {
  std::string model_filename = dirname + "/__model__";
  std::string program_desc_str;
  VLOG(3) << "loading model from " << model_filename;
  ReadBinaryFile(model_filename, &program_desc_str);

  std::unique_ptr<framework::ProgramDesc> main_program(
      new framework::ProgramDesc(program_desc_str));

  LoadPersistables(executor, scope, *main_program, dirname, "");
  return main_program;
}

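// Load a model stored in two files: `prog_filename` holds the ProgramDesc and
// `param_filename` holds all parameters in one combined file.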
std::unique_ptr<framework::ProgramDesc> Load(
    framework::Executor* executor, framework::Scope* scope,
    const std::string& prog_filename, const std::string& param_filename) {
  std::string model_filename = prog_filename;
  std::string program_desc_str;
  ReadBinaryFile(model_filename, &program_desc_str);

  std::unique_ptr<framework::ProgramDesc> main_program(
      new framework::ProgramDesc(program_desc_str));

  LoadPersistables(executor, scope, *main_program, "", param_filename);
  return main_program;
}

}  // namespace inference
}  // namespace paddle