/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/inference/io.h"

#include <algorithm>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/feed_fetch_type.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/version.h"
#include "paddle/fluid/platform/cpu_helper.h"
#include "paddle/fluid/pybind/pybind.h"

DEFINE_string(devices, "", "The devices to be used, joined by commas.");
DEFINE_bool(init_p2p, false, "Whether to init p2p.");
DEFINE_int32(math_num_threads, 1,
             "Number of threads used to run math functions.");

namespace paddle {
namespace inference {

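// Initializes the inference runtime: forwards argv to gflags, sets the
// math thread count, and initializes the devices given by the
// comma-separated FLAGS_devices (P2P initialization controlled by
// FLAGS_init_p2p).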
void Init(const std::vector<std::string> argv) {
  framework::InitGflags(argv);
  platform::SetNumThreads(FLAGS_math_num_threads);
  // init devices
  std::vector<int> devices;
  std::string token;
  std::istringstream tokenStream(FLAGS_devices);
  while (std::getline(tokenStream, token, ',')) {
    devices.push_back(std::stoi(token));
  }
  framework::InitDevices(FLAGS_init_p2p, devices);
}

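// Reads the whole binary file at `filename` into `contents`; fails if the
// file cannot be opened.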
void ReadBinaryFile(const std::string& filename, std::string* contents) {
  std::ifstream fin(filename, std::ios::in | std::ios::binary);
  PADDLE_ENFORCE(static_cast<bool>(fin), "Cannot open file %s", filename);
  fin.seekg(0, std::ios::end);
  contents->clear();
  contents->resize(fin.tellg());
  fin.seekg(0, std::ios::beg);
  fin.read(&(contents->at(0)), contents->size());
  fin.close();
}

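// A variable is treated as a loadable parameter only if it is persistable
// and is not one of the feed/fetch/RAW bookkeeping variables.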
bool IsPersistable(const framework::VarDesc* var) {
  if (var->Persistable() &&
      var->GetType() != framework::proto::VarType::FEED_MINIBATCH &&
      var->GetType() != framework::proto::VarType::FETCH_LIST &&
      var->GetType() != framework::proto::VarType::RAW) {
    return true;
  }
  return false;
}

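// Loads every persistable variable of `main_program` into `scope` by
// building and running a temporary load program: one `load` op per
// parameter when parameters are stored as separate files under `dirname`,
// or a single `load_combine` op when they are packed into `param_filename`
// (or passed in from memory).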
void LoadPersistables(framework::Executor* executor, framework::Scope* scope,
                      const framework::ProgramDesc& main_program,
                      const std::string& dirname,
                      const std::string& param_filename,
                      bool model_from_memory = false) {
  const framework::BlockDesc& global_block = main_program.Block(0);

  framework::ProgramDesc* load_program = new framework::ProgramDesc();
  framework::BlockDesc* load_block = load_program->MutableBlock(0);
  std::vector<std::string> paramlist;

  for (auto* var : global_block.AllVars()) {
    if (IsPersistable(var)) {
      VLOG(4) << "persistable variable's name: " << var->Name();

      framework::VarDesc* new_var = load_block->Var(var->Name());
      new_var->SetShape(var->GetShape());
      new_var->SetDataType(var->GetDataType());
      new_var->SetType(var->GetType());
      new_var->SetLoDLevel(var->GetLoDLevel());
      new_var->SetPersistable(true);

      if (!param_filename.empty()) {
        paramlist.push_back(new_var->Name());
      } else {
        // append a load op that reads this parameter from a separate file
        framework::OpDesc* op = load_block->AppendOp();
        op->SetType("load");
        op->SetOutput("Out", {new_var->Name()});
        op->SetAttr("file_path", {dirname + "/" + new_var->Name()});
        op->CheckAttrs();
      }
    }
  }

  if (!param_filename.empty()) {
    // sort paramlist to have consistent ordering
    std::sort(paramlist.begin(), paramlist.end());
    // append just the load_combine op
    framework::OpDesc* op = load_block->AppendOp();
    op->SetType("load_combine");
    op->SetOutput("Out", paramlist);
    op->SetAttr("file_path", {param_filename});
    op->SetAttr("model_from_memory", {model_from_memory});
    op->CheckAttrs();
  }

  executor->Run(*load_program, scope, 0, true, true);

  delete load_program;
}

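// Loads the program from "<dirname>/__model__" and its separately saved
// parameters from `dirname`. A minimal usage sketch (the model directory
// below is illustrative, not taken from this file):
//
//   platform::CPUPlace place;
//   framework::Executor executor(place);
//   framework::Scope scope;
//   auto program = Load(&executor, &scope, "/path/to/model_dir");
//   // run `*program` with the executor, e.g.
//   // executor.Run(*program, &scope, 0, true, true);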
std::unique_ptr<framework::ProgramDesc> Load(framework::Executor* executor,
                                             framework::Scope* scope,
                                             const std::string& dirname) {
  std::string model_filename = dirname + "/__model__";
  std::string program_desc_str;
  VLOG(3) << "loading model from " << model_filename;
  ReadBinaryFile(model_filename, &program_desc_str);

  std::unique_ptr<framework::ProgramDesc> main_program(
      new framework::ProgramDesc(program_desc_str));
  PADDLE_ENFORCE(framework::IsProgramVersionSupported(main_program->Version()),
                 "model version %ld is not supported.",
                 main_program->Version());

  // model_from_memory is false when loading from separate parameter files.
  LoadPersistables(executor, scope, *main_program, dirname, "",
                   false /* model_from_memory */);
  return main_program;
}

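// Loads the program from `prog_filename` and all parameters from the single
// combined file `param_filename`.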
std::unique_ptr<framework::ProgramDesc> Load(
    framework::Executor* executor, framework::Scope* scope,
    const std::string& prog_filename, const std::string& param_filename) {
  std::string program_desc_str;
  ReadBinaryFile(prog_filename, &program_desc_str);

  std::unique_ptr<framework::ProgramDesc> main_program(
      new framework::ProgramDesc(program_desc_str));
  PADDLE_ENFORCE(framework::IsProgramVersionSupported(main_program->Version()),
                 "model version %ld is not supported.",
                 main_program->Version());

  LoadPersistables(executor, scope, *main_program, "", param_filename,
                   false /* model_from_memory */);
  return main_program;
}

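// Same as Load(), except that the program and the combined parameters are
// passed as in-memory buffers rather than file paths.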
std::unique_ptr<framework::ProgramDesc> LoadFromMemory(
    framework::Executor* executor, framework::Scope* scope,
    const std::string& prog_buffer, const std::string& param_buffer) {
  std::unique_ptr<framework::ProgramDesc> main_program(
      new framework::ProgramDesc(prog_buffer));
  PADDLE_ENFORCE(framework::IsProgramVersionSupported(main_program->Version()),
                 "model version %ld is not supported.",
                 main_program->Version());

  LoadPersistables(executor, scope, *main_program, "", param_buffer,
                   true /* model_from_memory */);
  return main_program;
}

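// Saves the listed variables from `scope` into "<dirname>/param" by running
// a single save_combine op on CPU.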
void SaveVars(const framework::Scope& scope,
              const std::vector<std::string>& vars, const std::string& dirname,
              bool predicate) {
  framework::ProgramDesc prog;
  auto* block = prog.MutableBlock(0);
  auto* op = block->AppendOp();
  op->SetType("save_combine");
  op->SetInput("X", vars);
  op->SetAttr("file_path", dirname + "/param");
  op->CheckAttrs();

  platform::CPUPlace place;
  framework::Executor exe(place);
  exe.Run(prog, const_cast<framework::Scope*>(&scope), 0, true, true);
}

}  // namespace inference
}  // namespace paddle