Unverified commit e45c4242, authored by H huzhiqiang, committed by GitHub

[Framework] Update flatbuffer and opt (#4307)

Parent 515f9a6a
......@@ -131,7 +131,8 @@ TEST(CXXApi, save_model) {
predictor.Build(FLAGS_model_dir, "", "", valid_places);
LOG(INFO) << "Save optimized model to " << FLAGS_optimized_model;
predictor.SaveModel(FLAGS_optimized_model);
predictor.SaveModel(FLAGS_optimized_model,
lite_api::LiteModelType::kProtobuf);
predictor.SaveModel(FLAGS_optimized_model + ".naive",
lite_api::LiteModelType::kNaiveBuffer);
}
......
......@@ -17,6 +17,7 @@
#include <cstdint>
#include <string>
#include <vector>
#include "lite/model_parser/base/traits.h"
#include "lite/utils/cp_logging.h"
namespace paddle {
......@@ -47,30 +48,29 @@ class BlockDescReadAPI {
class BlockDescWriteAPI {
public:
virtual void SetIdx(int32_t idx) { NotImplemented(); }
virtual void SetParentIdx(int32_t idx) { NotImplemented(); }
virtual void ClearVars() { NotImplemented(); }
virtual void ClearOps() { NotImplemented(); }
virtual void SetForwardBlockIdx(int32_t idx) { NotImplemented(); }
virtual void SetIdx(int32_t idx) { LITE_MODEL_INTERFACE_NOT_IMPLEMENTED; }
virtual void SetParentIdx(int32_t idx) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual void ClearVars() { LITE_MODEL_INTERFACE_NOT_IMPLEMENTED; }
virtual void ClearOps() { LITE_MODEL_INTERFACE_NOT_IMPLEMENTED; }
virtual void SetForwardBlockIdx(int32_t idx) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
template <typename T>
T* AddVar() {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return nullptr;
}
template <typename T>
T* AddOp() {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return nullptr;
}
virtual ~BlockDescWriteAPI() = default;
private:
void NotImplemented() const {
LOG(FATAL) << "BlockDescWriteAPI is not available in model read-only mode.";
}
};
// The reading and writing of the model are one-time and separate.
......
......@@ -62,27 +62,24 @@ class OpDescReadAPI {
class OpDescWriteAPI {
public:
virtual void SetType(const std::string& type) { NotImplemented(); }
virtual void SetType(const std::string& type) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual void SetInput(const std::string& param,
const std::vector<std::string>& args) {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual void SetOutput(const std::string& param,
const std::vector<std::string>& args) {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
template <typename T>
void SetAttr(const std::string& name, const T& v) {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual ~OpDescWriteAPI() = default;
private:
void NotImplemented() const {
LOG(FATAL) << "OpDescWriteAPI is not available in model read-only mode.";
}
};
// The reading and writing of the model are one-time and separate.
......
......@@ -34,17 +34,20 @@ class ParamDescReadAPI {
class ParamDescWriteAPI {
public:
virtual void SetName(const std::string &name) { NotImplemented(); }
virtual void SetDim(const std::vector<int64_t> &dim) { NotImplemented(); }
virtual void SetDataType(VarDataType data_type) { NotImplemented(); }
virtual void SetData(const void *data, size_t byte_size) { NotImplemented(); }
virtual void SetName(const std::string &name) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual void SetDim(const std::vector<int64_t> &dim) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual void SetDataType(VarDataType data_type) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual void SetData(const void *data, size_t byte_size) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual ~ParamDescWriteAPI() = default;
private:
void NotImplemented() const {
LOG(FATAL) << "ParamDescWriteAPI is not available in model read-only mode.";
}
};
class CombinedParamsDescReadAPI {
......@@ -57,16 +60,10 @@ class CombinedParamsDescReadAPI {
class CombinedParamsDescWriteAPI {
public:
virtual ParamDescWriteAPI *AddParamDesc() {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return nullptr;
}
virtual ~CombinedParamsDescWriteAPI() = default;
private:
void NotImplemented() const {
LOG(FATAL) << "CombinedParamsDescWriteAPI is not available in model "
"read-only mode.";
}
};
// The reading and writing of the model are one-time and separate.
......
......@@ -14,6 +14,7 @@
#pragma once
#include "lite/model_parser/base/traits.h"
#include "lite/utils/cp_logging.h"
namespace paddle {
......@@ -36,22 +37,18 @@ class ProgramDescReadAPI {
class ProgramDescWriteAPI {
public:
virtual void ClearBlocks() { NotImplemented(); }
virtual void SetVersion(int64_t version) { NotImplemented(); }
virtual void ClearBlocks() { LITE_MODEL_INTERFACE_NOT_IMPLEMENTED; }
virtual void SetVersion(int64_t version) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
template <typename T>
T* AddBlock() {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return nullptr;
}
virtual ~ProgramDescWriteAPI() = default;
private:
void NotImplemented() const {
LOG(FATAL)
<< "ProgramDescWriteAPI is not available in model read-only mode.";
}
};
// The reading and writing of the model are one-time and separate.
......
......@@ -19,6 +19,10 @@
#include "lite/api/paddle_place.h"
#include "lite/utils/cp_logging.h"
#define LITE_MODEL_INTERFACE_NOT_IMPLEMENTED \
LOG(FATAL) << "This additional interface is temporarily " \
"unavailable in flatbuffers read-only mode."
namespace paddle {
namespace lite {
......
......@@ -33,16 +33,19 @@ class VarDescReadAPI {
class VarDescWriteAPI {
public:
virtual void SetName(std::string name) { NotImplemented(); }
virtual void SetType(VarDataType type) { NotImplemented(); }
virtual void SetPersistable(bool persistable) { NotImplemented(); }
virtual void SetShape(const std::vector<int64_t>& dims) { NotImplemented(); }
virtual ~VarDescWriteAPI() = default;
private:
void NotImplemented() const {
LOG(FATAL) << "VarDescWriteAPI is not available in model read-only mode.";
virtual void SetName(std::string name) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual void SetType(VarDataType type) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual void SetPersistable(bool persistable) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual void SetShape(const std::vector<int64_t>& dims) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
virtual ~VarDescWriteAPI() = default;
};
// The reading and writing of the model are one-time and separate.
......
......@@ -51,7 +51,7 @@ class BlockDescView : public BlockDescAPI {
template <typename T>
T* GetVar(int32_t idx) {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return nullptr;
}
......@@ -66,7 +66,7 @@ class BlockDescView : public BlockDescAPI {
template <typename T>
T* GetOp(int32_t idx) {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return nullptr;
}
......@@ -82,12 +82,6 @@ class BlockDescView : public BlockDescAPI {
proto::BlockDesc const* desc_; // not_own
std::vector<VarDescView> vars_;
std::vector<OpDescView> ops_;
private:
void NotImplemented() const {
LOG(FATAL) << "The additional interfaces of BlockDescView is temporarily "
"unavailable in read-only mode.";
}
};
#ifdef LITE_WITH_FLATBUFFERS_DESC
......
......@@ -23,12 +23,20 @@ namespace paddle {
namespace lite {
namespace fbs {
std::vector<char> LoadFile(const std::string& path) {
std::vector<char> LoadFile(const std::string& path,
const size_t& offset,
const size_t& size) {
// open file in readonly mode
FILE* file = fopen(path.c_str(), "rb");
CHECK(file);
fseek(file, 0, SEEK_END);
uint64_t length = ftell(file);
rewind(file);
CHECK(file) << "Unable to open file: " << path;
// If `size` is 0, read from `offset` to the end of the file;
// otherwise read exactly `size` bytes starting at `offset`.
uint64_t length = size;
if (size == 0) {
fseek(file, 0L, SEEK_END);
length = ftell(file) - offset;
}
fseek(file, offset, SEEK_SET);
// read data of `length` into buf
std::vector<char> buf(length);
CHECK_EQ(fread(buf.data(), 1, length, file), length);
fclose(file);
......
......@@ -26,7 +26,9 @@ namespace paddle {
namespace lite {
namespace fbs {
std::vector<char> LoadFile(const std::string& path);
std::vector<char> LoadFile(const std::string& path,
const size_t& offset = 0,
const size_t& size = 0);
void SaveFile(const std::string& path, const std::vector<char>& cache);
void SetScopeWithCombinedParams(lite::Scope* scope,
......
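// Illustrative usage sketch (not part of this commit): the extended LoadFile
// signature above lets callers read a sub-range of a ".nb" file, where a size
// of 0 means "read from `offset` to the end of the file". The declaration is
// repeated here so the snippet is self-contained; `header_size` and
// `topo_size` are hypothetical values that would normally be parsed from the
// model header (see LoadModelFbsFromFile further below).
#include <string>
#include <vector>

namespace paddle {
namespace lite {
namespace fbs {
std::vector<char> LoadFile(const std::string& path,
                           const size_t& offset = 0,
                           const size_t& size = 0);
}  // namespace fbs
}  // namespace lite
}  // namespace paddle

void LoadSectionsSketch(const std::string& model_path,
                        size_t header_size,
                        size_t topo_size) {
  // Read only the flatbuffer topology section: skip `header_size` bytes,
  // then read exactly `topo_size` bytes.
  std::vector<char> topology =
      paddle::lite::fbs::LoadFile(model_path, header_size, topo_size);
  // Read the remaining bytes (the combined params): size == 0 reads to EOF.
  std::vector<char> params =
      paddle::lite::fbs::LoadFile(model_path, header_size + topo_size);
}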
......@@ -154,19 +154,41 @@ class OpDescView : public OpDescAPI {
}
const std::map<std::string, std::vector<std::string>>& inputs() const {
NotImplemented();
for (const auto& var : *desc_->inputs()) {
std::pair<std::string, std::vector<std::string>> pair;
pair.first = var->parameter()->str();
auto& args_vec = pair.second;
if (var && var->arguments()) {
args_vec.resize(var->arguments()->size());
for (size_t i = 0; i < var->arguments()->size(); ++i) {
args_vec[i] = (*var->arguments())[i]->str();
}
}
inputs_.insert(std::move(pair));
}
return inputs_;
}
const std::map<std::string, std::vector<std::string>>& outputs() const {
NotImplemented();
for (const auto& var : *desc_->outputs()) {
std::pair<std::string, std::vector<std::string>> pair;
pair.first = var->parameter()->str();
auto& args_vec = pair.second;
if (var && var->arguments()) {
args_vec.resize(var->arguments()->size());
for (size_t i = 0; i < var->arguments()->size(); ++i) {
args_vec[i] = (*var->arguments())[i]->str();
}
}
outputs_.insert(std::move(pair));
}
return outputs_;
}
std::map<std::string, std::vector<std::string>>* mutable_inputs() {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return &inputs_;
}
std::map<std::string, std::vector<std::string>>* mutable_outputs() {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return &outputs_;
}
......@@ -183,7 +205,7 @@ class OpDescView : public OpDescAPI {
}
std::vector<std::string> output_vars() const {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return std::vector<std::string>();
}
......@@ -192,22 +214,18 @@ class OpDescView : public OpDescAPI {
}
const std::map<std::string, Any>& attrs() const {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return attrs_;
}
const std::map<std::string, AttrType>& attr_types() const {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return attr_types_;
}
private:
void NotImplemented() const {
LOG(FATAL) << "The additional interfaces of OpDescView is temporarily "
"unavailable in read-only mode.";
}
std::string type_;
std::map<std::string, std::vector<std::string>> inputs_;
std::map<std::string, std::vector<std::string>> outputs_;
mutable std::map<std::string, std::vector<std::string>> inputs_;
mutable std::map<std::string, std::vector<std::string>> outputs_;
std::map<std::string, Any> attrs_;
std::map<std::string, AttrType> attr_types_;
};
......
......@@ -66,7 +66,7 @@ class ProgramDescView : public ProgramDescAPI {
template <typename T>
T* GetBlock(int32_t idx) {
NotImplemented();
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
return nullptr;
}
......@@ -91,10 +91,6 @@ class ProgramDescView : public ProgramDescAPI {
private:
ProgramDescView& operator=(const ProgramDescView&) = delete;
ProgramDescView(const ProgramDescView&) = delete;
void NotImplemented() const {
LOG(FATAL) << "The additional interfaces of ProgramDescView is temporarily "
"unavailable in read-only mode.";
}
};
#ifdef LITE_WITH_FLATBUFFERS_DESC
......
......@@ -67,14 +67,12 @@ class VarDescView : public VarDescAPI {
public:
VarDescView() = default;
void SetDataType(Type data_type) { NotImplemented(); }
void SetShape(const std::vector<int64_t>& dims) { NotImplemented(); }
void SetDataType(Type data_type) { LITE_MODEL_INTERFACE_NOT_IMPLEMENTED; }
void SetShape(const std::vector<int64_t>& dims) {
LITE_MODEL_INTERFACE_NOT_IMPLEMENTED;
}
private:
void NotImplemented() const {
LOG(FATAL) << "The additional interfaces of VarDescView is temporarily "
"unavailable in read-only mode.";
}
std::vector<int64_t> shape_;
};
......
......@@ -546,64 +546,57 @@ void SaveCombinedParamsNaive(const std::string &path,
table.AppendToFile(path);
}
void SaveModelNaive(const std::string &model_dir,
////////////////////////////////////////////////////////////////////////////////////
// Save model: meta_version = 1
// Flatbuffer model + params
////////////////////////////////////////////////////////////////////////////////////
// Create a new file and write data into it.
void WriteToFile(const std::string &filename,
const void *src,
size_t byte_size) {
CHECK(src);
FILE *file = fopen(filename.c_str(), "wb");
CHECK(file);
CHECK(fwrite(src, sizeof(char), byte_size, file) == byte_size);
fclose(file);
}
// Append data to an existing file.
void AppendToFile(const std::string &filename,
const void *src,
size_t byte_size) {
CHECK(src);
FILE *fp = fopen(filename.c_str(), "ab");
CHECK(fp) << "Unable to open file: " << filename;
if (fwrite(reinterpret_cast<const char *>(src), 1, byte_size, fp) !=
byte_size) {
fclose(fp);
LOG(FATAL) << "Write file error: " << filename;
}
fclose(fp);
}
/* ---------- Flatbuffers ---------- */
void SaveModelNaive(const std::string &model_file,
const Scope &exec_scope,
const cpp::ProgramDesc &cpp_prog,
bool combined) {
// Save program
const std::string prog_path = model_dir + ".nb";
naive_buffer::BinaryTable table;
naive_buffer::proto::ProgramDesc nb_proto_prog(&table);
naive_buffer::ProgramDesc nb_prog(&nb_proto_prog);
TransformProgramDescCppToAny(cpp_prog, &nb_prog);
nb_proto_prog.Save();
const cpp::ProgramDesc &cpp_prog) {
/* 1. Save model to model.fbs */
const std::string prog_path = model_file + ".nb";
// Save meta_version(uint16) into file
naive_buffer::BinaryTable meta_version_table;
meta_version_table.Require(sizeof(uint16_t));
uint16_t meta_version = 0;
memcpy(meta_version_table.cursor(), &meta_version, sizeof(uint16_t));
meta_version_table.Consume(sizeof(uint16_t));
meta_version_table.SaveToFile(prog_path);
uint16_t meta_version = 1;
WriteToFile(prog_path, &meta_version, sizeof(uint16_t));
// Save lite_version(char[16]) into file
const int paddle_version_length = 16 * sizeof(char);
naive_buffer::BinaryTable paddle_version_table;
paddle_version_table.Require(paddle_version_length);
std::string paddle_version = version();
memcpy(paddle_version_table.cursor(),
paddle_version.c_str(),
paddle_version_length);
paddle_version_table.Consume(paddle_version_length);
paddle_version_table.AppendToFile(prog_path);
AppendToFile(prog_path, paddle_version.c_str(), paddle_version_length);
VLOG(4) << "paddle_version:" << paddle_version;
// Save topology_size(uint64) into file
naive_buffer::BinaryTable topology_size_table;
topology_size_table.Require(sizeof(uint64_t));
uint64_t topology_size = table.size();
memcpy(topology_size_table.cursor(), &topology_size, sizeof(uint64_t));
topology_size_table.Consume(sizeof(uint64_t));
topology_size_table.AppendToFile(prog_path);
// save topology data into model file
table.AppendToFile(prog_path);
// Save Params
SaveCombinedParamsNaive(prog_path, exec_scope, cpp_prog);
LOG(INFO) << "Save naive buffer model in '" << model_dir
<< ".nb' successfully";
}
/* ---------- Flatbuffers ---------- */
void SaveModelFbs(const std::string &model_dir,
const Scope &exec_scope,
const cpp::ProgramDesc &cpp_prog) {
/* 1. Save model to model.fbs */
const std::string prog_path = model_dir + "/model.fbs";
fbs::ProgramDesc fbs_prog;
TransformProgramDescCppToAny(cpp_prog, &fbs_prog);
fbs::SaveFile(prog_path, fbs_prog.data());
uint64_t topology_size = (fbs_prog.data()).size();
AppendToFile(prog_path, &topology_size, sizeof(uint64_t));
/* 1. Save model to model.fbs */
AppendToFile(prog_path, (fbs_prog.data()).data(), topology_size);
VLOG(4) << "save topology_size:" << topology_size;
/* 2. Get param names from cpp::ProgramDesc */
auto &main_block_desc = *cpp_prog.GetBlock<cpp::BlockDesc>(0);
......@@ -618,37 +611,14 @@ void SaveModelFbs(const std::string &model_dir,
}
/* 3. Save combined params to params.fbs */
const std::string params_path = model_dir + "/params.fbs";
fbs::CombinedParamsDesc params_prog;
fbs::SetCombinedParamsWithScope(exec_scope, unique_var_names, &params_prog);
fbs::SaveFile(params_path, params_prog.data());
}
#endif // LITE_ON_TINY_PUBLISH
void LoadModelFbsFromFile(const std::string &filename,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
CHECK(cpp_prog);
CHECK(scope);
/* 1. Load cpp::ProgramDesc with model.fbs */
const std::string prog_path = filename + "/model.fbs";
#ifdef LITE_ON_FLATBUFFERS_DESC_VIEW
cpp_prog->Init(fbs::LoadFile(prog_path));
#elif LITE_ON_TINY_PUBLISH
LOG(FATAL) << "Since no data structure of Flatbuffers has been constructed, "
"the model cannot be loaded.";
#else
fbs::ProgramDesc program(fbs::LoadFile(prog_path));
TransformProgramDescAnyToCpp(program, cpp_prog);
#endif
AppendToFile(
prog_path, (params_prog.data()).data(), (params_prog.data()).size());
/* 2. Load scope with params.fbs */
const std::string params_path = filename + "/params.fbs";
fbs::CombinedParamsDescView params(fbs::LoadFile(params_path));
fbs::SetScopeWithCombinedParams(scope, params);
LOG(INFO) << "Save naive buffer model in '" << prog_path << " successfully";
}
#endif // LITE_ON_TINY_PUBLISH
template <typename T>
void SetTensorDataNaive(T *out, size_t size, const std::vector<T> &src) {
CHECK(out);
......@@ -746,7 +716,10 @@ void LoadCombinedParamsNaive(const std::string &path,
<< "] not found";
}
}
///////////////////////////////////////////////////////////////////////////////
/* Old method of loading and saving models, used before V2.3.0 */
/* Warning: this is an old interface and will be abandoned in release/v3.0.0 */
///////////////////////////////////////////////////////////////////////////////
void LoadModelNaive(const std::string &model_dir,
Scope *scope,
cpp::ProgramDesc *cpp_prog,
......@@ -802,6 +775,43 @@ void LoadModelNaive(const std::string &model_dir,
VLOG(4) << "Load naive buffer model in '" << model_dir << "' successfully";
}
void LoadModelNaiveFromMemory(const std::string &model_buffer,
const std::string &param_buffer,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
CHECK(cpp_prog);
CHECK(scope);
cpp_prog->ClearBlocks();
// Load model
naive_buffer::BinaryTable table;
table.LoadFromMemory(model_buffer.c_str(), model_buffer.length());
naive_buffer::proto::ProgramDesc nb_proto_prog(&table);
nb_proto_prog.Load();
naive_buffer::ProgramDesc nb_prog(&nb_proto_prog);
// Transform to cpp::ProgramDesc
TransformProgramDescAnyToCpp(nb_prog, cpp_prog);
// Load Params
LoadCombinedParamsNaive(param_buffer, 0, scope, *cpp_prog, true);
VLOG(4) << "Load model from naive buffer memory successfully";
}
//////////////////////////////////////////////////////////////////////
// usage: LoadModelNaiveFromFile is used for loading model from file.
template <typename T>
void ReadModelDataFromFile(T *data,
const std::string &prog_path,
uint64_t *offset,
const uint64_t &size) {
naive_buffer::BinaryTable data_table;
data_table.LoadFromFile(prog_path, *offset, size);
memcpy(data, data_table.cursor(), size);
*offset = *offset + size;
}
/*
* Binary structure of naive_buffer model: model.nb
* ----------------------------------------------------------
......@@ -820,18 +830,6 @@ void LoadModelNaive(const std::string &model_dir,
* param_data: contains model's params data.
*/
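// Illustrative sketch (not part of this commit): walking the meta_version = 1
// header fields described in the layout comment above, mirroring what
// ReadModelDataFromFile / LoadModelNaiveV1FromMemory do. `NaiveModelHeader`
// and `ParseNaiveModelHeader` are hypothetical names used only in this sketch.
#include <cstdint>
#include <cstring>
#include <string>

struct NaiveModelHeader {
  uint16_t meta_version;   // 0: legacy naive_buffer model, 1: flatbuffer topology
  char opt_version[16];    // version string of the opt tool that saved the model
  uint64_t topology_size;  // byte size of the topology data that follows
};

NaiveModelHeader ParseNaiveModelHeader(const std::string& model_buffer) {
  NaiveModelHeader header;
  uint64_t offset = 0;
  std::memcpy(&header.meta_version, model_buffer.data() + offset,
              sizeof(uint16_t));
  offset += sizeof(uint16_t);
  std::memcpy(header.opt_version, model_buffer.data() + offset,
              16 * sizeof(char));
  offset += 16 * sizeof(char);
  std::memcpy(&header.topology_size, model_buffer.data() + offset,
              sizeof(uint64_t));
  offset += sizeof(uint64_t);
  // Topology data occupies [offset, offset + topology_size); the combined
  // param data runs from there to the end of the buffer.
  return header;
}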
// usage: LoadModelNaiveFromFile is used for loading model from file.
template <typename T>
void ReadModelDataFromFile(T *data,
const std::string &prog_path,
uint64_t *offset,
const uint64_t &size) {
naive_buffer::BinaryTable data_table;
data_table.LoadFromFile(prog_path, *offset, size);
memcpy(data, data_table.cursor(), size);
*offset = *offset + size;
}
void LoadModelNaiveFromFile(const std::string &filename,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
......@@ -850,6 +848,36 @@ void LoadModelNaiveFromFile(const std::string &filename,
&meta_version, prog_path, &offset, sizeof(uint16_t));
VLOG(4) << "Meta_version:" << meta_version;
switch (meta_version) {
case 0:
LoadModelNaiveV0FromFile(filename, scope, cpp_prog);
break;
case 1:
LoadModelFbsFromFile(filename, scope, cpp_prog);
break;
default:
LOG(FATAL) << "Error, this model file is not supported.";
break;
}
}
void LoadModelNaiveV0FromFile(const std::string &filename,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
CHECK(cpp_prog);
CHECK(scope);
cpp_prog->ClearBlocks();
// ModelFile
const std::string prog_path = filename;
// Offset
uint64_t offset = 0;
// (1)get meta version
uint16_t meta_version;
ReadModelDataFromFile<uint16_t>(
&meta_version, prog_path, &offset, sizeof(uint16_t));
VLOG(4) << "Meta_version:" << meta_version;
// (2)get opt version
char opt_version[16];
const uint64_t opt_version_length = 16 * sizeof(char);
......@@ -890,34 +918,53 @@ void LoadModelNaiveFromFile(const std::string &filename,
VLOG(4) << "Load naive buffer model in '" << filename << "' successfully";
}
// Warning: this is an old interface and is not recommended.
// TODO: this interface will be abandoned in release/v3.0.0.
void LoadModelNaiveFromMemory(const std::string &model_buffer,
const std::string &param_buffer,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
void LoadModelFbsFromFile(const std::string &filename,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
CHECK(cpp_prog);
CHECK(scope);
cpp_prog->ClearBlocks();
// Offset
uint64_t offset = sizeof(uint16_t);
// Load model
naive_buffer::BinaryTable table;
table.LoadFromMemory(model_buffer.c_str(), model_buffer.length());
naive_buffer::proto::ProgramDesc nb_proto_prog(&table);
nb_proto_prog.Load();
naive_buffer::ProgramDesc nb_prog(&nb_proto_prog);
// get opt version
char opt_version[16];
const uint64_t opt_version_length = 16 * sizeof(char);
ReadModelDataFromFile<char>(
opt_version, filename, &offset, opt_version_length);
VLOG(4) << "Opt_version:" << static_cast<const char *>(opt_version);
// Check version: the opt version used to transform this model should be
// consistent with the current Paddle-Lite version.
const std::string paddle_version = version();
const std::string opt_version_str = opt_version;
if (paddle_version != opt_version_str) {
LOG(WARNING) << "warning: the version of opt that transformed this model "
"is not consistent with current Paddle-Lite version."
"\n version of opt:"
<< static_cast<const char *>(opt_version)
<< "\n version of current Paddle-Lite:" << paddle_version;
}
// (3)get topo_size
uint64_t topo_size;
ReadModelDataFromFile<uint64_t>(
&topo_size, filename, &offset, sizeof(uint64_t));
// Transform to cpp::ProgramDesc
TransformProgramDescAnyToCpp(nb_prog, cpp_prog);
#ifdef LITE_ON_FLATBUFFERS_DESC_VIEW
cpp_prog->Init(fbs::LoadFile(filename, offset, topo_size));
#elif LITE_ON_TINY_PUBLISH
LOG(FATAL) << "Since no data structure of Flatbuffers has been constructed, "
"the model cannot be loaded.";
#else
fbs::ProgramDesc program(fbs::LoadFile(filename, offset, topo_size));
TransformProgramDescAnyToCpp(program, cpp_prog);
#endif
offset = offset + topo_size;
// Load Params
// NOTE: Only the main block is used now.
// Only combined params are supported when loading a model from memory.
LoadCombinedParamsNaive(param_buffer, 0, scope, *cpp_prog, true);
/* 2. Load scope from params.fbs */
fbs::CombinedParamsDescView params(fbs::LoadFile(filename, offset));
fbs::SetScopeWithCombinedParams(scope, params);
VLOG(4) << "Load model from naive buffer memory successfully";
VLOG(4) << "Load naive buffer model in '" << filename << "' successfully";
}
// usage: LoadModelNaiveFromMemory is used for loading naive model from memory
......@@ -931,6 +978,7 @@ void ReadModelDataFromBuffer(T *data,
memcpy(data, data_table.cursor(), size);
*offset = *offset + size;
}
void LoadModelNaiveFromMemory(const std::string &model_buffer,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
......@@ -938,14 +986,30 @@ void LoadModelNaiveFromMemory(const std::string &model_buffer,
CHECK(scope);
cpp_prog->ClearBlocks();
// Offset
uint64_t offset = 0;
// (1)get meta version
uint16_t meta_version;
ReadModelDataFromBuffer<uint16_t>(
&meta_version, model_buffer, &offset, sizeof(uint16_t));
VLOG(4) << "Meta_version:" << meta_version;
switch (meta_version) {
case 0:
LoadModelNaiveV0FromMemory(model_buffer, scope, cpp_prog);
break;
case 1:
LoadModelNaiveV1FromMemory(model_buffer, scope, cpp_prog);
break;
default:
LOG(FATAL) << "Error: Unsupported model type.";
break;
}
}
void LoadModelNaiveV0FromMemory(const std::string &model_buffer,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
// Offset
uint64_t offset = sizeof(uint16_t);
// (2)get opt version
char opt_version[16];
......@@ -977,5 +1041,52 @@ void LoadModelNaiveFromMemory(const std::string &model_buffer,
VLOG(4) << "Load model from naive buffer memory successfully";
}
///////////////////////////////////////////////////////////////////
// Meta_version=1
///////////////////////////////////////////////////////////////////
void LoadModelNaiveV1FromMemory(const std::string &model_buffer,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
// Offset
uint64_t offset = sizeof(uint16_t);
// (2)get opt version
char opt_version[16];
const uint64_t paddle_version_length = 16 * sizeof(char);
ReadModelDataFromBuffer<char>(
opt_version, model_buffer, &offset, paddle_version_length);
VLOG(4) << "Opt_version:" << static_cast<const char *>(opt_version);
// (3)get prog_size and prog_data
uint64_t prog_size;
ReadModelDataFromBuffer<uint64_t>(
&prog_size, model_buffer, &offset, sizeof(uint64_t));
VLOG(4) << "prog_size:" << prog_size;
std::vector<char> prog_data(prog_size);
memcpy(prog_data.data(), model_buffer.c_str() + offset, prog_size);
#ifdef LITE_ON_FLATBUFFERS_DESC_VIEW
cpp_prog->Init(prog_data);
#elif LITE_ON_TINY_PUBLISH
LOG(FATAL) << "Since no data structure of Flatbuffers has been constructed, "
"the model cannot be loaded.";
#else
fbs::ProgramDesc program(prog_data);
TransformProgramDescAnyToCpp(program, cpp_prog);
#endif
offset = offset + prog_size;
VLOG(4) << "param_size:" << model_buffer.length() - offset;
std::vector<char> params_data(model_buffer.length() - offset);
memcpy(params_data.data(),
model_buffer.c_str() + offset,
model_buffer.length() - offset);
fbs::CombinedParamsDescView params(params_data);
fbs::SetScopeWithCombinedParams(scope, params);
VLOG(4) << "Load model from naive buffer memory successfully";
}
} // namespace lite
} // namespace paddle
......@@ -35,6 +35,16 @@ namespace lite {
std::unique_ptr<framework::proto::ProgramDesc> LoadProgram(
const std::string& path, bool program_from_memory = false);
template <typename T>
void ReadModelDataFromFile(T* data,
const std::string& prog_path,
uint64_t* offset,
const uint64_t& size);
void AppendToFile(const std::string& filename,
const void* src,
size_t byte_size);
// Read a single file containing all the parameters.
void LoadParams(const std::string& path);
......@@ -86,14 +96,12 @@ void SaveCombinedParamsNaive(const std::string& path,
void SaveModelNaive(const std::string& model_dir,
const Scope& exec_scope,
const cpp::ProgramDesc& cpp_prog,
bool combined = true);
const cpp::ProgramDesc& cpp_prog);
void SaveModelFbs(const std::string& model_dir,
const Scope& exec_scope,
const cpp::ProgramDesc& cpp_prog);
#endif // LITE_ON_TINY_PUBLISH
void LoadModelFbsFromFile(const std::string& filename,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
......@@ -108,6 +116,12 @@ void LoadModelNaive(const std::string& model_dir,
lite::Scope* scope,
cpp::ProgramDesc* prog,
bool combined = true);
void LoadModelFbsFromFile(const std::string& filename,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelNaiveV0FromFile(const std::string& filename,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelNaiveFromFile(const std::string& filename,
lite::Scope* scope,
cpp::ProgramDesc* prog);
......@@ -118,6 +132,15 @@ void LoadModelNaiveFromMemory(const std::string& model_buffer,
void LoadModelNaiveFromMemory(const std::string& model_buffer,
lite::Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelNaiveV1FromMemory(const std::string& model_buffer,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelFbsFromMemory(const std::string& model_buffer,
lite::Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelNaiveV0FromMemory(const std::string& model_buffer,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
} // namespace lite
} // namespace paddle
......@@ -21,7 +21,6 @@ DEFINE_string(model_dir, "", "");
namespace paddle {
namespace lite {
TEST(ModelParser, LoadProgram) {
CHECK(!FLAGS_model_dir.empty());
auto program = LoadProgram(FLAGS_model_dir + "/__model__");
......@@ -117,7 +116,7 @@ TEST(ModelParser, SaveModelNaive) {
cpp::ProgramDesc prog;
Scope scope;
LoadModelPb(FLAGS_model_dir, "", "", &scope, &prog);
const std::string save_pb_model_path = FLAGS_model_dir + ".saved.naive";
const std::string save_pb_model_path = FLAGS_model_dir + ".saved";
SaveModelNaive(save_pb_model_path, scope, prog);
}
......@@ -126,7 +125,7 @@ TEST(ModelParser, LoadModelNaiveFromFile) {
cpp::ProgramDesc prog;
Scope scope;
auto model_path = std::string(FLAGS_model_dir) + ".saved.naive.nb";
auto model_path = std::string(FLAGS_model_dir) + ".saved.nb";
LoadModelNaiveFromFile(model_path, &scope, &prog);
}
......@@ -135,7 +134,7 @@ TEST(ModelParser, LoadModelNaiveFromMemory) {
cpp::ProgramDesc prog;
Scope scope;
auto model_path = std::string(FLAGS_model_dir) + ".saved.naive.nb";
auto model_path = std::string(FLAGS_model_dir) + ".saved.nb";
std::string model_buffer = lite::ReadFile(model_path);
LoadModelNaiveFromMemory(model_buffer, &scope, &prog);
}
......