Unverified commit 8de02e61, authored by 石晓伟, committed by GitHub

add build macros for flatbuffers, test=develop (#4142)

* add build macros for flatbuffers, test=develop
Parent commit: d2b70c3c
......@@ -198,8 +198,15 @@ if (LITE_WITH_EXCEPTION)
add_definitions("-DLITE_WITH_EXCEPTION")
endif()
# When LITE_ON_FLATBUFFERS_DESC_VIEW is set, the flatbuffers "view" classes
# replace the default cpp:: program description (see the cpp-desc header
# selection elsewhere in this change).
if (LITE_ON_FLATBUFFERS_DESC_VIEW)
add_definitions("-DLITE_ON_FLATBUFFERS_DESC_VIEW")
message(STATUS "Flatbuffers will be used as cpp default program description.")
endif()
# Tiny-publish builds omit the mutable flatbuffers desc classes; all other
# builds enable them via LITE_WITH_FLATBUFFERS_DESC (the macro that guards
# fbs::ProgramDesc / fbs::BlockDesc / fbs::OpDesc / fbs::VarDesc).
if (LITE_ON_TINY_PUBLISH)
add_definitions("-DLITE_ON_TINY_PUBLISH")
else()
add_definitions("-DLITE_WITH_FLATBUFFERS_DESC")
endif()
if (LITE_ON_MODEL_OPTIMIZE_TOOL)
......
......@@ -15,12 +15,12 @@
#include "lite/model_parser/compatible_pb.h"
#include <string>
#include <vector>
#include "lite/model_parser/flatbuffers/program_desc.h"
#include "lite/model_parser/naive_buffer/block_desc.h"
#include "lite/model_parser/naive_buffer/op_desc.h"
#include "lite/model_parser/naive_buffer/program_desc.h"
#include "lite/model_parser/naive_buffer/var_desc.h"
#ifndef LITE_ON_TINY_PUBLISH
#include "lite/model_parser/flatbuffers/program_desc.h"
#include "lite/model_parser/pb/block_desc.h"
#include "lite/model_parser/pb/op_desc.h"
#include "lite/model_parser/pb/program_desc.h"
......@@ -56,6 +56,17 @@ void TransformVarDescAnyToCpp<pb::VarDesc>(const pb::VarDesc &any_desc,
cpp_desc->SetShape(any_desc.GetShape());
}
}
// Converts a flatbuffers var description into the general cpp:: form.
// Mirrors the pb::VarDesc specialization above: name/type/persistable are
// always copied, while data type and shape are skipped for the special
// "feed"/"fetch" variables.
template <>
void TransformVarDescAnyToCpp<fbs::VarDesc>(const fbs::VarDesc &any_desc,
                                            cpp::VarDesc *cpp_desc) {
  cpp_desc->SetName(any_desc.Name());
  cpp_desc->SetType(any_desc.GetType());
  cpp_desc->SetPersistable(any_desc.Persistable());
  const bool is_feed_or_fetch =
      any_desc.Name() == "feed" || any_desc.Name() == "fetch";
  if (!is_feed_or_fetch) {
    cpp_desc->SetDataType(any_desc.GetDataType());
    cpp_desc->SetShape(any_desc.GetShape());
  }
}
#endif
template <>
......@@ -74,18 +85,6 @@ void TransformVarDescAnyToCpp<naive_buffer::VarDesc>(
}*/
}
// NOTE(review): in this rendered diff this specialization appears twice (old
// and new position); in the applied change it exists only once, inside the
// #ifndef LITE_ON_TINY_PUBLISH region. Contract: copy name/type/persistable
// unconditionally; copy data type and shape only for ordinary (non-feed,
// non-fetch) variables — same guard as the pb::VarDesc specialization.
template <>
void TransformVarDescAnyToCpp<fbs::VarDesc>(const fbs::VarDesc &any_desc,
cpp::VarDesc *cpp_desc) {
cpp_desc->SetName(any_desc.Name());
cpp_desc->SetType(any_desc.GetType());
cpp_desc->SetPersistable(any_desc.Persistable());
if (any_desc.Name() != "feed" && any_desc.Name() != "fetch") {
cpp_desc->SetDataType(any_desc.GetDataType());
cpp_desc->SetShape(any_desc.GetShape());
}
}
/// For OpDesc transform
template <typename OpDescType>
void OpInputsAnyToCpp(const OpDescType &any_desc, cpp::OpDesc *cpp_desc) {
......@@ -318,12 +317,11 @@ TRANS_OP_ANY_WITH_CPP_IMPL(naive_buffer::OpDesc);
TRANS_BLOCK_ANY_WITH_CPP_IMPL(OpDesc, VarDesc, naive_buffer, naive_buffer);
TRANS_PROGRAM_ANY_WITH_CPP_IMPL(BlockDesc, naive_buffer, naive_buffer);
#ifndef LITE_ON_TINY_PUBLISH
TRANS_VAR_ANY_WITH_CPP_IMPL(fbs::VarDesc);
TRANS_OP_ANY_WITH_CPP_IMPL(fbs::OpDesc);
TRANS_BLOCK_ANY_WITH_CPP_IMPL(OpDescT, VarDescT, fbs, fbs);
TRANS_PROGRAM_ANY_WITH_CPP_IMPL(BlockDescT, fbs, fbs);
#ifndef LITE_ON_TINY_PUBLISH
TRANS_VAR_ANY_WITH_CPP_IMPL(pb::VarDesc);
TRANS_OP_ANY_WITH_CPP_IMPL(pb::OpDesc);
TRANS_BLOCK_ANY_WITH_CPP_IMPL(OpDesc, VarDesc, pb, framework);
......
......@@ -14,13 +14,29 @@
#pragma once
// Selects which implementation backs the `cpp` description namespace:
// with LITE_ON_FLATBUFFERS_DESC_VIEW defined, the flatbuffers *View classes
// are aliased in (used with const in the tests, so presumably read-only —
// confirm against the fbs headers); otherwise `cpp` is an alias for the
// `general` description namespace.
#ifdef LITE_ON_FLATBUFFERS_DESC_VIEW
#include "lite/model_parser/flatbuffers/block_desc.h"
#include "lite/model_parser/flatbuffers/op_desc.h"
#include "lite/model_parser/flatbuffers/program_desc.h"
#include "lite/model_parser/flatbuffers/var_desc.h"
namespace paddle {
namespace lite {
namespace cpp {
using ProgramDesc = fbs::ProgramDescView;
using BlockDesc = fbs::BlockDescView;
using OpDesc = fbs::OpDescView;
using VarDesc = fbs::VarDescView;
}  // namespace cpp
}  // namespace lite
}  // namespace paddle
#else
#include "lite/model_parser/general/block_desc.h"
#include "lite/model_parser/general/op_desc.h"
#include "lite/model_parser/general/program_desc.h"
#include "lite/model_parser/general/var_desc.h"
namespace paddle {
namespace lite {
// Default path: the mutable `general` descriptions serve as cpp::*.
namespace cpp = general;
}  // namespace lite
}  // namespace paddle
#endif // LITE_ON_FLATBUFFERS_DESC_VIEW
......@@ -43,6 +43,7 @@ OpDescView const* BlockDescView::GetOp<OpDescView>(int32_t idx) const {
return &ops_[idx];
}
#ifdef LITE_WITH_FLATBUFFERS_DESC
template <>
proto::VarDescT* BlockDesc::GetVar<proto::VarDescT>(int32_t idx) {
CHECK_LT(idx, static_cast<int32_t>(VarsSize())) << "idx >= vars.size()";
......@@ -68,6 +69,7 @@ proto::OpDescT* BlockDesc::AddOp<proto::OpDescT>() {
SyncOps();
return ops_.back()->raw_desc();
}
#endif // LITE_WITH_FLATBUFFERS_DESC
} // namespace fbs
} // namespace lite
......
......@@ -90,6 +90,7 @@ class BlockDescView : public BlockDescAPI {
}
};
#ifdef LITE_WITH_FLATBUFFERS_DESC
class BlockDesc : public BlockDescAPI {
public:
BlockDesc() : owned_(true), desc_(new proto::BlockDescT()) {}
......@@ -170,6 +171,7 @@ class BlockDesc : public BlockDescAPI {
std::vector<std::unique_ptr<VarDesc>> vars_;
std::vector<std::unique_ptr<OpDesc>> ops_;
};
#endif // LITE_WITH_FLATBUFFERS_DESC
} // namespace fbs
} // namespace lite
......
......@@ -39,6 +39,7 @@ void set_tensor(paddle::lite::Tensor* tensor,
}
} // namespace
#ifdef LITE_WITH_FLATBUFFERS_DESC
TEST(CombinedParamsDesc, Scope) {
/* --------- Save scope ---------- */
Scope scope;
......@@ -81,6 +82,7 @@ TEST(CombinedParamsDesc, Scope) {
/* --------- View scope ---------- */
check_params(CombinedParamsDescView(std::move(cache)));
}
#endif // LITE_WITH_FLATBUFFERS_DESC
} // namespace fbs
} // namespace lite
......
......@@ -83,6 +83,7 @@ GET_ATTRS_IMPL(std::vector<int64_t>, longs);
#undef GET_ATTR_IMPL
#undef GET_ATTRS_IMPL
#ifdef LITE_WITH_FLATBUFFERS_DESC
#define ATTR_IMPL(T, fb_f__) \
template <> \
T OpDesc::GetAttr<T>(const std::string& name) const { \
......@@ -109,6 +110,7 @@ ATTR_IMPL(std::vector<float>, floats);
ATTR_IMPL(std::vector<int64_t>, longs);
ATTR_IMPL(std::vector<std::string>, strings);
#undef GET_ATTRS_IMPL
#endif // LITE_WITH_FLATBUFFERS_DESC
} // namespace fbs
} // namespace lite
......
......@@ -205,6 +205,7 @@ class OpDescView : public OpDescAPI {
std::map<std::string, AttrType> attr_types_;
};
#ifdef LITE_WITH_FLATBUFFERS_DESC
class OpDesc : public OpDescAPI {
public:
OpDesc() : owned_(true), desc_(new proto::OpDescT()) {}
......@@ -291,6 +292,7 @@ class OpDesc : public OpDescAPI {
bool owned_{false};
proto::OpDescT* desc_{nullptr};
};
#endif // LITE_WITH_FLATBUFFERS_DESC
} // namespace fbs
} // namespace lite
......
......@@ -106,6 +106,7 @@ class CombinedParamsDescView : public CombinedParamsDescReadAPI {
proto::CombinedParamsDesc const* desc_;
};
#ifdef LITE_WITH_FLATBUFFERS_DESC
class ParamDesc : public ParamDescAPI {
public:
ParamDesc() : owned_(true), desc_(new proto::ParamDescT()) {
......@@ -218,6 +219,7 @@ class CombinedParamsDesc : public CombinedParamsDescAPI {
proto::CombinedParamsDescT desc_;
std::vector<std::unique_ptr<ParamDesc>> params_;
};
#endif // LITE_WITH_FLATBUFFERS_DESC
} // namespace fbs
} // namespace lite
......
......@@ -32,6 +32,7 @@ BlockDescView const* ProgramDescView::GetBlock<BlockDescView>(
return &blocks_[idx];
}
#ifdef LITE_WITH_FLATBUFFERS_DESC
template <>
proto::BlockDescT* ProgramDesc::GetBlock<proto::BlockDescT>(int32_t idx) {
CHECK_LT(idx, static_cast<int32_t>(BlocksSize())) << "idx >= vars.size()";
......@@ -45,6 +46,7 @@ proto::BlockDescT* ProgramDesc::AddBlock<proto::BlockDescT>() {
SyncBlocks();
return blocks_.back()->raw_desc();
}
#endif // LITE_WITH_FLATBUFFERS_DESC
} // namespace fbs
} // namespace lite
......
......@@ -97,6 +97,7 @@ class ProgramDescView : public ProgramDescAPI {
}
};
#ifdef LITE_WITH_FLATBUFFERS_DESC
class ProgramDesc : public ProgramDescAPI {
public:
ProgramDesc() = default;
......@@ -169,6 +170,7 @@ class ProgramDesc : public ProgramDescAPI {
proto::ProgramDescT desc_;
std::vector<std::unique_ptr<BlockDesc>> blocks_;
};
#endif // LITE_WITH_FLATBUFFERS_DESC
} // namespace fbs
} // namespace lite
......
......@@ -21,16 +21,18 @@ namespace paddle {
namespace lite {
namespace fbs {
// NOTE(review): this rendered diff shows TEST(ProgramDesc, LoadTest) twice
// (old and new position); after the change it lives only inside the
// LITE_WITH_FLATBUFFERS_DESC region, since the mutable fbs::ProgramDesc is
// compiled out when that macro is absent (tiny-publish builds).
TEST(ProgramDesc, LoadTest) {
ProgramDesc program(test::GenerateProgramCache());
test::CheckProgramCache(&program);
}
#ifdef LITE_WITH_FLATBUFFERS_DESC
// Round-trip the generated program cache through the view class.
TEST(ProgramDescView, LoadTest) {
const ProgramDescView program(test::GenerateProgramCache());
test::CheckProgramCache(program);
}
// Round-trip the generated program cache through the mutable desc class.
TEST(ProgramDesc, LoadTest) {
ProgramDesc program(test::GenerateProgramCache());
test::CheckProgramCache(&program);
}
#endif // LITE_WITH_FLATBUFFERS_DESC
} // namespace fbs
} // namespace lite
} // namespace paddle
......@@ -21,6 +21,7 @@ namespace paddle {
namespace lite {
namespace fbs {
namespace test {
#ifdef LITE_WITH_FLATBUFFERS_DESC
inline std::vector<char> GenerateProgramCache() {
/* --------- Set Program --------- */
ProgramDesc program;
......@@ -144,6 +145,7 @@ inline void CheckProgramCache(const ProgramDescView& program) {
CHECK_EQ(op_b0.GetAttr<bool>("Attr1"), true);
CHECK_EQ(op_b0.HasAttr("Attr4"), false);
}
#endif // LITE_WITH_FLATBUFFERS_DESC
} // namespace test
} // namespace fbs
......
......@@ -78,6 +78,7 @@ class VarDescView : public VarDescAPI {
std::vector<int64_t> shape_;
};
#ifdef LITE_WITH_FLATBUFFERS_DESC
class VarDesc : public VarDescAPI {
public:
VarDesc() : owned_(true), desc_(new proto::VarDescT()) {}
......@@ -143,6 +144,7 @@ class VarDesc : public VarDescAPI {
proto::VarDescT* desc_{nullptr};
paddle::lite::fbs::proto::VarTypeT* type_{nullptr};
};
#endif // LITE_WITH_FLATBUFFERS_DESC
} // namespace fbs
} // namespace lite
......
......@@ -23,12 +23,12 @@
#include "lite/core/variable.h"
#include "lite/core/version.h"
#include "lite/model_parser/base/apis.h"
#include "lite/model_parser/flatbuffers/io.h"
#include "lite/model_parser/naive_buffer/combined_params_desc.h"
#include "lite/model_parser/naive_buffer/param_desc.h"
#include "lite/model_parser/naive_buffer/program_desc.h"
#include "lite/model_parser/naive_buffer/var_desc.h"
#ifndef LITE_ON_TINY_PUBLISH
#include "lite/model_parser/flatbuffers/io.h"
#include "lite/model_parser/pb/program_desc.h"
#include "lite/model_parser/pb/var_desc.h"
#endif
......@@ -623,6 +623,7 @@ void SaveModelFbs(const std::string &model_dir,
fbs::SetCombinedParamsWithScope(exec_scope, unique_var_names, &params_prog);
fbs::SaveFile(params_path, params_prog.data(), params_prog.buf_size());
}
#endif // LITE_ON_TINY_PUBLISH
void LoadModelFbsFromFile(const std::string &filename,
Scope *scope,
......@@ -630,19 +631,24 @@ void LoadModelFbsFromFile(const std::string &filename,
CHECK(cpp_prog);
CHECK(scope);
/* 1. Save cpp::ProgramDesc with model.fbs */
/* 1. Load cpp::ProgramDesc with model.fbs */
const std::string prog_path = filename + "/model.fbs";
#ifdef LITE_ON_FLATBUFFERS_DESC_VIEW
cpp_prog->Init(fbs::LoadFile(prog_path));
#elif LITE_ON_TINY_PUBLISH
LOG(FATAL) << "Since no data structure of Flatbuffers has been constructed, "
"the model cannot be loaded.";
#else
fbs::ProgramDesc program(fbs::LoadFile(prog_path));
TransformProgramDescAnyToCpp(program, cpp_prog);
#endif
/* 2. Save scope with params.fbs */
/* 2. Load scope with params.fbs */
const std::string params_path = filename + "/params.fbs";
fbs::CombinedParamsDesc params(fbs::LoadFile(params_path));
fbs::CombinedParamsDescView params(fbs::LoadFile(params_path));
fbs::SetScopeWithCombinedParams(scope, params);
}
#endif // LITE_ON_TINY_PUBLISH
template <typename T>
void SetTensorDataNaive(T *out, size_t size, const std::vector<T> &src) {
CHECK(out);
......
......@@ -92,11 +92,11 @@ void SaveModelNaive(const std::string& model_dir,
void SaveModelFbs(const std::string& model_dir,
const Scope& exec_scope,
const cpp::ProgramDesc& cpp_prog);
#endif // LITE_ON_TINY_PUBLISH
void LoadModelFbsFromFile(const std::string& filename,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
#endif // LITE_ON_TINY_PUBLISH
void LoadParamNaive(const std::string& path,
lite::Scope* scope,
......
......@@ -37,6 +37,7 @@ WITH_HUAWEI_ASCEND_NPU=OFF # Huawei Ascend Builder/Runtime Libs on X86 host
# default installation path, ensure acllib/atc/opp directories are all in this root dir
HUAWEI_ASCEND_NPU_DDK_ROOT="/usr/local/Ascend/ascend-toolkit/latest/x86_64-linux_gcc4.8.5"
PYTHON_EXECUTABLE_OPTION=""
ENABLE_FLATBUFFERS_DESC_VIEW=OFF
readonly THIRDPARTY_TAR=https://paddle-inference-dist.bj.bcebos.com/PaddleLite/third-party-05b862.tar.gz
......@@ -146,7 +147,8 @@ function make_tiny_publish_so {
-DAPU_DDK_ROOT=$APU_DDK_ROOT \
-DLITE_WITH_RKNPU=$BUILD_RKNPU \
-DRKNPU_DDK_ROOT=$RKNPU_DDK_ROOT \
-DARM_TARGET_OS=${os} -DARM_TARGET_ARCH_ABI=${abi} -DARM_TARGET_LANG=${lang}
-DARM_TARGET_OS=${os} -DARM_TARGET_ARCH_ABI=${abi} -DARM_TARGET_LANG=${lang} \
-DLITE_ON_FLATBUFFERS_DESC_VIEW=${ENABLE_FLATBUFFERS_DESC_VIEW}
make publish_inference -j$NUM_PROC
cd - > /dev/null
......@@ -578,6 +580,10 @@ function main {
HUAWEI_ASCEND_NPU_DDK_ROOT="${i#*=}"
shift
;;
# Toggle the flatbuffers desc-view build; the value is forwarded to CMake as
# -DLITE_ON_FLATBUFFERS_DESC_VIEW in make_tiny_publish_so.
--enable_flatbuffers_view=*)
ENABLE_FLATBUFFERS_DESC_VIEW="${i#*=}"
shift
;;
tiny_publish)
make_tiny_publish_so $ARM_OS $ARM_ABI $ARM_LANG $ANDROID_STL
shift
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please sign up.