未验证 提交 784f75fa 编写于 作者: 石晓伟 提交者: GitHub

[cherry-pick] not build naive_buffer when build light framework, test=develop (#4315)

* [cherry-pick] not build naive_buffer when build light framework, test=develop (#4251)

* fix opt fatal errors, test=develop (#4293)

* fix cmake dependencies, test=develop
上级 bd4fd8d3
......@@ -199,13 +199,10 @@ if (LITE_WITH_EXCEPTION)
add_definitions("-DLITE_WITH_EXCEPTION")
endif()
if (LITE_ON_FLATBUFFERS_DESC_VIEW)
add_definitions("-DLITE_ON_FLATBUFFERS_DESC_VIEW")
message(STATUS "Flatbuffers will be used as cpp default program description.")
endif()
if (LITE_ON_TINY_PUBLISH)
add_definitions("-DLITE_ON_TINY_PUBLISH")
add_definitions("-DLITE_ON_FLATBUFFERS_DESC_VIEW")
message(STATUS "Flatbuffers will be used as cpp default program description.")
else()
add_definitions("-DLITE_WITH_FLATBUFFERS_DESC")
endif()
......
......@@ -267,6 +267,10 @@ function(cc_library TARGET_NAME)
list(REMOVE_ITEM cc_library_DEPS warpctc)
add_dependencies(${TARGET_NAME} warpctc)
endif()
if("${cc_library_DEPS};" MATCHES "fbs_headers;")
list(REMOVE_ITEM cc_library_DEPS fbs_headers)
add_dependencies(${TARGET_NAME} fbs_headers)
endif()
# Only deps libmklml.so, not link
if("${cc_library_DEPS};" MATCHES "mklml;")
list(REMOVE_ITEM cc_library_DEPS mklml)
......
......@@ -17,7 +17,6 @@ if (NOT LITE_ON_TINY_PUBLISH)
# Unlike static library, module library has to link target to be able to work
# as a single .so lib.
target_link_libraries(paddle_lite_jni ${lib_DEPS} ${arm_kernels} ${npu_kernels})
add_dependencies(paddle_lite_jni fbs_headers)
if (LITE_WITH_NPU)
# Strips the symbols of our protobuf functions to fix the conflicts during
# loading HIAI builder libs (libhiai_ir.so and libhiai_ir_build.so)
......
......@@ -46,7 +46,6 @@ void LightPredictor::Build(const std::string& model_dir,
case lite_api::LiteModelType::kProtobuf:
LoadModelPb(model_dir, "", "", scope_.get(), program_desc_.get());
break;
#endif
case lite_api::LiteModelType::kNaiveBuffer: {
if (model_from_memory) {
LoadModelNaiveFromMemory(
......@@ -56,6 +55,7 @@ void LightPredictor::Build(const std::string& model_dir,
}
break;
}
#endif
default:
LOG(FATAL) << "Unknown model type";
}
......
......@@ -9,7 +9,7 @@ if(WIN32)
target_link_libraries(lite_pybind ${os_dependency_modules})
else()
lite_cc_library(lite_pybind SHARED SRCS pybind.cc DEPS ${PYBIND_DEPS})
target_sources(lite_pybind PUBLIC ${__lite_cc_files})
target_sources(lite_pybind PUBLIC ${__lite_cc_files} fbs_headers)
endif(WIN32)
if (LITE_ON_TINY_PUBLISH)
......
......@@ -2,7 +2,7 @@ if (WITH_TESTING)
lite_cc_library(lite_gtest_main SRCS lite_gtest_main.cc DEPS gtest gflags)
endif()
lite_cc_library(target_wrapper SRCS target_wrapper.cc
DEPS target_wrapper_host place
DEPS target_wrapper_host place fbs_headers
X86_DEPS target_wrapper_x86
CUDA_DEPS target_wrapper_cuda
XPU_DEPS target_wrapper_xpu
......
......@@ -11,16 +11,15 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "lite/model_parser/compatibility.h"
#ifndef LITE_ON_TINY_PUBLISH
#include "lite/core/type_system.h"
#include "lite/model_parser/cpp_desc.h"
#include "lite/model_parser/naive_buffer/block_desc.h"
#include "lite/model_parser/naive_buffer/op_desc.h"
#include "lite/model_parser/naive_buffer/program_desc.h"
#include "lite/model_parser/naive_buffer/var_desc.h"
#ifndef LITE_ON_TINY_PUBLISH
#include "lite/model_parser/cpp_desc.h"
#endif
namespace paddle {
......
......@@ -15,12 +15,12 @@
#include "lite/model_parser/compatible_pb.h"
#include <string>
#include <vector>
#ifndef LITE_ON_TINY_PUBLISH
#include "lite/model_parser/flatbuffers/program_desc.h"
#include "lite/model_parser/naive_buffer/block_desc.h"
#include "lite/model_parser/naive_buffer/op_desc.h"
#include "lite/model_parser/naive_buffer/program_desc.h"
#include "lite/model_parser/naive_buffer/var_desc.h"
#ifndef LITE_ON_TINY_PUBLISH
#include "lite/model_parser/flatbuffers/program_desc.h"
#include "lite/model_parser/pb/block_desc.h"
#include "lite/model_parser/pb/op_desc.h"
#include "lite/model_parser/pb/program_desc.h"
......@@ -67,7 +67,6 @@ void TransformVarDescAnyToCpp<fbs::VarDesc>(const fbs::VarDesc &any_desc,
cpp_desc->SetShape(any_desc.GetShape());
}
}
#endif
template <>
void TransformVarDescAnyToCpp<naive_buffer::VarDesc>(
......@@ -84,7 +83,7 @@ void TransformVarDescAnyToCpp<naive_buffer::VarDesc>(
cpp_desc->SetShape(any_desc.GetShape());
}*/
}
#endif
/// For OpDesc transform
template <typename OpDescType>
void OpInputsAnyToCpp(const OpDescType &any_desc, cpp::OpDesc *cpp_desc) {
......@@ -312,12 +311,11 @@ void OpAttrsCppToAny(const cpp::OpDesc &cpp_desc, OpDescType *any_desc) {
} \
}
#ifndef LITE_ON_TINY_PUBLISH
TRANS_VAR_ANY_WITH_CPP_IMPL(naive_buffer::VarDesc);
TRANS_OP_ANY_WITH_CPP_IMPL(naive_buffer::OpDesc);
TRANS_BLOCK_ANY_WITH_CPP_IMPL(OpDesc, VarDesc, naive_buffer, naive_buffer);
TRANS_PROGRAM_ANY_WITH_CPP_IMPL(BlockDesc, naive_buffer, naive_buffer);
#ifndef LITE_ON_TINY_PUBLISH
TRANS_VAR_ANY_WITH_CPP_IMPL(fbs::VarDesc);
TRANS_OP_ANY_WITH_CPP_IMPL(fbs::OpDesc);
TRANS_BLOCK_ANY_WITH_CPP_IMPL(OpDescT, VarDescT, fbs, fbs);
......
......@@ -48,7 +48,7 @@ class ProgramDescView : public ProgramDescAPI {
void InitProgramDesc() {
desc_ = proto::GetProgramDesc(buf_.data());
blocks_.resize(BlocksSize());
blocks_.resize(desc_->blocks()->size());
for (size_t idx = 0; idx < BlocksSize(); ++idx) {
blocks_[idx] = BlockDescView(desc_->blocks()->Get(idx));
}
......@@ -59,7 +59,7 @@ class ProgramDescView : public ProgramDescAPI {
Init(buf_);
}
size_t BlocksSize() const override { return desc_->blocks()->size(); }
size_t BlocksSize() const override { return blocks_.size(); }
template <typename T>
T const* GetBlock(int32_t idx) const;
......
......@@ -24,11 +24,11 @@
#include "lite/core/version.h"
#include "lite/model_parser/base/apis.h"
#include "lite/model_parser/flatbuffers/io.h"
#ifndef LITE_ON_TINY_PUBLISH
#include "lite/model_parser/naive_buffer/combined_params_desc.h"
#include "lite/model_parser/naive_buffer/param_desc.h"
#include "lite/model_parser/naive_buffer/program_desc.h"
#include "lite/model_parser/naive_buffer/var_desc.h"
#ifndef LITE_ON_TINY_PUBLISH
#include "lite/model_parser/pb/program_desc.h"
#include "lite/model_parser/pb/var_desc.h"
#endif
......@@ -618,7 +618,7 @@ void SaveModelNaive(const std::string &model_file,
LOG(INFO) << "Save naive buffer model in '" << prog_path << " successfully";
}
#endif // LITE_ON_TINY_PUBLISH
template <typename T>
void SetTensorDataNaive(T *out, size_t size, const std::vector<T> &src) {
CHECK(out);
......@@ -716,6 +716,7 @@ void LoadCombinedParamsNaive(const std::string &path,
<< "] not found";
}
}
///////////////////////////////////////////////////////////////////////////////
/* Old Method of loading and saving model, before V2.3.0 */
/* Warning: this is an old inference and will be abandoned in release/v3.0.0 */
......@@ -799,6 +800,7 @@ void LoadModelNaiveFromMemory(const std::string &model_buffer,
VLOG(4) << "Load model from naive buffer memory successfully";
}
#endif // LITE_ON_TINY_PUBLISH
//////////////////////////////////////////////////////////////////////
// usage: LoadModelNaiveFromFile is used for loading model from file.
......@@ -807,9 +809,8 @@ void ReadModelDataFromFile(T *data,
const std::string &prog_path,
uint64_t *offset,
const uint64_t &size) {
naive_buffer::BinaryTable data_table;
data_table.LoadFromFile(prog_path, *offset, size);
memcpy(data, data_table.cursor(), size);
std::vector<char> prog_data = lite::fbs::LoadFile(prog_path, *offset, size);
memcpy(data, prog_data.data(), size);
*offset = *offset + size;
}
/*
......@@ -835,7 +836,6 @@ void LoadModelNaiveFromFile(const std::string &filename,
cpp::ProgramDesc *cpp_prog) {
CHECK(cpp_prog);
CHECK(scope);
cpp_prog->ClearBlocks();
// ModelFile
const std::string prog_path = filename;
......@@ -850,7 +850,11 @@ void LoadModelNaiveFromFile(const std::string &filename,
switch (meta_version) {
case 0:
#ifndef LITE_ON_TINY_PUBLISH
LoadModelNaiveV0FromFile(filename, scope, cpp_prog);
#else
LOG(FATAL) << "Error, this model file is not supported.";
#endif
break;
case 1:
LoadModelFbsFromFile(filename, scope, cpp_prog);
......@@ -860,6 +864,7 @@ void LoadModelNaiveFromFile(const std::string &filename,
break;
}
}
#ifndef LITE_ON_TINY_PUBLISH
void LoadModelNaiveV0FromFile(const std::string &filename,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
......@@ -917,13 +922,13 @@ void LoadModelNaiveV0FromFile(const std::string &filename,
VLOG(4) << "Load naive buffer model in '" << filename << "' successfully";
}
#endif // LITE_ON_TINY_PUBLISH
void LoadModelFbsFromFile(const std::string &filename,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
CHECK(cpp_prog);
CHECK(scope);
cpp_prog->ClearBlocks();
CHECK_EQ(cpp_prog->BlocksSize(), 0);
// Offset
uint64_t offset = sizeof(uint16_t);
......@@ -973,9 +978,7 @@ void ReadModelDataFromBuffer(T *data,
const std::string &model_buffer,
uint64_t *offset,
const uint64_t &size) {
naive_buffer::BinaryTable data_table;
data_table.LoadFromMemory(model_buffer.c_str() + *offset, size);
memcpy(data, data_table.cursor(), size);
memcpy(data, model_buffer.c_str() + *offset, size);
*offset = *offset + size;
}
......@@ -994,7 +997,11 @@ void LoadModelNaiveFromMemory(const std::string &model_buffer,
VLOG(4) << "Meta_version:" << meta_version;
switch (meta_version) {
case 0:
#ifndef LITE_ON_TINY_PUBLISH
LoadModelNaiveV0FromMemory(model_buffer, scope, cpp_prog);
#else
LOG(FATAL) << "Error: Unsupported model type.";
#endif
break;
case 1:
LoadModelNaiveV1FromMemory(model_buffer, scope, cpp_prog);
......@@ -1004,7 +1011,7 @@ void LoadModelNaiveFromMemory(const std::string &model_buffer,
break;
}
}
#ifndef LITE_ON_TINY_PUBLISH
void LoadModelNaiveV0FromMemory(const std::string &model_buffer,
Scope *scope,
cpp::ProgramDesc *cpp_prog) {
......@@ -1040,7 +1047,7 @@ void LoadModelNaiveV0FromMemory(const std::string &model_buffer,
VLOG(4) << "Load model from naive buffer memory successfully";
}
#endif
///////////////////////////////////////////////////////////////////
// Meta_version=1
///////////////////////////////////////////////////////////////////
......
......@@ -21,11 +21,11 @@
#include <vector>
#ifndef LITE_ON_TINY_PUBLISH
#include "lite/core/framework.pb.h"
#include "lite/model_parser/naive_buffer/proto/framework.nb.h"
#endif
#include "lite/core/scope.h"
#include "lite/core/variable.h"
#include "lite/model_parser/compatible_pb.h"
#include "lite/model_parser/naive_buffer/proto/framework.nb.h"
namespace paddle {
namespace lite {
......@@ -101,45 +101,39 @@ void SaveModelNaive(const std::string& model_dir,
void SaveModelFbs(const std::string& model_dir,
const Scope& exec_scope,
const cpp::ProgramDesc& cpp_prog);
#endif // LITE_ON_TINY_PUBLISH
void LoadModelFbsFromFile(const std::string& filename,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadParamNaive(const std::string& path,
lite::Scope* scope,
const std::string& name);
// warning: this old inference will be abandoned in release/v3.0.0
// and LoadModelNaiveFromFile is suggested.
void LoadModelNaive(const std::string& model_dir,
lite::Scope* scope,
cpp::ProgramDesc* prog,
bool combined = true);
void LoadModelFbsFromFile(const std::string& filename,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelNaiveV0FromFile(const std::string& filename,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelNaiveFromFile(const std::string& filename,
lite::Scope* scope,
cpp::ProgramDesc* prog);
void LoadModelNaiveFromMemory(const std::string& model_buffer,
const std::string& param_buffer,
lite::Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelNaiveFromMemory(const std::string& model_buffer,
lite::Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelNaiveV1FromMemory(const std::string& model_buffer,
void LoadModelNaiveV0FromMemory(const std::string& model_buffer,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
#endif // LITE_ON_TINY_PUBLISH
void LoadModelFbsFromFile(const std::string& filename,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelFbsFromMemory(const std::string& model_buffer,
void LoadModelNaiveFromFile(const std::string& filename,
lite::Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelNaiveV0FromMemory(const std::string& model_buffer,
cpp::ProgramDesc* prog);
void LoadModelNaiveFromMemory(const std::string& model_buffer,
lite::Scope* scope,
cpp::ProgramDesc* cpp_prog);
void LoadModelNaiveV1FromMemory(const std::string& model_buffer,
Scope* scope,
cpp::ProgramDesc* cpp_prog);
} // namespace lite
......
if (LITE_ON_TINY_PUBLISH)
set(naive_wrapper "")
return()
endif()
lite_cc_library(naive_buffer SRCS naive_buffer.cc DEPS types)
add_subdirectory(proto)
......
......@@ -37,7 +37,6 @@ WITH_HUAWEI_ASCEND_NPU=OFF # Huawei Ascend Builder/Runtime Libs on X86 host
# default installation path, ensure acllib/atc/opp directories are all in this root dir
HUAWEI_ASCEND_NPU_DDK_ROOT="/usr/local/Ascend/ascend-toolkit/latest/x86_64-linux_gcc4.8.5"
PYTHON_EXECUTABLE_OPTION=""
ENABLE_FLATBUFFERS_DESC_VIEW=OFF
IOS_DEPLOYMENT_TARGET=9.0
readonly THIRDPARTY_TAR=https://paddle-inference-dist.bj.bcebos.com/PaddleLite/third-party-05b862.tar.gz
......@@ -148,8 +147,7 @@ function make_tiny_publish_so {
-DAPU_DDK_ROOT=$APU_DDK_ROOT \
-DLITE_WITH_RKNPU=$BUILD_RKNPU \
-DRKNPU_DDK_ROOT=$RKNPU_DDK_ROOT \
-DARM_TARGET_OS=${os} -DARM_TARGET_ARCH_ABI=${abi} -DARM_TARGET_LANG=${lang} \
-DLITE_ON_FLATBUFFERS_DESC_VIEW=${ENABLE_FLATBUFFERS_DESC_VIEW}
-DARM_TARGET_OS=${os} -DARM_TARGET_ARCH_ABI=${abi} -DARM_TARGET_LANG=${lang}
make publish_inference -j$NUM_PROC
cd - > /dev/null
......@@ -438,7 +436,6 @@ function print_usage {
echo -e "--build_python: (OFF|ON); controls whether to publish python api lib (ANDROID and IOS is not supported)"
echo -e "--build_java: (OFF|ON); controls whether to publish java api lib (Only ANDROID is supported)"
echo -e "--build_dir: directory for building"
echo -e "--enable_flatbuffers_view: (OFF|ON); Use the flatbuffers read-only view to load the model. If ON, the naive buffer will no longer be supported."
echo -e "--ios_deployment_target: (default: 9.0); Set the minimum compatible system version for ios deployment."
echo
echo -e "argument choices:"
......@@ -584,10 +581,6 @@ function main {
HUAWEI_ASCEND_NPU_DDK_ROOT="${i#*=}"
shift
;;
--enable_flatbuffers_view=*)
ENABLE_FLATBUFFERS_DESC_VIEW="${i#*=}"
shift
;;
--ios_deployment_target=*)
IOS_DEPLOYMENT_TARGET="${i#*=}"
shift
......
......@@ -189,7 +189,9 @@ class LogMessageFatal : public LogMessage {
#ifndef LITE_ON_TINY_PUBLISH
abort();
#else
assert(false);
// If we decide whether the process exits according to the NDEBUG macro
// definition, assert() can be used here.
abort();
#endif
#endif
}
......@@ -250,7 +252,11 @@ class VoidifyFatal : public Voidify {
#ifdef LITE_WITH_EXCEPTION
~VoidifyFatal() noexcept(false) { throw std::exception(); }
#else
~VoidifyFatal() { assert(false); }
~VoidifyFatal() {
// If we decide whether the process exits according to the NDEBUG macro
// definition, assert() can be used here.
abort();
}
#endif
};
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册