diff --git a/cmake/configure.cmake b/cmake/configure.cmake
index 69fba7968d75f0308acdc787313b48c2804d6caf..e980922d5b4869ede65e57e750b5b85676ed0dde 100644
--- a/cmake/configure.cmake
+++ b/cmake/configure.cmake
@@ -199,13 +199,10 @@ if (LITE_WITH_EXCEPTION)
   add_definitions("-DLITE_WITH_EXCEPTION")
 endif()
 
-if (LITE_ON_FLATBUFFERS_DESC_VIEW)
-  add_definitions("-DLITE_ON_FLATBUFFERS_DESC_VIEW")
-  message(STATUS "Flatbuffers will be used as cpp default program description.")
-endif()
-
 if (LITE_ON_TINY_PUBLISH)
   add_definitions("-DLITE_ON_TINY_PUBLISH")
+  add_definitions("-DLITE_ON_FLATBUFFERS_DESC_VIEW")
+  message(STATUS "Flatbuffers will be used as cpp default program description.")
 else()
   add_definitions("-DLITE_WITH_FLATBUFFERS_DESC")
 endif()
diff --git a/cmake/generic.cmake b/cmake/generic.cmake
index d859404d559282970d96a735c400f745481e8efa..af05db559123e6d7305c35f95e3dacd58eeb7e19 100644
--- a/cmake/generic.cmake
+++ b/cmake/generic.cmake
@@ -267,6 +267,10 @@ function(cc_library TARGET_NAME)
       list(REMOVE_ITEM cc_library_DEPS warpctc)
       add_dependencies(${TARGET_NAME} warpctc)
     endif()
+    if("${cc_library_DEPS};" MATCHES "fbs_headers;")
+      list(REMOVE_ITEM cc_library_DEPS fbs_headers)
+      add_dependencies(${TARGET_NAME} fbs_headers)
+    endif()
     # Only deps libmklml.so, not link
     if("${cc_library_DEPS};" MATCHES "mklml;")
       list(REMOVE_ITEM cc_library_DEPS mklml)
diff --git a/lite/api/android/jni/native/CMakeLists.txt b/lite/api/android/jni/native/CMakeLists.txt
index 4638ed5fdfb360c1475ad6e2d1a8eb2051673eb1..1aa9aeeeff6f2737aa3a2a31beaedb0dbf4184f8 100644
--- a/lite/api/android/jni/native/CMakeLists.txt
+++ b/lite/api/android/jni/native/CMakeLists.txt
@@ -17,7 +17,6 @@ if (NOT LITE_ON_TINY_PUBLISH)
   # Unlike static library, module library has to link target to be able to work
   # as a single .so lib.
   target_link_libraries(paddle_lite_jni ${lib_DEPS} ${arm_kernels} ${npu_kernels})
-  add_dependencies(paddle_lite_jni fbs_headers)
   if (LITE_WITH_NPU)
     # Strips the symbols of our protobuf functions to fix the conflicts during
     # loading HIAI builder libs (libhiai_ir.so and libhiai_ir_build.so)
diff --git a/lite/api/light_api.cc b/lite/api/light_api.cc
index fbcf171726d741ef0073f423bc4a600c9f9389d0..56461fded536f87ee59ecc8efbe2d3463c7c3822 100644
--- a/lite/api/light_api.cc
+++ b/lite/api/light_api.cc
@@ -46,7 +46,6 @@ void LightPredictor::Build(const std::string& model_dir,
     case lite_api::LiteModelType::kProtobuf:
       LoadModelPb(model_dir, "", "", scope_.get(), program_desc_.get());
       break;
-#endif
     case lite_api::LiteModelType::kNaiveBuffer: {
       if (model_from_memory) {
         LoadModelNaiveFromMemory(
@@ -56,6 +55,7 @@ void LightPredictor::Build(const std::string& model_dir,
       }
       break;
     }
+#endif
     default:
       LOG(FATAL) << "Unknown model type";
   }
diff --git a/lite/api/python/pybind/CMakeLists.txt b/lite/api/python/pybind/CMakeLists.txt
index 1f8ee66a0dbce37480672cc213a60d87d28c4142..b0b897b5d47089eb4331bf4909b4e778092a6a7b 100644
--- a/lite/api/python/pybind/CMakeLists.txt
+++ b/lite/api/python/pybind/CMakeLists.txt
@@ -9,7 +9,7 @@ if(WIN32)
   target_link_libraries(lite_pybind ${os_dependency_modules})
 else()
   lite_cc_library(lite_pybind SHARED SRCS pybind.cc DEPS ${PYBIND_DEPS})
-  target_sources(lite_pybind PUBLIC ${__lite_cc_files})
+  target_sources(lite_pybind PUBLIC ${__lite_cc_files} fbs_headers)
 endif(WIN32)
 
 if (LITE_ON_TINY_PUBLISH)
diff --git a/lite/core/CMakeLists.txt b/lite/core/CMakeLists.txt
index f6f8b231fe5448ca65f86e1234208c97d6860622..2a7751cd2a635ca83a602f7a53a1487e263b8c78 100644
--- a/lite/core/CMakeLists.txt
+++ b/lite/core/CMakeLists.txt
@@ -2,7 +2,7 @@ if (WITH_TESTING)
   lite_cc_library(lite_gtest_main SRCS lite_gtest_main.cc DEPS gtest gflags)
 endif()
 lite_cc_library(target_wrapper SRCS target_wrapper.cc
-  DEPS target_wrapper_host place
+  DEPS target_wrapper_host place fbs_headers
   X86_DEPS target_wrapper_x86
   CUDA_DEPS target_wrapper_cuda
   XPU_DEPS target_wrapper_xpu
diff --git a/lite/model_parser/compatibility.cc b/lite/model_parser/compatibility.cc
index dd43f7bd25277e34a2fd8b04aae6b705402a0436..955bf6fb681b5d04b54892ccd9a35cc21d6992a3 100644
--- a/lite/model_parser/compatibility.cc
+++ b/lite/model_parser/compatibility.cc
@@ -11,16 +11,15 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
-
 #include "lite/model_parser/compatibility.h"
+#ifndef LITE_ON_TINY_PUBLISH
 #include "lite/core/type_system.h"
+#include "lite/model_parser/cpp_desc.h"
 #include "lite/model_parser/naive_buffer/block_desc.h"
 #include "lite/model_parser/naive_buffer/op_desc.h"
 #include "lite/model_parser/naive_buffer/program_desc.h"
 #include "lite/model_parser/naive_buffer/var_desc.h"
-#ifndef LITE_ON_TINY_PUBLISH
-#include "lite/model_parser/cpp_desc.h"
 #endif
 
 namespace paddle {
diff --git a/lite/model_parser/compatible_pb.cc b/lite/model_parser/compatible_pb.cc
index a679d815225d222f224351ab2177b07e37176781..826e46e7d101c565263c72061383305a25c82854 100644
--- a/lite/model_parser/compatible_pb.cc
+++ b/lite/model_parser/compatible_pb.cc
@@ -15,12 +15,12 @@
 #include "lite/model_parser/compatible_pb.h"
 #include
 #include
+#ifndef LITE_ON_TINY_PUBLISH
+#include "lite/model_parser/flatbuffers/program_desc.h"
 #include "lite/model_parser/naive_buffer/block_desc.h"
 #include "lite/model_parser/naive_buffer/op_desc.h"
 #include "lite/model_parser/naive_buffer/program_desc.h"
 #include "lite/model_parser/naive_buffer/var_desc.h"
-#ifndef LITE_ON_TINY_PUBLISH
-#include "lite/model_parser/flatbuffers/program_desc.h"
 #include "lite/model_parser/pb/block_desc.h"
 #include "lite/model_parser/pb/op_desc.h"
 #include "lite/model_parser/pb/program_desc.h"
@@ -67,7 +67,6 @@ void TransformVarDescAnyToCpp(const fbs::VarDesc &any_desc,
     cpp_desc->SetShape(any_desc.GetShape());
   }
 }
-#endif
 
 template <>
 void TransformVarDescAnyToCpp(
@@ -84,7 +83,7 @@ void TransformVarDescAnyToCpp(
     cpp_desc->SetShape(any_desc.GetShape());
   }*/
 }
-
+#endif
 /// For OpDesc transform
 template <typename OpDescType>
 void OpInputsAnyToCpp(const OpDescType &any_desc, cpp::OpDesc *cpp_desc) {
@@ -312,12 +311,11 @@ void OpAttrsCppToAny(const cpp::OpDesc &cpp_desc, OpDescType *any_desc) {
   }                                                                     \
   }
 
+#ifndef LITE_ON_TINY_PUBLISH
 TRANS_VAR_ANY_WITH_CPP_IMPL(naive_buffer::VarDesc);
 TRANS_OP_ANY_WITH_CPP_IMPL(naive_buffer::OpDesc);
 TRANS_BLOCK_ANY_WITH_CPP_IMPL(OpDesc, VarDesc, naive_buffer, naive_buffer);
 TRANS_PROGRAM_ANY_WITH_CPP_IMPL(BlockDesc, naive_buffer, naive_buffer);
-
-#ifndef LITE_ON_TINY_PUBLISH
 TRANS_VAR_ANY_WITH_CPP_IMPL(fbs::VarDesc);
 TRANS_OP_ANY_WITH_CPP_IMPL(fbs::OpDesc);
 TRANS_BLOCK_ANY_WITH_CPP_IMPL(OpDescT, VarDescT, fbs, fbs);
diff --git a/lite/model_parser/flatbuffers/program_desc.h b/lite/model_parser/flatbuffers/program_desc.h
index 30c2b202d85c4d1609748f9de0d16f85d271f365..afe7611599ac9eb2530866b0015d816e67ca878a 100644
--- a/lite/model_parser/flatbuffers/program_desc.h
+++ b/lite/model_parser/flatbuffers/program_desc.h
@@ -48,7 +48,7 @@ class ProgramDescView : public ProgramDescAPI {
 
   void InitProgramDesc() {
     desc_ = proto::GetProgramDesc(buf_.data());
-    blocks_.resize(BlocksSize());
+    blocks_.resize(desc_->blocks()->size());
     for (size_t idx = 0; idx < BlocksSize(); ++idx) {
       blocks_[idx] = BlockDescView(desc_->blocks()->Get(idx));
     }
@@ -59,7 +59,7 @@ class ProgramDescView : public ProgramDescAPI {
     Init(buf_);
   }
 
-  size_t BlocksSize() const override { return desc_->blocks()->size(); }
+  size_t BlocksSize() const override { return blocks_.size(); }
 
   template <typename T>
   T const* GetBlock(int32_t idx) const;
diff --git a/lite/model_parser/model_parser.cc b/lite/model_parser/model_parser.cc
index 2c51b31ca9846fb5442363d2b9ec2efac2793408..e96ddce7c0e686f13d1bfdb021ab40bd093a70f8 100644
--- a/lite/model_parser/model_parser.cc
+++ b/lite/model_parser/model_parser.cc
@@ -24,11 +24,11 @@
 #include "lite/core/version.h"
 #include "lite/model_parser/base/apis.h"
 #include "lite/model_parser/flatbuffers/io.h"
+#ifndef LITE_ON_TINY_PUBLISH
 #include "lite/model_parser/naive_buffer/combined_params_desc.h"
 #include "lite/model_parser/naive_buffer/param_desc.h"
 #include "lite/model_parser/naive_buffer/program_desc.h"
 #include "lite/model_parser/naive_buffer/var_desc.h"
-#ifndef LITE_ON_TINY_PUBLISH
 #include "lite/model_parser/pb/program_desc.h"
 #include "lite/model_parser/pb/var_desc.h"
 #endif
@@ -618,7 +618,7 @@ void SaveModelNaive(const std::string &model_file,
 
   LOG(INFO) << "Save naive buffer model in '" << prog_path << " successfully";
 }
-#endif  // LITE_ON_TINY_PUBLISH
+
 template <typename T>
 void SetTensorDataNaive(T *out, size_t size, const std::vector<T> &src) {
   CHECK(out);
@@ -716,6 +716,7 @@ void LoadCombinedParamsNaive(const std::string &path,
         << "] not found";
   }
 }
+
 ///////////////////////////////////////////////////////////////////////////////
 /* Old Method of loading and saving model, before V2.3.0 */
 /* Warning: this is an old inference and will be abandened in release/v3.0.0 */
@@ -799,6 +800,7 @@ void LoadModelNaiveFromMemory(const std::string &model_buffer,
 
   VLOG(4) << "Load model from naive buffer memory successfully";
 }
+#endif  // LITE_ON_TINY_PUBLISH
 
 //////////////////////////////////////////////////////////////////////
 // usage: LoadModelNaiveFromFile is used for loading model from file.
@@ -807,9 +809,8 @@ void ReadModelDataFromFile(T *data,
                            const std::string &prog_path,
                            uint64_t *offset,
                            const uint64_t &size) {
-  naive_buffer::BinaryTable data_table;
-  data_table.LoadFromFile(prog_path, *offset, size);
-  memcpy(data, data_table.cursor(), size);
+  std::vector<char> prog_data = lite::fbs::LoadFile(prog_path, *offset, size);
+  memcpy(data, prog_data.data(), size);
   *offset = *offset + size;
 }
 /*
@@ -835,7 +836,6 @@ void LoadModelNaiveFromFile(const std::string &filename,
                             cpp::ProgramDesc *cpp_prog) {
   CHECK(cpp_prog);
   CHECK(scope);
-  cpp_prog->ClearBlocks();
 
   // ModelFile
   const std::string prog_path = filename;
@@ -850,7 +850,11 @@
   switch (meta_version) {
     case 0:
+#ifndef LITE_ON_TINY_PUBLISH
      LoadModelNaiveV0FromFile(filename, scope, cpp_prog);
+#else
+      LOG(FATAL) << "Error, this model file is not supported.";
+#endif
      break;
    case 1:
      LoadModelFbsFromFile(filename, scope, cpp_prog);
@@ -860,6 +864,7 @@
      break;
  }
 }
+#ifndef LITE_ON_TINY_PUBLISH
 void LoadModelNaiveV0FromFile(const std::string &filename,
                               Scope *scope,
                               cpp::ProgramDesc *cpp_prog) {
@@ -917,13 +922,13 @@ void LoadModelNaiveV0FromFile(const std::string &filename,
 
   VLOG(4) << "Load naive buffer model in '" << filename << "' successfully";
 }
-
+#endif  // LITE_ON_TINY_PUBLISH
 void LoadModelFbsFromFile(const std::string &filename,
                           Scope *scope,
                           cpp::ProgramDesc *cpp_prog) {
   CHECK(cpp_prog);
   CHECK(scope);
-  cpp_prog->ClearBlocks();
+  CHECK_EQ(cpp_prog->BlocksSize(), 0);
 
   // Offset
   uint64_t offset = sizeof(uint16_t);
@@ -973,9 +978,7 @@ void ReadModelDataFromBuffer(T *data,
                              const std::string &model_buffer,
                              uint64_t *offset,
                              const uint64_t &size) {
-  naive_buffer::BinaryTable data_table;
-  data_table.LoadFromMemory(model_buffer.c_str() + *offset, size);
-  memcpy(data, data_table.cursor(), size);
+  memcpy(data, model_buffer.c_str() + *offset, size);
   *offset = *offset + size;
 }
 
@@ -994,7 +997,11 @@ void LoadModelNaiveFromMemory(const std::string &model_buffer,
   VLOG(4) << "Meta_version:" << meta_version;
   switch (meta_version) {
     case 0:
+#ifndef LITE_ON_TINY_PUBLISH
      LoadModelNaiveV0FromMemory(model_buffer, scope, cpp_prog);
+#else
+      LOG(FATAL) << "Error: Unsupported model type.";
+#endif
      break;
    case 1:
      LoadModelNaiveV1FromMemory(model_buffer, scope, cpp_prog);
@@ -1004,7 +1011,7 @@ void LoadModelNaiveFromMemory(const std::string &model_buffer,
   }
 }
-
+#ifndef LITE_ON_TINY_PUBLISH
 void LoadModelNaiveV0FromMemory(const std::string &model_buffer,
                                 Scope *scope,
                                 cpp::ProgramDesc *cpp_prog) {
@@ -1040,7 +1047,7 @@ void LoadModelNaiveV0FromMemory(const std::string &model_buffer,
 
   VLOG(4) << "Load model from naive buffer memory successfully";
 }
-
+#endif
 ///////////////////////////////////////////////////////////////////
 // Meta_version=1
 ///////////////////////////////////////////////////////////////////
diff --git a/lite/model_parser/model_parser.h b/lite/model_parser/model_parser.h
index 3a37c8fbe4c4b80a885e55b84c6a84182b391f76..02c254e909877e4905dea5a46cd5a340ca9d9fdb 100644
--- a/lite/model_parser/model_parser.h
+++ b/lite/model_parser/model_parser.h
@@ -21,11 +21,11 @@
 #include
 #ifndef LITE_ON_TINY_PUBLISH
 #include "lite/core/framework.pb.h"
+#include "lite/model_parser/naive_buffer/proto/framework.nb.h"
 #endif
 #include "lite/core/scope.h"
 #include "lite/core/variable.h"
 #include "lite/model_parser/compatible_pb.h"
-#include "lite/model_parser/naive_buffer/proto/framework.nb.h"
 
 namespace paddle {
 namespace lite {
@@ -101,45 +101,39 @@ void SaveModelNaive(const std::string& model_dir,
 void SaveModelFbs(const std::string& model_dir,
                   const Scope& exec_scope,
                   const cpp::ProgramDesc& cpp_prog);
-#endif  // LITE_ON_TINY_PUBLISH
-void LoadModelFbsFromFile(const std::string& filename,
-                          Scope* scope,
-                          cpp::ProgramDesc* cpp_prog);
 
 void LoadParamNaive(const std::string& path,
                     lite::Scope* scope,
                     const std::string& name);
-
 // warning:this old inference will be abandened in release/v3.0.0
 // and LoadModelNaiveFromFile is suggested.
 void LoadModelNaive(const std::string& model_dir,
                     lite::Scope* scope,
                     cpp::ProgramDesc* prog,
                     bool combined = true);
-void LoadModelFbsFromFile(const std::string& filename,
-                          Scope* scope,
-                          cpp::ProgramDesc* cpp_prog);
 void LoadModelNaiveV0FromFile(const std::string& filename,
                               Scope* scope,
                               cpp::ProgramDesc* cpp_prog);
-void LoadModelNaiveFromFile(const std::string& filename,
-                            lite::Scope* scope,
-                            cpp::ProgramDesc* prog);
 void LoadModelNaiveFromMemory(const std::string& model_buffer,
                               const std::string& param_buffer,
                               lite::Scope* scope,
                               cpp::ProgramDesc* cpp_prog);
-void LoadModelNaiveFromMemory(const std::string& model_buffer,
-                              lite::Scope* scope,
-                              cpp::ProgramDesc* cpp_prog);
-void LoadModelNaiveV1FromMemory(const std::string& model_buffer,
+void LoadModelNaiveV0FromMemory(const std::string& model_buffer,
                                 Scope* scope,
                                 cpp::ProgramDesc* cpp_prog);
+#endif  // LITE_ON_TINY_PUBLISH
+void LoadModelFbsFromFile(const std::string& filename,
+                          Scope* scope,
+                          cpp::ProgramDesc* cpp_prog);
 
-void LoadModelFbsFromMemory(const std::string& model_buffer,
+void LoadModelNaiveFromFile(const std::string& filename,
                             lite::Scope* scope,
-                            cpp::ProgramDesc* cpp_prog);
-void LoadModelNaiveV0FromMemory(const std::string& model_buffer,
+                            cpp::ProgramDesc* prog);
+
+void LoadModelNaiveFromMemory(const std::string& model_buffer,
+                              lite::Scope* scope,
+                              cpp::ProgramDesc* cpp_prog);
+void LoadModelNaiveV1FromMemory(const std::string& model_buffer,
                                 Scope* scope,
                                 cpp::ProgramDesc* cpp_prog);
 }  // namespace lite
diff --git a/lite/model_parser/naive_buffer/CMakeLists.txt b/lite/model_parser/naive_buffer/CMakeLists.txt
index b44b817d315adfdb49e86d47924bc1294070f802..4e8311d97c96cce57c50ea6897ababea667c600e 100644
--- a/lite/model_parser/naive_buffer/CMakeLists.txt
+++ b/lite/model_parser/naive_buffer/CMakeLists.txt
@@ -1,3 +1,8 @@
+if (LITE_ON_TINY_PUBLISH)
+  set(naive_wrapper "")
+  return()
+endif()
+
 lite_cc_library(naive_buffer SRCS naive_buffer.cc DEPS types)
 
 add_subdirectory(proto)
diff --git a/lite/tools/build.sh b/lite/tools/build.sh
index bbfa81be2d9b47ddeba132be7f841a992ca9de0d..1f5389cce3d5e55fee28bd32e91ed212fbedab81 100755
--- a/lite/tools/build.sh
+++ b/lite/tools/build.sh
@@ -37,7 +37,6 @@ WITH_HUAWEI_ASCEND_NPU=OFF # Huawei Ascend Builder/Runtime Libs on X86 host
 # default installation path, ensure acllib/atc/opp directories are all in this root dir
 HUAWEI_ASCEND_NPU_DDK_ROOT="/usr/local/Ascend/ascend-toolkit/latest/x86_64-linux_gcc4.8.5"
 PYTHON_EXECUTABLE_OPTION=""
-ENABLE_FLATBUFFERS_DESC_VIEW=OFF
 IOS_DEPLOYMENT_TARGET=9.0
 
 readonly THIRDPARTY_TAR=https://paddle-inference-dist.bj.bcebos.com/PaddleLite/third-party-05b862.tar.gz
@@ -148,8 +147,7 @@ function make_tiny_publish_so {
           -DAPU_DDK_ROOT=$APU_DDK_ROOT \
           -DLITE_WITH_RKNPU=$BUILD_RKNPU \
           -DRKNPU_DDK_ROOT=$RKNPU_DDK_ROOT \
-          -DARM_TARGET_OS=${os} -DARM_TARGET_ARCH_ABI=${abi} -DARM_TARGET_LANG=${lang} \
-          -DLITE_ON_FLATBUFFERS_DESC_VIEW=${ENABLE_FLATBUFFERS_DESC_VIEW}
+          -DARM_TARGET_OS=${os} -DARM_TARGET_ARCH_ABI=${abi} -DARM_TARGET_LANG=${lang}
 
     make publish_inference -j$NUM_PROC
     cd - > /dev/null
@@ -438,7 +436,6 @@ function print_usage {
     echo -e "--build_python: (OFF|ON); controls whether to publish python api lib (ANDROID and IOS is not supported)"
    echo -e "--build_java: (OFF|ON); controls whether to publish java api lib (Only ANDROID is supported)"
    echo -e "--build_dir: directory for building"
-    echo -e "--enable_flatbuffers_view: (OFF|ON); Use the flatbuffers read-only view to load the model. If ON, the naive buffer will no longer be supported."
    echo -e "--ios_deployment_target: (default: 9.0); Set the minimum compatible system version for ios deployment."
    echo
    echo -e "argument choices:"
@@ -584,10 +581,6 @@ function main {
                HUAWEI_ASCEND_NPU_DDK_ROOT="${i#*=}"
                shift
                ;;
-            --enable_flatbuffers_view=*)
-                ENABLE_FLATBUFFERS_DESC_VIEW="${i#*=}"
-                shift
-                ;;
            --ios_deployment_target=*)
                IOS_DEPLOYMENT_TARGET="${i#*=}"
                shift
diff --git a/lite/utils/logging.h b/lite/utils/logging.h
index c7fa8d4cf113abebb29c4ebe972e243a39573cf0..731ba7ad719ce3d7a1c56c7707bb255c5463824a 100644
--- a/lite/utils/logging.h
+++ b/lite/utils/logging.h
@@ -189,7 +189,9 @@ class LogMessageFatal : public LogMessage {
 #ifndef LITE_ON_TINY_PUBLISH
     abort();
 #else
-    assert(false);
+    // assert() could be used here if process termination should depend on
+    // the NDEBUG macro; abort() terminates unconditionally.
+    abort();
 #endif
 #endif
   }
@@ -250,7 +252,11 @@ class VoidifyFatal : public Voidify {
 #ifdef LITE_WITH_EXCEPTION
   ~VoidifyFatal() noexcept(false) { throw std::exception(); }
 #else
-  ~VoidifyFatal() { assert(false); }
+  ~VoidifyFatal() {
+    // assert() could be used here if process termination should depend on
+    // the NDEBUG macro; abort() terminates unconditionally.
+    abort();
+  }
 #endif
 };
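
A note on the ProgramDescView hunk: InitProgramDesc() now sizes the cache directly from the flatbuffer (desc_->blocks()->size()), and BlocksSize() answers from the cached blocks_ vector, so the block count no longer re-reads desc_ on every call and no longer depends on initialization order. The sketch below illustrates only that caching pattern; RawProgram, RawBlock, BlockView and ProgramView are stand-ins for the generated flatbuffers types, not Paddle Lite code.

// Sketch of a read-only view that wraps child elements once and serves
// size queries from its own cache.  RawProgram/RawBlock mimic flatbuffers
// accessors; they are illustrative, not the generated Paddle Lite types.
#include <cstddef>
#include <vector>

struct RawBlock { int op_count = 0; };
struct RawProgram { std::vector<RawBlock> blocks; };

class BlockView {
 public:
  BlockView() = default;
  explicit BlockView(const RawBlock* raw) : raw_(raw) {}

 private:
  const RawBlock* raw_ = nullptr;
};

class ProgramView {
 public:
  explicit ProgramView(const RawProgram* desc) : desc_(desc) {
    // Size the cache from the underlying description once, then wrap
    // each block in its view.
    blocks_.resize(desc_->blocks.size());
    for (std::size_t idx = 0; idx < blocks_.size(); ++idx) {
      blocks_[idx] = BlockView(&desc_->blocks[idx]);
    }
  }

  // Answer from the cache rather than touching the raw description again.
  std::size_t BlocksSize() const { return blocks_.size(); }

 private:
  const RawProgram* desc_ = nullptr;
  std::vector<BlockView> blocks_;
};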
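
On the ReadModelDataFromFile/ReadModelDataFromBuffer hunks: both now copy raw bytes at a running offset (the file case goes through lite::fbs::LoadFile) instead of building a naive_buffer::BinaryTable first. Below is a rough, assumed sketch of a helper with that shape; LoadBytes and ReadAt are hypothetical names and not the actual lite::fbs::LoadFile implementation.

// Assumed sketch of reading `size` bytes at `offset` from a file, in the
// spirit of the lite::fbs::LoadFile call used above.  LoadBytes/ReadAt are
// hypothetical helpers, not the real Paddle Lite API.
#include <cstdint>
#include <cstring>
#include <fstream>
#include <stdexcept>
#include <string>
#include <vector>

std::vector<char> LoadBytes(const std::string& path, uint64_t offset, uint64_t size) {
  std::ifstream file(path, std::ios::binary);
  if (!file) throw std::runtime_error("cannot open " + path);
  file.seekg(static_cast<std::streamoff>(offset), std::ios::beg);
  std::vector<char> buffer(size);
  file.read(buffer.data(), static_cast<std::streamsize>(size));
  if (static_cast<uint64_t>(file.gcount()) != size) {
    throw std::runtime_error("short read from " + path);
  }
  return buffer;
}

// Mirrors the patched ReadModelDataFromFile: copy into `data` and advance
// the running offset so consecutive fields can be read back to back.
template <typename T>
void ReadAt(T* data, const std::string& path, uint64_t* offset, uint64_t size) {
  const std::vector<char> bytes = LoadBytes(path, *offset, size);
  std::memcpy(data, bytes.data(), size);
  *offset += size;
}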
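
On the LoadModelNaiveFromFile/LoadModelNaiveFromMemory hunks: the loader reads a 16-bit meta version from the head of the model and keeps the meta_version 0 (naive-buffer) branch only when LITE_ON_TINY_PUBLISH is not defined, while the flatbuffers branch stays available in every build. A self-contained sketch of that dispatch pattern, with ReadUint16, LoadV0 and LoadFbs as illustrative stand-ins for the loaders in the patch:

// Self-contained sketch of the meta-version dispatch.  ReadUint16, LoadV0
// and LoadFbs are illustrative stand-ins, not the patched functions.
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <fstream>
#include <string>

static uint16_t ReadUint16(const std::string& path) {
  std::ifstream file(path, std::ios::binary);
  uint16_t meta_version = 0;
  // The first two bytes of the model file encode the format version.
  file.read(reinterpret_cast<char*>(&meta_version), sizeof(meta_version));
  return meta_version;
}

static void LoadV0(const std::string& path) { std::printf("naive buffer v0: %s\n", path.c_str()); }
static void LoadFbs(const std::string& path) { std::printf("flatbuffers: %s\n", path.c_str()); }

void LoadModel(const std::string& path) {
  switch (ReadUint16(path)) {
    case 0:
#ifndef LITE_ON_TINY_PUBLISH
      LoadV0(path);  // legacy naive-buffer model, full build only
#else
      // Tiny publish drops the naive-buffer reader entirely.
      std::fprintf(stderr, "meta_version 0 models are not supported here\n");
      std::abort();
#endif
      break;
    case 1:
      LoadFbs(path);  // flatbuffers-based model, available in every build
      break;
    default:
      std::fprintf(stderr, "unknown meta_version\n");
      std::abort();
  }
}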
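
On the CHECK_EQ(cpp_prog->BlocksSize(), 0) change in LoadModelFbsFromFile: instead of clearing whatever the caller passed in (ClearBlocks()), the loader now requires a fresh ProgramDesc and fails loudly otherwise. A minimal sketch of that precondition style, using assert() in place of the CHECK_EQ macro and a toy ProgramDesc:

// Minimal sketch of requiring an empty destination instead of clearing it.
// ProgramDesc here is a toy type, and assert() stands in for CHECK_EQ,
// which in Paddle Lite aborts in release builds as well.
#include <cassert>
#include <cstddef>
#include <vector>

struct ProgramDesc {
  std::vector<int> blocks;  // stand-in for real block descriptors
  std::size_t BlocksSize() const { return blocks.size(); }
};

void LoadInto(ProgramDesc* desc) {
  // Loading into a non-empty desc is treated as a caller bug: fail loudly
  // rather than silently discarding existing blocks.
  assert(desc != nullptr && desc->BlocksSize() == 0);
  desc->blocks.push_back(0);  // ... fill blocks from the model file ...
}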
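
On the logging.h hunks: assert(false) is compiled out when NDEBUG is defined, which is the usual setting for release builds, so a fatal log could previously return to its caller; abort() terminates unconditionally. A small illustration of the difference (FatalAlways and FatalDebugOnly are illustrative helpers, not Paddle Lite API):

// assert() is removed by the preprocessor when NDEBUG is defined;
// abort() terminates the process in every build configuration.
#include <cassert>
#include <cstdio>
#include <cstdlib>

[[noreturn]] void FatalAlways(const char* msg) {
  std::fprintf(stderr, "FATAL: %s\n", msg);
  std::abort();  // ends the process in debug and release builds alike
}

void FatalDebugOnly(const char* msg) {
  std::fprintf(stderr, "FATAL: %s\n", msg);
  assert(false && "fatal log reached");  // no-op when NDEBUG is defined
  // With -DNDEBUG the caller keeps running after a "fatal" message.
}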