Commit 8fbdce08 authored by huzhiqiang, committed by Yan Chunwei

cherry-pick: add dynamic library for tiny_publish and full_publish test=develop (#2040)

Parent af2a2e79
...@@ -76,6 +76,7 @@ if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND LITE_WITH_ARM)
COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_full_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_light_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
COMMAND cp "${CMAKE_BINARY_DIR}/lite/api/model_optimize_tool" "${INFER_LITE_PUBLISH_ROOT}/bin"
COMMAND cp "${CMAKE_BINARY_DIR}/lite/api/*.so" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
COMMAND cp "${CMAKE_BINARY_DIR}/lite/gen_code/paddle_code_generator" "${INFER_LITE_PUBLISH_ROOT}/bin"
COMMAND cp "${CMAKE_BINARY_DIR}/lite/api/test_model_bin" "${INFER_LITE_PUBLISH_ROOT}/bin"
)
...@@ -85,6 +86,11 @@ if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND LITE_WITH_ARM)
add_dependencies(publish_inference_cxx_lib bundle_full_api)
add_dependencies(publish_inference_cxx_lib bundle_light_api)
add_dependencies(publish_inference_cxx_lib test_model_bin)
if (ARM_TARGET_OS STREQUAL "android")
add_dependencies(publish_inference_cxx_lib paddle_api_full_shared)
add_dependencies(publish_inference_cxx_lib paddle_api_light_shared)
add_dependencies(publish_inference_cxx_lib paddle_cxx_api_shared)
endif()
add_dependencies(publish_inference publish_inference_cxx_lib)
add_custom_command(TARGET publish_inference_cxx_lib POST_BUILD
COMMAND ${CMAKE_STRIP} "--strip-debug" ${INFER_LITE_PUBLISH_ROOT}/cxx/lib/*.a)
...@@ -99,6 +105,18 @@ if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND LITE_WITH_ARM)
)
add_dependencies(tiny_publish_lib bundle_light_api)
add_dependencies(publish_inference tiny_publish_lib)
else()
if (ARM_TARGET_OS STREQUAL "android")
add_custom_target(tiny_publish_cxx_lib ${TARGET}
COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx"
COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
COMMAND cp "${CMAKE_SOURCE_DIR}/lite/api/paddle_*.h" "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
COMMAND cp "${CMAKE_BINARY_DIR}/lite/api/libpaddle_cxx_api_shared.so" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
)
add_dependencies(tiny_publish_cxx_lib paddle_cxx_api_shared)
add_dependencies(publish_inference tiny_publish_cxx_lib)
endif()
endif()
endif()
...
...@@ -4,6 +4,30 @@ else()
lite_cc_library(place SRCS paddle_place.cc DEPS glog)
endif(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
if (LITE_ON_TINY_PUBLISH)
set(CMAKE_CXX_FLAGS_RELEASE "-Os -DNDEBUG")
set(CMAKE_C_FLAGS_RELEASE "-Os -DNDEBUG")
set(lib_DEPS light_api paddle_api paddle_api_light)
else()
set(lib_DEPS light_api cxx_api paddle_api_full paddle_api paddle_api_light)
endif()
if ((NOT LITE_ON_TINY_PUBLISH) AND (ARM_TARGET_OS STREQUAL "android"))
lite_cc_library(paddle_cxx_api_shared MODULE
SRCS paddle_api_shared.cc
DEPS ${lib_DEPS}
ARM_DEPS ${arm_kernels} NPU_DEPS ${npu_kernels})
# Unlike static library, module library has to link target to be able to work
# as a single .so lib.
target_link_libraries(paddle_cxx_api_shared ${lib_DEPS} ${arm_kernels} ${npu_kernels})
else()
if (ARM_TARGET_OS STREQUAL "android")
add_library(paddle_cxx_api_shared SHARED "")
target_sources(paddle_cxx_api_shared PUBLIC paddle_api_shared.cc)
add_dependencies(paddle_cxx_api_shared op_list_h kernel_list_h ${lib_DEPS})
endif()
endif()
if (WITH_TESTING)
lite_cc_library(lite_api_test_helper SRCS lite_api_test_helper.cc
DEPS scope optimizer target_wrapper_host model_parser program
...@@ -174,6 +198,7 @@ if (LITE_ON_TINY_PUBLISH)
lite_cc_library(paddle_api_light SRCS light_api_impl.cc DEPS light_api paddle_api stream)
else()
lite_cc_library(paddle_api_light SRCS light_api_impl.cc DEPS light_api paddle_api)
lite_cc_library(paddle_api_light_shared MODULE SRCS light_api_impl.cc DEPS light_api paddle_api)
endif()
if (NOT LITE_ON_TINY_PUBLISH)
lite_cc_library(paddle_api_full SRCS cxx_api_impl.cc DEPS cxx_api paddle_api_light
...@@ -182,6 +207,12 @@ if (NOT LITE_ON_TINY_PUBLISH)
NPU_DEPS ${npu_kernels}
CL_DEPS ${opencl_kernels}
FPGA_DEPS ${fpga_kernels})
lite_cc_library(paddle_api_full_shared MODULE SRCS cxx_api_impl.cc DEPS cxx_api paddle_api_light
${ops}
ARM_DEPS ${arm_kernels}
NPU_DEPS ${npu_kernels}
CL_DEPS ${opencl_kernels}
FPGA_DEPS ${fpga_kernels})
# The final inference library for just MobileConfig.
bundle_static_library(paddle_api_full paddle_api_full_bundled bundle_full_api)
endif()
...
/* Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "lite/api/paddle_api.h"
#include "lite/api/paddle_use_kernels.h"
#include "lite/api/paddle_use_ops.h"
#ifndef LITE_ON_TINY_PUBLISH
#include "lite/api/paddle_use_passes.h"
#endif
namespace paddle {
namespace lite_api {
void RunModel() {
  // 1. Set CxxConfig
  CxxConfig config;
  // 2. Create PaddlePredictor by CxxConfig
  std::shared_ptr<PaddlePredictor> predictor =
      CreatePaddlePredictor<CxxConfig>(config);
}
} // namespace lite_api
} // namespace paddle
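The paddle_api_shared.cc translation unit above mainly pulls the op and kernel registries (paddle_use_ops.h / paddle_use_kernels.h) into the shared library; its RunModel() only constructs a predictor from an empty CxxConfig. Below is a minimal sketch of how an application linked against the published libpaddle_cxx_api_shared.so might drive the same API. The model path, input shape, and the calls set_model_dir, set_valid_places, GetInput, Run, and GetOutput come from the public paddle_api.h / paddle_place.h interface rather than from this diff, so treat it as an illustration under those assumptions, not as part of the change.

#include <memory>
#include <vector>
#include "lite/api/paddle_api.h"
#include "lite/api/paddle_place.h"

using namespace paddle::lite_api;  // NOLINT

int main() {
  // 1. Point the config at an (assumed) optimized model directory and
  //    declare the places this ARM build can execute on.
  CxxConfig config;
  config.set_model_dir("./mobilenet_v1");  // hypothetical model path
  config.set_valid_places({Place{TARGET(kARM), PRECISION(kFloat)}});

  // 2. Create the predictor from the config, as in RunModel() above.
  std::shared_ptr<PaddlePredictor> predictor =
      CreatePaddlePredictor<CxxConfig>(config);

  // 3. Fill the first input tensor (shape is a placeholder) and run.
  auto input = predictor->GetInput(0);
  input->Resize({1, 3, 224, 224});
  float* data = input->mutable_data<float>();
  for (int i = 0; i < 1 * 3 * 224 * 224; ++i) data[i] = 1.0f;
  predictor->Run();

  // 4. Read back the first output tensor.
  auto output = predictor->GetOutput(0);
  (void)output;
  return 0;
}

For a tiny_publish build, the analogous flow would presumably go through MobileConfig (the light API mentioned in the "final inference library for just MobileConfig" comment) rather than CxxConfig.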