# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Pull in generic test helper functions used by inference tests.
if(WITH_TESTING)
  include(tests/test.cmake) # some generic cmake function for inference
endif()

# I/O helper library for the inference runtime (model/program loading).
cc_library(paddle_inference_io
    SRCS io.cc
    DEPS paddle_framework ${GLOB_OP_LIB} ${GLOB_OPERATOR_DEPS})
# analysis and tensorrt must be added before creating static library,
# otherwise, there would be undefined reference to them in static library.
add_subdirectory(analysis)
add_subdirectory(utils)
if(TENSORRT_FOUND)
  add_subdirectory(tensorrt)
endif()
# Optional Paddle-Lite backend support.
if(WITH_LITE)
  add_subdirectory(lite)
endif()

# fluid_modules exclude API-interface of inference/api and inference/capi_exp
get_property(fluid_modules GLOBAL PROPERTY FLUID_MODULES)
get_property(phi_modules GLOBAL PROPERTY PHI_MODULES)
set(utils_modules stringpiece pretty_log string_helper)

add_subdirectory(api)
# Create static inference library if needed
# All static libs in inference/api
set(STATIC_INFERENCE_API
    paddle_inference_api
    analysis_predictor
    zero_copy_tensor
    reset_tensor_array
    analysis_config
    paddle_pass_builder
    activation_functions
    ${mkldnn_quantizer_cfg})

if(WITH_ONNXRUNTIME)
  # ONNX Runtime backend contributes its own predictor implementation.
  list(APPEND STATIC_INFERENCE_API onnxruntime_predictor)
endif()

#TODO(wilber, T8T9): Do we still need to support windows gpu static library?
if(WIN32 AND WITH_GPU)
  cc_library(paddle_inference DEPS ${fluid_modules} ${phi_modules}
             ${STATIC_INFERENCE_API} ${utils_modules})
elseif(WITH_IPU)
  cc_library(paddle_inference DEPS ${fluid_modules} ${phi_modules}
             ${STATIC_INFERENCE_API} ${utils_modules} paddle_ipu)
else()
  # Merge all dependency archives into a single static library.
  create_static_lib(paddle_inference ${fluid_modules} ${phi_modules}
                    ${STATIC_INFERENCE_API} ${utils_modules})
endif()
if(NOT APPLE)
  # TODO(liuyiqun): Temporarily disable the link flag because it is not supported on Mac.
  # Only symbols listed in paddle_inference.sym are kept in the static library.
  set(LINK_FLAGS "-Wl,--retain-symbols-file ${CMAKE_CURRENT_SOURCE_DIR}/paddle_inference.sym")
  set_target_properties(paddle_inference PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
endif()

# C inference API
add_subdirectory(capi_exp)

if(WITH_TESTING AND WITH_INFERENCE_API_TEST)
  add_subdirectory(tests/api)
endif()

# Sources compiled directly into the shared inference library.
set(SHARED_INFERENCE_SRCS
    io.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/../framework/data_feed.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/../framework/data_feed_factory.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/../framework/dataset_factory.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/api/api.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/api/api_impl.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/api/analysis_predictor.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/api/paddle_infer_contrib.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/api/details/zero_copy_tensor.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/utils/io_utils.cc
    ${PADDLE_CUSTOM_OP_SRCS})

# shared inference library deps
set(SHARED_INFERENCE_DEPS ${fluid_modules} ${phi_modules} analysis_predictor)

if(WITH_CRYPTO)
  list(APPEND SHARED_INFERENCE_DEPS paddle_crypto)
endif()

if(WITH_PSCORE)
  # Distributed parameter-server support.
  list(APPEND SHARED_INFERENCE_DEPS fleet ps_service tensor_table)
endif()

if(WITH_ONNXRUNTIME)
  list(APPEND SHARED_INFERENCE_SRCS
       ${CMAKE_CURRENT_SOURCE_DIR}/api/onnxruntime_predictor.cc)
  list(APPEND SHARED_INFERENCE_DEPS onnxruntime_predictor)
endif()

# Create shared inference library
cc_library(paddle_inference_shared SHARED
    SRCS ${SHARED_INFERENCE_SRCS}
    DEPS ${SHARED_INFERENCE_DEPS})

get_property(os_dependency_modules GLOBAL PROPERTY OS_DEPENDENCY_MODULES)
target_link_libraries(paddle_inference_shared ${os_dependency_modules})
if(WIN32)
  target_link_libraries(paddle_inference_shared gflags)
endif()
set_target_properties(paddle_inference_shared PROPERTIES OUTPUT_NAME paddle_inference)
if(NOT APPLE AND NOT WIN32)
  # TODO(liuyiqun): Temporarily disable the link flag because it is not support on Mac.
  # The version script limits the exported symbols of the shared library.
  set(LINK_FLAGS "-Wl,--version-script ${CMAKE_CURRENT_SOURCE_DIR}/paddle_inference.map")
  set_target_properties(paddle_inference_shared PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
  # check symbol hidden
  file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake
    "execute_process(COMMAND sh -c \"${CMAKE_CURRENT_SOURCE_DIR}/check_symbol.sh"
    " ${CMAKE_CURRENT_BINARY_DIR}/libpaddle_inference.so\" RESULT_VARIABLE symbol_res)\n"
    "if(NOT \"\${symbol_res}\" STREQUAL \"0\")\n"
    "  message(FATAL_ERROR \"Check symbol failed.\")\n"
    "endif()\n")
  # NOTE(review): the command never creates ${CMAKE_CURRENT_BINARY_DIR}/.check_symbol,
  # so this check re-runs on every build — presumably intentional; confirm.
  add_custom_command(
    OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/.check_symbol"
    COMMAND ${CMAKE_COMMAND} -P "${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake"
    DEPENDS paddle_inference_shared
    VERBATIM)
  add_custom_target(check_symbol ALL DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/.check_symbol")
endif()