# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Pull in generic CMake helper functions for inference tests, but only
# when the build is configured to compile tests at all.
if(WITH_TESTING)
  include(tests/test.cmake) # some generic cmake function for inference
endif()

# I/O helper library for the inference runtime (model load/save in io.cc).
# cc_library is a project-provided wrapper around add_library.
cc_library(paddle_inference_io
    SRCS io.cc
    DEPS paddle_framework ${GLOB_OP_LIB} ${GLOB_OPERATOR_DEPS})

# analysis and tensorrt must be added before creating static library,
# otherwise, there would be undefined reference to them in static library.
add_subdirectory(analysis)
add_subdirectory(utils)
if(TENSORRT_FOUND)
  add_subdirectory(tensorrt)
endif()
# Optional Paddle-Lite engine integration.
if(WITH_LITE)
  add_subdirectory(lite)
endif()

# fluid_modules exclude API-interface of inference/api and inference/capi_exp
get_property(fluid_modules GLOBAL PROPERTY FLUID_MODULES)
get_property(phi_modules GLOBAL PROPERTY PHI_MODULES)
# Small utility libraries that are linked into both static and shared libs.
set(utils_modules stringpiece pretty_log string_helper)

add_subdirectory(api)
# Create static inference library if needed
# All static libs in inference/api
set(STATIC_INFERENCE_API
    paddle_inference_api
    analysis_predictor
    zero_copy_tensor
    reset_tensor_array
    analysis_config
    paddle_pass_builder
    activation_functions
    ${mkldnn_quantizer_cfg})

if(WITH_ONNXRUNTIME)
  list(APPEND STATIC_INFERENCE_API onnxruntime_predictor)
endif()

#TODO(wilber, T8T9): Do we still need to support windows gpu static library?
if(WIN32 AND WITH_GPU)
  cc_library(paddle_inference DEPS ${fluid_modules} ${phi_modules}
             ${STATIC_INFERENCE_API} ${utils_modules})
else()
  # create_static_lib is a project helper that bundles all object/static
  # libraries into one archive.
  create_static_lib(paddle_inference ${fluid_modules} ${phi_modules}
                    ${STATIC_INFERENCE_API} ${utils_modules})
endif()

if(NOT APPLE)
  # TODO(liuyiqun): Temporarily disable the link flag because it is not supported on Mac.
  # Only symbols listed in paddle_inference.sym are kept in the static archive.
  set(LINK_FLAGS "-Wl,--retain-symbols-file ${CMAKE_CURRENT_SOURCE_DIR}/paddle_inference.sym")
  set_target_properties(paddle_inference PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
endif()

# C inference API
add_subdirectory(capi_exp)

# End-to-end inference API tests; gated behind both generic testing and
# the dedicated inference-API-test switch.
if(WITH_TESTING AND WITH_INFERENCE_API_TEST)
  add_subdirectory(tests/api)
endif()

# Sources compiled directly into the shared inference library.
set(SHARED_INFERENCE_SRCS
    io.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/../framework/data_feed.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/../framework/data_feed_factory.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/../framework/dataset_factory.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/api/api.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/api/api_impl.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/api/analysis_predictor.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/api/paddle_infer_contrib.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/api/details/zero_copy_tensor.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/utils/io_utils.cc
    ${PADDLE_CUSTOM_OP_SRCS})

# shared inference library deps
set(SHARED_INFERENCE_DEPS ${fluid_modules} ${phi_modules} analysis_predictor)

if(WITH_CRYPTO)
  list(APPEND SHARED_INFERENCE_DEPS paddle_crypto)
endif()

if(WITH_PSCORE)
  # Distributed parameter-server support.
  list(APPEND SHARED_INFERENCE_DEPS fleet ps_service tensor_table)
endif()

if(WITH_ONNXRUNTIME)
  list(APPEND SHARED_INFERENCE_SRCS
       ${CMAKE_CURRENT_SOURCE_DIR}/api/onnxruntime_predictor.cc)
  list(APPEND SHARED_INFERENCE_DEPS onnxruntime_predictor)
endif()

# Create shared inference library
cc_library(paddle_inference_shared SHARED
    SRCS ${SHARED_INFERENCE_SRCS}
    DEPS ${SHARED_INFERENCE_DEPS})

get_property(os_dependency_modules GLOBAL PROPERTY OS_DEPENDENCY_MODULES)
target_link_libraries(paddle_inference_shared ${os_dependency_modules})
if(WIN32)
  target_link_libraries(paddle_inference_shared gflags)
endif()
# Install/link the shared library under the plain name "libpaddle_inference".
set_target_properties(paddle_inference_shared PROPERTIES OUTPUT_NAME paddle_inference)
if(NOT APPLE AND NOT WIN32)
  # TODO(liuyiqun): Temporarily disable the link flag because it is not supported on Mac.
  # Restrict exported symbols to those listed in the version script.
  set(LINK_FLAGS "-Wl,--version-script ${CMAKE_CURRENT_SOURCE_DIR}/paddle_inference.map")
  set_target_properties(paddle_inference_shared PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
  # check symbol hidden
  # Generate a small script that runs check_symbol.sh against the built .so
  # and fails the build if unexpected symbols are exported.
  file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake
    "execute_process(COMMAND sh -c \"${CMAKE_CURRENT_SOURCE_DIR}/check_symbol.sh"
    " ${CMAKE_CURRENT_BINARY_DIR}/libpaddle_inference.so\" RESULT_VARIABLE symbol_res)\n"
    "if(NOT \"\${symbol_res}\" STREQUAL \"0\")\n"
    "  message(FATAL_ERROR \"Check symbol failed.\")\n"
    "endif()\n")
  # NOTE(review): the generated script never writes .check_symbol, so this
  # command re-runs on every build — presumably intentional; confirm.
  add_custom_command(
    OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/.check_symbol"
    COMMAND ${CMAKE_COMMAND} -P "${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake"
    DEPENDS paddle_inference_shared
    VERBATIM)
  add_custom_target(check_symbol ALL DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/.check_symbol")
endif()