# 3.12 is required for add_compile_definitions() used in the TensorRT section.
cmake_minimum_required(VERSION 3.12)
project(cpp_inference_demo CXX C)
# Build options for the inference demo.
option(WITH_MKL "Compile demo with MKL/OpenBlas support, default use MKL." ON)
option(WITH_GPU "Compile demo with GPU/CPU, default use CPU." OFF)
option(WITH_STATIC_LIB
       "Compile demo with static/shared library, default use static." OFF)
option(USE_TENSORRT "Compile demo with TensorRT." OFF)
option(WITH_GTEST "Compile demo with GTEST" OFF)
option(WITH_ONNXRUNTIME "Compile demo with ONNXRuntime" OFF)

if(NOT WITH_STATIC_LIB)
  add_definitions("-DPADDLE_WITH_SHARED_LIB")
else()
  # PD_INFER_DECL is mainly used to set the dllimport/dllexport attribute in
  # dynamic library mode. Set it to empty in static library mode to avoid
  # compilation issues.
  add_definitions("/DPD_INFER_DECL=")
endif()

# Rewrite every MSVC C++ flag variable to use the static C runtime (/MT)
# instead of the DLL runtime (/MD). Used together with MSVC_STATIC_CRT below.
macro(safe_set_static_flag)
  foreach(flag_var
          CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
          CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
    if(${flag_var} MATCHES "/MD")
      string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
    endif()
  endforeach()
endmacro()

# PADDLE_LIB and DEMO_NAME must be supplied on the command line.
if(NOT DEFINED PADDLE_LIB)
  message(
    FATAL_ERROR "please set PADDLE_LIB with -DPADDLE_LIB=/path/paddle/lib")
endif()
if(NOT DEFINED DEMO_NAME)
  message(FATAL_ERROR "please set DEMO_NAME with -DDEMO_NAME=demo_name")
endif()

# Headers and library directories for Paddle and its bundled third-party deps.
include_directories("${PADDLE_LIB}/")
set(PADDLE_LIB_THIRD_PARTY_PATH "${PADDLE_LIB}/third_party/install/")
include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}protobuf/include")
include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}glog/include")
include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}gflags/include")
include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}xxhash/include")
include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}cryptopp/include")

link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}protobuf/lib")
link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}glog/lib")
link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}gflags/lib")
link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}xxhash/lib")
link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}cryptopp/lib")
link_directories("${PADDLE_LIB}/paddle/lib")
if(WITH_ONNXRUNTIME)
  include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/include")
  include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}paddle2onnx/include")

  link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib")
  link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}paddle2onnx/lib")
endif()
if(WIN32)
  add_definitions("/DGOOGLE_GLOG_DLL_DECL=")
  option(MSVC_STATIC_CRT "use static C Runtime library by default" ON)
  if(MSVC_STATIC_CRT)
    # OpenMP is only needed when MKL is enabled.
    if(WITH_MKL)
      set(FLAG_OPENMP "/openmp")
    endif()
    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /wd4244 /wd4530")
    set(CMAKE_C_FLAGS_DEBUG
        "${CMAKE_C_FLAGS_DEBUG} /bigobj /MTd ${FLAG_OPENMP}")
    set(CMAKE_C_FLAGS_RELEASE
        "${CMAKE_C_FLAGS_RELEASE} /bigobj /MT ${FLAG_OPENMP}")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4244 /wd4530")
    set(CMAKE_CXX_FLAGS_DEBUG
        "${CMAKE_CXX_FLAGS_DEBUG} /bigobj /MTd ${FLAG_OPENMP}")
    set(CMAKE_CXX_FLAGS_RELEASE
        "${CMAKE_CXX_FLAGS_RELEASE} /bigobj /MT ${FLAG_OPENMP}")
    # Ensure any pre-existing /MD flags are rewritten to /MT as well.
    safe_set_static_flag()
    if(WITH_STATIC_LIB)
      add_definitions(-DSTATIC_LIB)
    endif()
  endif()
else()
  if(WITH_MKL)
    set(FLAG_OPENMP "-fopenmp")
  endif()
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17 ${FLAG_OPENMP}")
endif()

if(WITH_GPU)
  if(NOT WIN32)
    add_definitions("-DPADDLE_WITH_GPU")
    set(CUDA_LIB
        "/usr/local/cuda/lib64/"
        CACHE STRING "CUDA Library")
    include_directories("${CUDA_LIB}/../include")
  else()
    set(CUDA_LIB
        ""
        CACHE STRING "CUDA_LIB")
    # On Windows, fall back to the CUDA_PATH environment variable, then to the
    # default CUDA v10.2 install location.
    if("${CUDA_LIB}" STREQUAL "")
      if(DEFINED ENV{CUDA_PATH})
        set(CUDA_LIB "$ENV{CUDA_PATH}\\lib\\x64")
      else()
        set(CUDA_LIB
            "C:\\Program\ Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v10.2\\lib\\x64"
        )
      endif()
    endif()
    message(STATUS "Current CUDA lib path: ${CUDA_LIB}")
  endif()
endif()

112 113 114 115
if(USE_TENSORRT AND WITH_GPU)
  set(TENSORRT_ROOT
      ""
      CACHE STRING "The root directory of TensorRT library")
116
  if("${TENSORRT_ROOT}" STREQUAL "")
117 118 119 120
    message(
      FATAL_ERROR
        "The TENSORRT_ROOT is empty, you must assign it a value with CMake command. Such as: -DTENSORRT_ROOT=TENSORRT_ROOT_PATH "
    )
121 122 123 124
  endif()
  set(TENSORRT_INCLUDE_DIR ${TENSORRT_ROOT}/include)
  set(TENSORRT_LIB_DIR ${TENSORRT_ROOT}/lib)
  file(READ ${TENSORRT_INCLUDE_DIR}/NvInfer.h TENSORRT_VERSION_FILE_CONTENTS)
125 126 127 128 129 130 131 132
  string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)"
               TENSORRT_MAJOR_VERSION "${TENSORRT_VERSION_FILE_CONTENTS}")
  string(REGEX MATCH "define NV_TENSORRT_MINOR +([0-9]+)"
               TENSORRT_MINOR_VERSION "${TENSORRT_VERSION_FILE_CONTENTS}")
  string(REGEX MATCH "define NV_TENSORRT_PATCH +([0-9]+)"
               TENSORRT_PATCH_VERSION "${TENSORRT_VERSION_FILE_CONTENTS}")
  string(REGEX MATCH "define NV_TENSORRT_BUILD +([0-9]+)"
               TENSORRT_BUILD_VERSION "${TENSORRT_VERSION_FILE_CONTENTS}")
133
  if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
134 135 136 137 138 139 140 141 142 143
    file(READ ${TENSORRT_INCLUDE_DIR}/NvInferVersion.h
         TENSORRT_VERSION_FILE_CONTENTS)
    string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)"
                 TENSORRT_MAJOR_VERSION "${TENSORRT_VERSION_FILE_CONTENTS}")
    string(REGEX MATCH "define NV_TENSORRT_MINOR +([0-9]+)"
                 TENSORRT_MINOR_VERSION "${TENSORRT_VERSION_FILE_CONTENTS}")
    string(REGEX MATCH "define NV_TENSORRT_PATCH +([0-9]+)"
                 TENSORRT_PATCH_VERSION "${TENSORRT_VERSION_FILE_CONTENTS}")
    string(REGEX MATCH "define NV_TENSORRT_BUILD +([0-9]+)"
                 TENSORRT_BUILD_VERSION "${TENSORRT_VERSION_FILE_CONTENTS}")
144 145 146 147 148
  endif()
  if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
    message(SEND_ERROR "Failed to detect TensorRT version.")
  endif()
  string(REGEX REPLACE "define NV_TENSORRT_MAJOR +([0-9]+)" "\\1"
149
                       TENSORRT_MAJOR_VERSION "${TENSORRT_MAJOR_VERSION}")
150
  string(REGEX REPLACE "define NV_TENSORRT_MINOR +([0-9]+)" "\\1"
151
                       TENSORRT_MINOR_VERSION "${TENSORRT_MINOR_VERSION}")
152
  string(REGEX REPLACE "define NV_TENSORRT_PATCH +([0-9]+)" "\\1"
153
                       TENSORRT_PATCH_VERSION "${TENSORRT_PATCH_VERSION}")
154
  string(REGEX REPLACE "define NV_TENSORRT_BUILD +([0-9]+)" "\\1"
155 156 157 158 159 160
                       TENSORRT_BUILD_VERSION "${TENSORRT_BUILD_VERSION}")
  message(
    STATUS
      "Current TensorRT header is ${TENSORRT_INCLUDE_DIR}/NvInfer.h. "
      "Current TensorRT version is v${TENSORRT_MAJOR_VERSION}.${TENSORRT_MINOR_VERSION}.${TENSORRT_PATCH_VERSION}.${TENSORRT_BUILD_VERSION} "
  )
161 162
  include_directories("${TENSORRT_INCLUDE_DIR}")
  link_directories("${TENSORRT_LIB_DIR}")
163 164 165 166
  add_compile_definitions(NV_TENSORRT_MAJOR=${TENSORRT_MAJOR_VERSION})
  add_compile_definitions(NV_TENSORRT_MINOR=${TENSORRT_MINOR_VERSION})
  add_compile_definitions(NV_TENSORRT_PATCH=${TENSORRT_PATCH_VERSION})
  add_compile_definitions(NV_TENSORRT_BUILD=${TENSORRT_BUILD_VERSION})
167 168 169 170 171 172 173 174 175
endif()

if(WITH_MKL)
  set(MATH_LIB_PATH "${PADDLE_LIB_THIRD_PARTY_PATH}mklml")
  include_directories("${MATH_LIB_PATH}/include")
  if(WIN32)
    set(MATH_LIB ${MATH_LIB_PATH}/lib/mklml${CMAKE_STATIC_LIBRARY_SUFFIX}
                 ${MATH_LIB_PATH}/lib/libiomp5md${CMAKE_STATIC_LIBRARY_SUFFIX})
  else()
    set(MATH_LIB
        ${MATH_LIB_PATH}/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX}
        ${MATH_LIB_PATH}/lib/libiomp5${CMAKE_SHARED_LIBRARY_SUFFIX})
  endif()
  # oneDNN (mkldnn) is optional; only link it when shipped with the package.
  set(MKLDNN_PATH "${PADDLE_LIB_THIRD_PARTY_PATH}mkldnn")
  if(EXISTS ${MKLDNN_PATH})
    include_directories("${MKLDNN_PATH}/include")
    if(WIN32)
      set(MKLDNN_LIB ${MKLDNN_PATH}/lib/mkldnn.lib)
    else()
      set(MKLDNN_LIB ${MKLDNN_PATH}/lib/libmkldnn.so.0)
    endif()
  endif()
else()
  set(OPENBLAS_LIB_PATH "${PADDLE_LIB_THIRD_PARTY_PATH}openblas")
  include_directories("${OPENBLAS_LIB_PATH}/include/openblas")
  if(WIN32)
    set(MATH_LIB
        ${OPENBLAS_LIB_PATH}/lib/openblas${CMAKE_STATIC_LIBRARY_SUFFIX})
  else()
    set(MATH_LIB
        ${OPENBLAS_LIB_PATH}/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX})
  endif()
endif()

# Select the paddle_inference library artifact matching the link mode.
if(WITH_STATIC_LIB)
  set(DEPS
      ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX}
  )
else()
  if(WIN32)
    # On Windows the import library (.lib) is linked for the shared build.
    set(DEPS
        ${PADDLE_LIB}/paddle/lib/paddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
  else()
    set(DEPS
        ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX}
    )
  endif()
endif()

if(WITH_ONNXRUNTIME)
  set(DEPS ${DEPS} onnxruntime paddle2onnx)
endif()

if(NOT WIN32)
  set(EXTERNAL_LIB "-lrt -ldl -lpthread")
  set(DEPS
      ${DEPS}
      ${MATH_LIB}
      ${MKLDNN_LIB}
      glog
      gflags
      protobuf
      xxhash
      cryptopp
      ${EXTERNAL_LIB})
else()
  set(DEPS
      ${DEPS}
      ${MATH_LIB}
      ${MKLDNN_LIB}
      glog
      gflags_static
      libprotobuf
      xxhash
      cryptopp-static
      ${EXTERNAL_LIB})
  set(DEPS ${DEPS} shlwapi.lib)
endif()

if(WITH_GPU)
  if(NOT WIN32)
    if(USE_TENSORRT)
      set(DEPS ${DEPS}
               ${TENSORRT_LIB_DIR}/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
      set(DEPS
          ${DEPS}
          ${TENSORRT_LIB_DIR}/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
    endif()
    set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX})
  else()
    if(USE_TENSORRT)
      set(DEPS ${DEPS}
               ${TENSORRT_LIB_DIR}/nvinfer${CMAKE_STATIC_LIBRARY_SUFFIX})
      set(DEPS ${DEPS}
               ${TENSORRT_LIB_DIR}/nvinfer_plugin${CMAKE_STATIC_LIBRARY_SUFFIX})
      # TensorRT 7 on Windows additionally ships the myelin library.
      if(${TENSORRT_MAJOR_VERSION} EQUAL 7)
        set(DEPS ${DEPS}
                 ${TENSORRT_LIB_DIR}/myelin64_1${CMAKE_STATIC_LIBRARY_SUFFIX})
      endif()
    endif()
    set(DEPS ${DEPS} ${CUDA_LIB}/cudart${CMAKE_STATIC_LIBRARY_SUFFIX})
    set(DEPS ${DEPS} ${CUDA_LIB}/cublas${CMAKE_STATIC_LIBRARY_SUFFIX})
    set(DEPS ${DEPS} ${CUDA_LIB}/cudnn${CMAKE_STATIC_LIBRARY_SUFFIX})
  endif()
endif()

# Fetch and build googletest via ExternalProject when requested.
if(WITH_GTEST)
  include(ExternalProject)
  include(external-cmake/gtest-cpp.cmake)
endif()

# The demo source file must be named <DEMO_NAME>.cc next to this file.
add_executable(${DEMO_NAME} ${DEMO_NAME}.cc)
target_link_libraries(${DEMO_NAME} ${DEPS})
if(WITH_GTEST)
  include(GNUInstallDirs)
  include_directories(${GTEST_INSTALL_DIR}/include)
  # Ensure googletest is built before the demo (ExternalProject target).
  add_dependencies(${DEMO_NAME} thirdparty_gtest)
  if(WIN32)
    target_link_libraries(${DEMO_NAME} ${GTEST_LIBRARIES})
  else()
    target_link_libraries(
      ${DEMO_NAME}
      ${GTEST_INSTALL_DIR}/${CMAKE_INSTALL_LIBDIR}/libgtest${CMAKE_STATIC_LIBRARY_SUFFIX}
    )
  endif()
endif()
if(WIN32)
  # Runtime DLLs must sit next to the executable; Ninja places binaries in the
  # build root while VS generators use a per-config subdirectory.
  if("${CMAKE_GENERATOR}" MATCHES "Ninja")
    set(LIB_PATH ${CMAKE_BINARY_DIR})
  else()
    set(LIB_PATH ${CMAKE_BINARY_DIR}/${CMAKE_BUILD_TYPE})
  endif()

  if(USE_TENSORRT)
    add_custom_command(
      TARGET ${DEMO_NAME}
      POST_BUILD
      COMMAND
        ${CMAKE_COMMAND} -E copy
        ${TENSORRT_LIB_DIR}/nvinfer${CMAKE_SHARED_LIBRARY_SUFFIX} ${LIB_PATH}
      COMMAND
        ${CMAKE_COMMAND} -E copy
        ${TENSORRT_LIB_DIR}/nvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX}
        ${LIB_PATH})
    # TensorRT 7 additionally requires the myelin DLL at runtime.
    if(${TENSORRT_MAJOR_VERSION} EQUAL 7)
      add_custom_command(
        TARGET ${DEMO_NAME}
        POST_BUILD
        COMMAND
          ${CMAKE_COMMAND} -E copy
          ${TENSORRT_LIB_DIR}/myelin64_1${CMAKE_SHARED_LIBRARY_SUFFIX}
          ${LIB_PATH})
    endif()
  endif()
  if(WITH_MKL)
    message("LIB_PATH IS ${LIB_PATH}")
    add_custom_command(
      TARGET ${DEMO_NAME}
      POST_BUILD
      COMMAND ${CMAKE_COMMAND} -E copy ${MATH_LIB_PATH}/lib/mklml.dll
              ${LIB_PATH}
      COMMAND ${CMAKE_COMMAND} -E copy ${MATH_LIB_PATH}/lib/libiomp5md.dll
              ${LIB_PATH}
      COMMAND ${CMAKE_COMMAND} -E copy ${MKLDNN_PATH}/lib/mkldnn.dll
              ${LIB_PATH})
  else()
    add_custom_command(
      TARGET ${DEMO_NAME}
      POST_BUILD
      COMMAND ${CMAKE_COMMAND} -E copy ${OPENBLAS_LIB_PATH}/lib/openblas.dll
              ${LIB_PATH})
  endif()
  if(WITH_ONNXRUNTIME)
    add_custom_command(
      TARGET ${DEMO_NAME}
      POST_BUILD
      COMMAND
        ${CMAKE_COMMAND} -E copy
        ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/onnxruntime.dll
        ${LIB_PATH}
      COMMAND
        ${CMAKE_COMMAND} -E copy
        ${PADDLE_LIB_THIRD_PARTY_PATH}paddle2onnx/lib/paddle2onnx.dll
        ${LIB_PATH})
  endif()
  if(NOT WITH_STATIC_LIB)
    add_custom_command(
      TARGET ${DEMO_NAME}
      POST_BUILD
      COMMAND ${CMAKE_COMMAND} -E copy
              "${PADDLE_LIB}/paddle/lib/paddle_inference.dll" ${LIB_PATH})
  endif()
endif()