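# Clang may promote the pessimizing-move warning to an error; on macOS keep
# it as a plain warning so the build does not fail on it.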
if(APPLE)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=pessimizing-move")
endif()

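# Helper targets referenced below (e.g. zero_copy_tensor, reset_tensor_array)
# are expected to be defined in details/.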
add_subdirectory(details)

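# Optional MKL-DNN (oneDNN) quantizer support. When WITH_MKLDNN is off, both
# mkldnn_quantizer_cfg and mkldnn_quantizer_src stay empty and the references
# below expand to nothing.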
if(WITH_MKLDNN)
  set(mkldnn_quantizer_cfg mkldnn_quantizer_config)
  set(mkldnn_quantizer_src ${CMAKE_CURRENT_SOURCE_DIR}/mkldnn_quantizer.cc)
  cc_library(
    ${mkldnn_quantizer_cfg}
    SRCS mkldnn_quantizer_config.cc
    DEPS lod_tensor paddle_pass_builder)
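  # Re-set with PARENT_SCOPE so the parent directory also sees the value.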
  set(mkldnn_quantizer_cfg
      ${mkldnn_quantizer_cfg}
      PARENT_SCOPE)
endif()
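# On Windows, analysis_config is built without analysis_helper and
# processgroup_comm_utils; otherwise the dependency lists match.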
if(WIN32)
  cc_library(
    analysis_config
    SRCS analysis_config.cc
    DEPS ${mkldnn_quantizer_cfg} lod_tensor paddle_pass_builder table_printer
         utf8proc)
else()
  cc_library(
    analysis_config
    SRCS analysis_config.cc
    DEPS analysis_helper
         processgroup_comm_utils
         ${mkldnn_quantizer_cfg}
         lod_tensor
         paddle_pass_builder
         table_printer
         utf8proc)
endif()

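# Contrib helpers layered on zero_copy_tensor, plus the pass builder used to
# assemble optimization pass lists.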
cc_library(
  paddle_infer_contrib
  SRCS paddle_infer_contrib.cc
  DEPS zero_copy_tensor)
cc_library(paddle_pass_builder SRCS paddle_pass_builder.cc)

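# Dependencies common to paddle_inference_api on all platforms.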
set(paddle_inference_api_deps
    reset_tensor_array
    analysis_config
    paddle_infer_contrib
    zero_copy_tensor
    trainer_desc_proto
    custom_operator
    lod_tensor
    scope)

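# Extra dependencies pulled in by optional build flags.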
if(WITH_CRYPTO)
  list(APPEND paddle_inference_api_deps paddle_crypto)
endif()
if(WITH_CUSTOM_DEVICE)
  list(APPEND paddle_inference_api_deps phi)
endif()
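# Build the top-level inference API; the non-Windows build additionally
# links paddle_inference_io.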
if(WIN32)
  cc_library(
    paddle_inference_api
    SRCS api.cc api_impl.cc helper.cc
    DEPS executor ${paddle_inference_api_deps})
else()
  cc_library(
    paddle_inference_api
    SRCS api.cc api_impl.cc helper.cc
    DEPS executor paddle_inference_io ${paddle_inference_api_deps})
endif()

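# phi is linked explicitly on Windows, presumably because it is not pulled
# in transitively there.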
if(WIN32)
  target_link_libraries(paddle_inference_api phi)
endif()

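# Aggregate dependency list for the analysis_predictor targets below.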
set(inference_deps ${analysis_deps} paddle_inference_api analysis
                   naive_executor ${GLOB_PASS_LIB})

if(WITH_GPU AND TENSORRT_FOUND)
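  # The TensorRT engine and op converter libraries exist only for GPU builds
  # with TensorRT available.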
  list(APPEND inference_deps tensorrt_engine tensorrt_converter)
endif()

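# With ONNX Runtime enabled, analysis_predictor also compiles the ONNX
# Runtime predictor sources and links onnxruntime and paddle2onnx.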
if(WITH_ONNXRUNTIME)
  cc_library(
    analysis_predictor
    SRCS analysis_predictor.cc onnxruntime_predictor.cc resource_manager.cc
         infer_context.cc ${mkldnn_quantizer_src}
    DEPS ${inference_deps}
         zero_copy_tensor
         ir_pass_manager
         op_compatible_info
         infer_io_utils
         model_utils
         onnxruntime
         paddle2onnx)
else()
  cc_library(
    analysis_predictor
    SRCS analysis_predictor.cc resource_manager.cc infer_context.cc
         ${mkldnn_quantizer_src}
    DEPS ${inference_deps} zero_copy_tensor ir_pass_manager op_compatible_info
         infer_io_utils model_utils)
endif()

if(WITH_ONNXRUNTIME AND WIN32)
  # Copy onnxruntime for some C++ tests on Windows. These tests are built
  # only in CI, so the generator on Windows is assumed to be Ninja.
  copy_onnx(test_paddle_inference_api)
endif()