Commit b5a5d93b authored by zhouwei25, committed by liuwei1031

fix the dependencies of third party and inference lib (#19684)

Parent 32b1151f
@@ -24,9 +24,8 @@ set(COPY_SCRIPT_DIR ${PADDLE_SOURCE_DIR}/cmake)
 function(copy TARGET)
     set(options "")
     set(oneValueArgs "")
-    set(multiValueArgs SRCS DSTS DEPS)
+    set(multiValueArgs SRCS DSTS)
     cmake_parse_arguments(copy_lib "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
-    set(fluid_lib_dist_dep ${TARGET} ${fluid_lib_dist_dep} PARENT_SCOPE)
     list(LENGTH copy_lib_SRCS copy_lib_SRCS_len)
     list(LENGTH copy_lib_DSTS copy_lib_DSTS_len)
@@ -34,7 +33,6 @@ function(copy TARGET)
         message(FATAL_ERROR "${TARGET} source numbers are not equal to destination numbers")
     endif ()
     math(EXPR len "${copy_lib_SRCS_len} - 1")
-    add_custom_target(${TARGET} DEPENDS ${copy_lib_DEPS})
     foreach (index RANGE ${len})
         list(GET copy_lib_SRCS ${index} src)
         list(GET copy_lib_DSTS ${index} dst)
@@ -44,7 +42,7 @@ function(copy TARGET)
            add_custom_command(TARGET ${TARGET} POST_BUILD
                    COMMAND ${PYTHON_EXECUTABLE} ${COPY_SCRIPT_DIR}/copyfile.py ${native_src} ${native_dst})
        else (WIN32) #not windows
-           add_custom_command(TARGET ${TARGET} PRE_BUILD
+           add_custom_command(TARGET ${TARGET} POST_BUILD
                    COMMAND mkdir -p "${dst}"
                    COMMAND cp -r "${src}" "${dst}"
                    COMMENT "copying ${src} -> ${dst}")
@@ -53,196 +51,189 @@ function(copy TARGET)
 endfunction()
 # third party
-set(dst_dir "${FLUID_INSTALL_DIR}/third_party/eigen3")
-copy(eigen3_lib
-        SRCS ${EIGEN_INCLUDE_DIR}/Eigen/Core ${EIGEN_INCLUDE_DIR}/Eigen/src ${EIGEN_INCLUDE_DIR}/unsupported/Eigen
-        DSTS ${dst_dir}/Eigen ${dst_dir}/Eigen ${dst_dir}/unsupported
-        DEPS eigen3
-        )
-set(dst_dir "${FLUID_INSTALL_DIR}/third_party/install/gflags")
-copy(gflags_lib
-        SRCS ${GFLAGS_INCLUDE_DIR} ${GFLAGS_LIBRARIES}
-        DSTS ${dst_dir} ${dst_dir}/lib
-        DEPS gflags
-        )
-set(dst_dir "${FLUID_INSTALL_DIR}/third_party/install/glog")
-copy(glog_lib
-        SRCS ${GLOG_INCLUDE_DIR} ${GLOG_LIBRARIES}
-        DSTS ${dst_dir} ${dst_dir}/lib
-        DEPS glog
-        )
-set(dst_dir "${FLUID_INSTALL_DIR}/third_party/boost/")
-copy(boost_lib
-        SRCS ${BOOST_INCLUDE_DIR}/boost
-        DSTS ${dst_dir}
-        DEPS boost
-        )
-set(dst_dir "${FLUID_INSTALL_DIR}/third_party/install/xxhash")
-copy(xxhash_lib
-        SRCS ${XXHASH_INCLUDE_DIR} ${XXHASH_LIBRARIES}
-        DSTS ${dst_dir} ${dst_dir}/lib
-        DEPS xxhash
-        )
-if (NOT PROTOBUF_FOUND OR WIN32)
-    set(dst_dir "${FLUID_INSTALL_DIR}/third_party/install/protobuf")
-    copy(protobuf_lib
-            SRCS ${PROTOBUF_INCLUDE_DIR} ${PROTOBUF_LIBRARY}
-            DSTS ${dst_dir} ${dst_dir}/lib
-            DEPS extern_protobuf
-            )
-endif ()
-if (WITH_MKLML)
-    set(dst_dir "${FLUID_INSTALL_DIR}/third_party/install/mklml")
+set(third_party_deps eigen3 gflags glog boost xxhash zlib)
+if(NOT PROTOBUF_FOUND OR WIN32)
+    list(APPEND third_party_deps extern_protobuf)
+endif ()
+if (WITH_MKLML)
+    list(APPEND third_party_deps mklml)
+elseif (NOT CBLAS_FOUND OR WIN32)
+    list(APPEND third_party_deps extern_openblas)
+endif ()
+if (WITH_MKLDNN)
+    list(APPEND third_party_deps mkldnn_shared_lib)
+endif ()
+if (WITH_NGRAPH)
+    list(APPEND third_party_deps ngraph)
+endif ()
+add_custom_target(third_party DEPENDS ${third_party_deps})
+# inference-only library
+set(inference_lib_deps third_party paddle_fluid paddle_fluid_shared)
+add_custom_target(inference_lib_dist DEPENDS ${inference_lib_deps})
+set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/eigen3")
+copy(inference_lib_dist
+        SRCS ${EIGEN_INCLUDE_DIR}/Eigen/Core ${EIGEN_INCLUDE_DIR}/Eigen/src ${EIGEN_INCLUDE_DIR}/unsupported/Eigen
+        DSTS ${dst_dir}/Eigen ${dst_dir}/Eigen ${dst_dir}/unsupported)
+set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/boost")
+copy(inference_lib_dist
+        SRCS ${BOOST_INCLUDE_DIR}/boost
+        DSTS ${dst_dir})
+if(WITH_MKLML)
+    set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/mklml")
     if(WIN32)
-        copy(mklml_lib
+        copy(inference_lib_dist
                SRCS ${MKLML_LIB} ${MKLML_IOMP_LIB} ${MKLML_SHARED_LIB}
                     ${MKLML_SHARED_LIB_DEPS} ${MKLML_SHARED_IOMP_LIB} ${MKLML_INC_DIR}
                DSTS ${dst_dir}/lib ${dst_dir}/lib ${dst_dir}/lib
-                    ${dst_dir}/lib ${dst_dir}/lib ${dst_dir}
-               DEPS mklml
-               )
+                    ${dst_dir}/lib ${dst_dir}/lib ${dst_dir})
     else()
-        copy(mklml_lib
+        copy(inference_lib_dist
                SRCS ${MKLML_LIB} ${MKLML_IOMP_LIB} ${MKLML_INC_DIR}
-               DSTS ${dst_dir}/lib ${dst_dir}/lib ${dst_dir}
-               DEPS mklml
-               )
+               DSTS ${dst_dir}/lib ${dst_dir}/lib ${dst_dir})
     endif()
 elseif (NOT CBLAS_FOUND OR WIN32)
-    set(dst_dir "${FLUID_INSTALL_DIR}/third_party/install/openblas")
-    copy(openblas_lib
+    set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/openblas")
+    copy(inference_lib_dist
            SRCS ${CBLAS_INSTALL_DIR}/lib ${CBLAS_INSTALL_DIR}/include
-           DSTS ${dst_dir} ${dst_dir}
-           DEPS extern_openblas
-           )
-endif ()
-if (WITH_MKLDNN)
-    set(dst_dir "${FLUID_INSTALL_DIR}/third_party/install/mkldnn")
-    if(WIN32)
-        copy(mkldnn_lib
-               SRCS ${MKLDNN_INC_DIR} ${MKLDNN_SHARED_LIB} ${MKLDNN_LIB}
-               DSTS ${dst_dir} ${dst_dir}/lib ${dst_dir}/lib
-               DEPS mkldnn_shared_lib
-               )
-    else()
-        copy(mkldnn_lib
-               SRCS ${MKLDNN_INC_DIR} ${MKLDNN_SHARED_LIB}
-               DSTS ${dst_dir} ${dst_dir}/lib
-               DEPS mkldnn_shared_lib
-               )
-    endif()
+           DSTS ${dst_dir} ${dst_dir})
 endif ()
-if (WITH_NGRAPH)
-    set(dst_dir "${FLUID_INSTALL_DIR}/third_party/install/ngraph")
-    copy(ngraph_lib
-           SRCS ${NGRAPH_INC_DIR} ${NGRAPH_LIB_DIR}
-           DSTS ${dst_dir} ${dst_dir}
-           DEPS ngraph
-           )
-endif ()
+if(WITH_MKLDNN)
+    set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/mkldnn")
+    if(WIN32)
+        copy(inference_lib_dist
+               SRCS ${MKLDNN_INC_DIR} ${MKLDNN_SHARED_LIB} ${MKLDNN_LIB}
+               DSTS ${dst_dir} ${dst_dir}/lib ${dst_dir}/lib)
+    else()
+        copy(inference_lib_dist
+               SRCS ${MKLDNN_INC_DIR} ${MKLDNN_SHARED_LIB}
+               DSTS ${dst_dir} ${dst_dir}/lib)
+    endif()
+endif()
-set(dst_dir "${FLUID_INSTALL_DIR}/third_party/install/zlib")
-copy(zlib_lib
-        SRCS ${ZLIB_INCLUDE_DIR} ${ZLIB_LIBRARIES}
-        DSTS ${dst_dir} ${dst_dir}/lib
-        DEPS zlib)
-# paddle fluid module
-set(src_dir "${PADDLE_SOURCE_DIR}/paddle/fluid")
-set(dst_dir "${FLUID_INSTALL_DIR}/paddle/fluid")
-set(module "framework")
-set(framework_lib_deps framework_proto)
-copy(framework_lib DEPS ${framework_lib_deps}
-        SRCS ${src_dir}/${module}/*.h ${src_dir}/${module}/details/*.h ${PADDLE_BINARY_DIR}/paddle/fluid/framework/framework.pb.h ${PADDLE_BINARY_DIR}/paddle/fluid/framework/data_feed.pb.h ${src_dir}/${module}/ir/memory_optimize_pass/*.h
-        ${src_dir}/${module}/ir/*.h ${src_dir}/${module}/fleet/*.h
-        DSTS ${dst_dir}/${module} ${dst_dir}/${module}/details ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}/ir/memory_optimize_pass ${dst_dir}/${module}/ir ${dst_dir}/${module}/fleet
-        )
-set(module "memory")
-copy(memory_lib
-        SRCS ${src_dir}/${module}/*.h ${src_dir}/${module}/detail/*.h ${src_dir}/${module}/allocation/*.h
-        DSTS ${dst_dir}/${module} ${dst_dir}/${module}/detail ${dst_dir}/${module}/allocation
-        )
-set(inference_deps paddle_fluid_shared paddle_fluid)
-set(module "inference/api")
+set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/gflags")
+copy(inference_lib_dist
+        SRCS ${GFLAGS_INCLUDE_DIR} ${GFLAGS_LIBRARIES}
+        DSTS ${dst_dir} ${dst_dir}/lib)
+set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/glog")
+copy(inference_lib_dist
+        SRCS ${GLOG_INCLUDE_DIR} ${GLOG_LIBRARIES}
+        DSTS ${dst_dir} ${dst_dir}/lib)
+set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/xxhash")
+copy(inference_lib_dist
+        SRCS ${XXHASH_INCLUDE_DIR} ${XXHASH_LIBRARIES}
+        DSTS ${dst_dir} ${dst_dir}/lib)
+set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/zlib")
+copy(inference_lib_dist
+        SRCS ${ZLIB_INCLUDE_DIR} ${ZLIB_LIBRARIES}
+        DSTS ${dst_dir} ${dst_dir}/lib)
+if (NOT PROTOBUF_FOUND OR WIN32)
+    set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/protobuf")
+    copy(inference_lib_dist
+            SRCS ${PROTOBUF_INCLUDE_DIR} ${PROTOBUF_LIBRARY}
+            DSTS ${dst_dir} ${dst_dir}/lib)
+endif ()
+if (WITH_NGRAPH)
+    set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/ngraph")
+    copy(inference_lib_dist
+            SRCS ${NGRAPH_INC_DIR} ${NGRAPH_LIB_DIR}
+            DSTS ${dst_dir} ${dst_dir})
+endif ()
 if (TENSORRT_FOUND)
-    copy(tensorrt_lib DEPS ${inference_deps}
+    set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/tensorrt")
+    copy(inference_lib_dist
            SRCS ${TENSORRT_ROOT}/include/Nv*.h ${TENSORRT_ROOT}/lib/*nvinfer*
-           DSTS ${FLUID_INSTALL_DIR}/third_party/install/tensorrt/include ${FLUID_INSTALL_DIR}/third_party/install/tensorrt/lib)
+           DSTS ${dst_dir}/include ${dst_dir}/lib)
 endif ()
 if (ANAKIN_FOUND)
-    copy(anakin_lib DEPS ${inference_deps}
+    set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/anakin")
+    copy(inference_lib_dist
            SRCS ${ANAKIN_ROOT}/*
-           DSTS ${FLUID_INSTALL_DIR}/third_party/install/anakin)
+           DSTS ${dst_dir})
 endif ()
-set(module "inference")
+copy(inference_lib_dist
+        SRCS ${CMAKE_CURRENT_BINARY_DIR}/CMakeCache.txt
+        DSTS ${FLUID_INFERENCE_INSTALL_DIR})
+set(src_dir "${PADDLE_SOURCE_DIR}/paddle/fluid")
 if(WIN32)
     set(paddle_fluid_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/libpaddle_fluid.*)
 else(WIN32)
     set(paddle_fluid_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/libpaddle_fluid.*)
 endif(WIN32)
-copy(inference_lib DEPS ${inference_deps}
-        SRCS ${src_dir}/${module}/*.h ${paddle_fluid_lib}
-             ${src_dir}/${module}/api/paddle_*.h
+copy(inference_lib_dist
+        SRCS ${src_dir}/inference/api/paddle_*.h ${paddle_fluid_lib}
+        DSTS ${FLUID_INFERENCE_INSTALL_DIR}/paddle/include ${FLUID_INFERENCE_INSTALL_DIR}/paddle/lib)
+# fluid library for both train and inference
+set(fluid_lib_deps inference_lib_dist)
+add_custom_target(fluid_lib_dist ALL DEPENDS ${fluid_lib_deps})
+set(dst_dir "${FLUID_INSTALL_DIR}/paddle/fluid")
+set(module "inference")
+copy(fluid_lib_dist
+        SRCS ${src_dir}/${module}/*.h ${src_dir}/${module}/api/paddle_*.h ${paddle_fluid_lib}
         DSTS ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
+        )
+set(module "framework")
+set(framework_lib_deps framework_proto)
+add_dependencies(fluid_lib_dist ${framework_lib_deps})
+copy(fluid_lib_dist
+        SRCS ${src_dir}/${module}/*.h ${src_dir}/${module}/details/*.h ${PADDLE_BINARY_DIR}/paddle/fluid/framework/framework.pb.h ${PADDLE_BINARY_DIR}/paddle/fluid/framework/data_feed.pb.h ${src_dir}/${module}/ir/memory_optimize_pass/*.h
+        ${src_dir}/${module}/ir/*.h ${src_dir}/${module}/fleet/*.h
+        DSTS ${dst_dir}/${module} ${dst_dir}/${module}/details ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}/ir/memory_optimize_pass ${dst_dir}/${module}/ir ${dst_dir}/${module}/fleet)
+set(module "memory")
+copy(fluid_lib_dist
+        SRCS ${src_dir}/${module}/*.h ${src_dir}/${module}/detail/*.h ${src_dir}/${module}/allocation/*.h
+        DSTS ${dst_dir}/${module} ${dst_dir}/${module}/detail ${dst_dir}/${module}/allocation
        )
 set(module "platform")
-copy(platform_lib DEPS profiler_py_proto
-        SRCS ${src_dir}/${module}/*.h ${src_dir}/${module}/dynload/*.h ${src_dir}/${module}/details/*.h
-        DSTS ${dst_dir}/${module} ${dst_dir}/${module}/dynload ${dst_dir}/${module}/details
+set(platform_lib_deps profiler_proto)
+add_dependencies(fluid_lib_dist ${platform_lib_deps})
+copy(fluid_lib_dist
+        SRCS ${src_dir}/${module}/*.h ${src_dir}/${module}/dynload/*.h ${src_dir}/${module}/details/*.h ${PADDLE_BINARY_DIR}/paddle/fluid/platform/profiler.pb.h
+        DSTS ${dst_dir}/${module} ${dst_dir}/${module}/dynload ${dst_dir}/${module}/details ${dst_dir}/${module}
        )
 set(module "string")
-copy(string_lib
+copy(fluid_lib_dist
        SRCS ${src_dir}/${module}/*.h ${src_dir}/${module}/tinyformat/*.h
        DSTS ${dst_dir}/${module} ${dst_dir}/${module}/tinyformat
        )
 set(module "pybind")
-copy(pybind_lib
+copy(fluid_lib_dist
        SRCS ${CMAKE_CURRENT_BINARY_DIR}/paddle/fluid/${module}/pybind.h
        DSTS ${dst_dir}/${module}
        )
 # CMakeCache Info
-copy(cmake_cache
-        SRCS ${CMAKE_CURRENT_BINARY_DIR}/CMakeCache.txt
-        DSTS ${FLUID_INSTALL_DIR})
-# This command generates a complete fluid library for both train and inference
-add_custom_target(fluid_lib_dist DEPENDS ${fluid_lib_dist_dep})
-# Following commands generate a inference-only fluid library
-# third_party, version.txt and CMakeCache.txt are the same position with ${FLUID_INSTALL_DIR}
-copy(third_party DEPS fluid_lib_dist
-        SRCS ${FLUID_INSTALL_DIR}/third_party ${FLUID_INSTALL_DIR}/CMakeCache.txt
-        DSTS ${FLUID_INFERENCE_INSTALL_DIR} ${FLUID_INFERENCE_INSTALL_DIR}
+copy(fluid_lib_dist
+        SRCS ${FLUID_INFERENCE_INSTALL_DIR}/third_party ${CMAKE_CURRENT_BINARY_DIR}/CMakeCache.txt
+        DSTS ${FLUID_INSTALL_DIR} ${FLUID_INSTALL_DIR}
        )
-# only need libpaddle_fluid.so/a and paddle_*.h for inference-only library
-copy(inference_api_lib DEPS fluid_lib_dist
-        SRCS ${paddle_fluid_lib}
-             ${FLUID_INSTALL_DIR}/paddle/fluid/inference/paddle_*.h
-        DSTS ${FLUID_INFERENCE_INSTALL_DIR}/paddle/lib ${FLUID_INFERENCE_INSTALL_DIR}/paddle/include
-        )
-add_custom_target(inference_lib_dist DEPENDS third_party inference_api_lib)
 # paddle fluid version
 function(version version_file)
     execute_process(
......
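Taken together, the rewrite replaces the per-library copy targets (eigen3_lib, gflags_lib, ..., cmake_cache, inference_api_lib) with three aggregate targets chained through explicit dependencies. A condensed sketch, taken from the new side of the diff with the list contents abbreviated:

    # third-party builds are grouped under a single aggregate target
    set(third_party_deps eigen3 gflags glog boost xxhash zlib)
    add_custom_target(third_party DEPENDS ${third_party_deps})
    # the inference-only package depends on third_party plus the core fluid libraries
    set(inference_lib_deps third_party paddle_fluid paddle_fluid_shared)
    add_custom_target(inference_lib_dist DEPENDS ${inference_lib_deps})
    # the full train+inference package reuses inference_lib_dist and is built by default (ALL)
    set(fluid_lib_deps inference_lib_dist)
    add_custom_target(fluid_lib_dist ALL DEPENDS ${fluid_lib_deps})
    add_dependencies(fluid_lib_dist framework_proto)  # generated-header targets hooked in where needed

Since fluid_lib_dist carries ALL, a plain build should produce the complete library, while building only the inference_lib_dist target (for example, cmake --build . --target inference_lib_dist) should yield just the inference package, assuming the paddle_fluid and paddle_fluid_shared targets are defined elsewhere in the build.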