diff --git a/CMakeLists.txt b/CMakeLists.txt
index 8dab01f14a7a82213ae92d5fbcfce619e9939a96..83f9ca4c7dd09b428863f5492996f355fa4b0f07 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -75,6 +75,7 @@ include(generic)
 include(flags)
 
 if (NOT CLIENT_ONLY)
+include(external/cudnn)
 include(paddlepaddle)
 include(external/opencv)
 endif()
diff --git a/cmake/external/cudnn.cmake b/cmake/external/cudnn.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..98466d44fc0dd91ef0cc8e8eac2660c42a19267c
--- /dev/null
+++ b/cmake/external/cudnn.cmake
@@ -0,0 +1,102 @@
+if(NOT WITH_GPU)
+    return()
+endif()
+
+if(WIN32)
+    set(CUDNN_ROOT ${CUDA_TOOLKIT_ROOT_DIR})
+else(WIN32)
+    set(CUDNN_ROOT "/usr" CACHE PATH "CUDNN ROOT")
+endif(WIN32)
+
+find_path(CUDNN_INCLUDE_DIR cudnn.h
+    PATHS ${CUDNN_ROOT} ${CUDNN_ROOT}/include
+    $ENV{CUDNN_ROOT} $ENV{CUDNN_ROOT}/include ${CUDA_TOOLKIT_INCLUDE}
+    NO_DEFAULT_PATH
+)
+
+get_filename_component(__libpath_hist ${CUDA_CUDART_LIBRARY} PATH)
+
+set(TARGET_ARCH "x86_64")
+if(CMAKE_SYSTEM_PROCESSOR)
+    set(TARGET_ARCH ${CMAKE_SYSTEM_PROCESSOR})
+endif()
+
+list(APPEND CUDNN_CHECK_LIBRARY_DIRS
+    ${CUDNN_ROOT}
+    ${CUDNN_ROOT}/lib64
+    ${CUDNN_ROOT}/lib
+    ${CUDNN_ROOT}/lib/${TARGET_ARCH}-linux-gnu
+    ${CUDNN_ROOT}/local/cuda-${CUDA_VERSION}/targets/${TARGET_ARCH}-linux/lib/
+    $ENV{CUDNN_ROOT}
+    $ENV{CUDNN_ROOT}/lib64
+    $ENV{CUDNN_ROOT}/lib
+    /usr/lib
+    ${CUDA_TOOLKIT_ROOT_DIR}
+    ${CUDA_TOOLKIT_ROOT_DIR}/lib/x64
+    )
+set(CUDNN_LIB_NAME "")
+if (LINUX)
+set(CUDNN_LIB_NAME "libcudnn.so")
+endif(LINUX)
+
+if(WIN32)
+# only support cudnn7
+set(CUDNN_LIB_NAME "cudnn.lib" "cudnn64_7.dll")
+endif(WIN32)
+
+if(APPLE)
+set(CUDNN_LIB_NAME "libcudnn.dylib" "libcudnn.so")
+endif(APPLE)
+
+find_library(CUDNN_LIBRARY NAMES ${CUDNN_LIB_NAME} # libcudnn_static.a
+    PATHS ${CUDNN_CHECK_LIBRARY_DIRS} ${CUDNN_INCLUDE_DIR} ${__libpath_hist}
+    NO_DEFAULT_PATH
+    DOC "Path to cuDNN library.")
+
+
+if(CUDNN_INCLUDE_DIR AND CUDNN_LIBRARY)
+    set(CUDNN_FOUND ON)
+else()
+    set(CUDNN_FOUND OFF)
+endif()
+
+if(CUDNN_FOUND)
+    file(READ ${CUDNN_INCLUDE_DIR}/cudnn.h CUDNN_VERSION_FILE_CONTENTS)
+
+    get_filename_component(CUDNN_LIB_PATH ${CUDNN_LIBRARY} DIRECTORY)
+
+    string(REGEX MATCH "define CUDNN_VERSION +([0-9]+)"
+        CUDNN_VERSION "${CUDNN_VERSION_FILE_CONTENTS}")
+    string(REGEX REPLACE "define CUDNN_VERSION +([0-9]+)" "\\1"
+        CUDNN_VERSION "${CUDNN_VERSION}")
+
+    if("${CUDNN_VERSION}" STREQUAL "2000")
+        message(STATUS "Current cuDNN version is v2. ")
+    else()
+        string(REGEX MATCH "define CUDNN_MAJOR +([0-9]+)" CUDNN_MAJOR_VERSION
+            "${CUDNN_VERSION_FILE_CONTENTS}")
+        string(REGEX REPLACE "define CUDNN_MAJOR +([0-9]+)" "\\1"
+            CUDNN_MAJOR_VERSION "${CUDNN_MAJOR_VERSION}")
+        string(REGEX MATCH "define CUDNN_MINOR +([0-9]+)" CUDNN_MINOR_VERSION
+            "${CUDNN_VERSION_FILE_CONTENTS}")
+        string(REGEX REPLACE "define CUDNN_MINOR +([0-9]+)" "\\1"
+            CUDNN_MINOR_VERSION "${CUDNN_MINOR_VERSION}")
+        string(REGEX MATCH "define CUDNN_PATCHLEVEL +([0-9]+)"
+            CUDNN_PATCHLEVEL_VERSION "${CUDNN_VERSION_FILE_CONTENTS}")
+        string(REGEX REPLACE "define CUDNN_PATCHLEVEL +([0-9]+)" "\\1"
+            CUDNN_PATCHLEVEL_VERSION "${CUDNN_PATCHLEVEL_VERSION}")
+
+        if(NOT CUDNN_MAJOR_VERSION)
+            set(CUDNN_VERSION "???")
+        else()
+            add_definitions("-DPADDLE_CUDNN_BINVER=\"${CUDNN_MAJOR_VERSION}\"")
+            math(EXPR CUDNN_VERSION
+                "${CUDNN_MAJOR_VERSION} * 1000 +
+                 ${CUDNN_MINOR_VERSION} * 100 + ${CUDNN_PATCHLEVEL_VERSION}")
+        endif()
+
+        message(STATUS "Current cuDNN header is ${CUDNN_INCLUDE_DIR}/cudnn.h. "
+            "Current cuDNN version is v${CUDNN_MAJOR_VERSION}.${CUDNN_MINOR_VERSION}. ")
+
+    endif()
+endif()
diff --git a/cmake/paddlepaddle.cmake b/cmake/paddlepaddle.cmake
index 3e2d0f742a07a59986a2441d3d56c4202e866961..1cf2c0c867b2ae4b9d8144ebbb25f724882fa3a1 100644
--- a/cmake/paddlepaddle.cmake
+++ b/cmake/paddlepaddle.cmake
@@ -15,71 +15,70 @@
 INCLUDE(ExternalProject)
 
 SET(PADDLE_SOURCES_DIR ${THIRD_PARTY_PATH}/Paddle)
+SET(PADDLE_DOWNLOAD_DIR ${PADDLE_SOURCES_DIR}/src/extern_paddle)
 SET(PADDLE_INSTALL_DIR ${THIRD_PARTY_PATH}/install/Paddle/)
 SET(PADDLE_INCLUDE_DIR "${PADDLE_INSTALL_DIR}/include" CACHE PATH "PaddlePaddle include directory." FORCE)
 SET(PADDLE_LIBRARIES "${PADDLE_INSTALL_DIR}/lib/libpaddle_fluid.a" CACHE FILEPATH "Paddle library." FORCE)
-INCLUDE_DIRECTORIES(${CMAKE_BINARY_DIR}/Paddle/fluid_install_dir)
 
 # Reference https://stackoverflow.com/questions/45414507/pass-a-list-of-prefix-paths-to-externalproject-add-in-cmake-args
 set(prefix_path "${THIRD_PARTY_PATH}/install/gflags|${THIRD_PARTY_PATH}/install/leveldb|${THIRD_PARTY_PATH}/install/snappy|${THIRD_PARTY_PATH}/install/gtest|${THIRD_PARTY_PATH}/install/protobuf|${THIRD_PARTY_PATH}/install/zlib|${THIRD_PARTY_PATH}/install/glog")
 
 message( "WITH_GPU = ${WITH_GPU}")
 
-# If minimal .a is need, you can set WITH_DEBUG_SYMBOLS=OFF
+
+# Paddle Version should be one of:
+# latest: latest develop build
+# version number like 1.5.2
+SET(PADDLE_VERSION "latest")
+
+if (WITH_GPU)
+    SET(PADDLE_LIB_VERSION "${PADDLE_VERSION}-gpu-cuda${CUDA_VERSION_MAJOR}-cudnn7-avx-mkl")
+else()
+    if (AVX_FOUND)
+        if (WITH_MKLML)
+            SET(PADDLE_LIB_VERSION "${PADDLE_VERSION}-cpu-avx-mkl")
+        else()
+            SET(PADDLE_LIB_VERSION "${PADDLE_VERSION}-cpu-avx-openblas")
+        endif()
+    else()
+        SET(PADDLE_LIB_VERSION "${PADDLE_VERSION}-cpu-noavx-openblas")
+    endif()
+endif()
+
+SET(PADDLE_LIB_PATH "http://paddle-inference-lib.bj.bcebos.com/${PADDLE_LIB_VERSION}/fluid_inference.tgz")
+MESSAGE(STATUS "PADDLE_LIB_PATH=${PADDLE_LIB_PATH}")
+
 ExternalProject_Add(
-    extern_paddle
+    "extern_paddle"
     ${EXTERNAL_PROJECT_LOG_ARGS}
-    # TODO(wangguibao): change to de newst repo when they changed.
-    GIT_REPOSITORY  "https://github.com/PaddlePaddle/Paddle"
-    GIT_TAG         "v1.5.1"
-    PREFIX          ${PADDLE_SOURCES_DIR}
-    UPDATE_COMMAND  ""
-    BINARY_DIR      ${CMAKE_BINARY_DIR}/Paddle
-    CMAKE_ARGS      -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-                    -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-                    -DCMAKE_INSTALL_PREFIX=${PADDLE_INSTALL_DIR}
-                    -DCMAKE_INSTALL_LIBDIR=${PADDLE_INSTALL_DIR}/lib
-                    -DCMAKE_POSITION_INDEPENDENT_CODE=ON
-                    -DCMAKE_BUILD_TYPE=${THIRD_PARTY_BUILD_TYPE}
-                    -DCMAKE_PREFIX_PATH=${prefix_path}
-                    -DCMAKE_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR}
-                    -DWITH_SWIG_PY=OFF
-                    -DWITH_PYTHON=OFF
-                    -DWITH_MKL=${WITH_MKL}
-                    -DWITH_AVX=${WITH_AVX}
-                    -DWITH_MKLDNN=OFF
-                    -DWITH_GPU=${WITH_GPU}
-                    -DWITH_FLUID_ONLY=ON
-                    -DWITH_TESTING=OFF
-                    -DWITH_DISTRIBUTE=OFF
-                    -DON_INFER=ON
-                    ${EXTERNAL_OPTIONAL_ARGS}
-    LIST_SEPARATOR  |
-    CMAKE_CACHE_ARGS    -DCMAKE_INSTALL_PREFIX:PATH=${PADDLE_INSTALL_DIR}
-                        -DCMAKE_INSTALL_LIBDIR:PATH=${PADDLE_INSTALL_DIR}/lib
-                        -DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=ON
-                        -DCMAKE_BUILD_TYPE:STRING=${THIRD_PARTY_BUILD_TYPE}
-    BUILD_COMMAND   $(MAKE)
-    INSTALL_COMMAND $(MAKE) fluid_lib_dist
+    URL             "${PADDLE_LIB_PATH}"
+    PREFIX          "${PADDLE_SOURCES_DIR}"
+    DOWNLOAD_DIR    "${PADDLE_DOWNLOAD_DIR}"
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND   ""
+    UPDATE_COMMAND  ""
+    INSTALL_COMMAND
+        ${CMAKE_COMMAND} -E copy_directory ${PADDLE_DOWNLOAD_DIR}/paddle/include ${PADDLE_INSTALL_DIR}/include &&
+        ${CMAKE_COMMAND} -E copy_directory ${PADDLE_DOWNLOAD_DIR}/paddle/lib ${PADDLE_INSTALL_DIR}/lib &&
+        ${CMAKE_COMMAND} -E copy_directory ${PADDLE_DOWNLOAD_DIR}/third_party ${PADDLE_INSTALL_DIR}/third_party &&
+        ${CMAKE_COMMAND} -E copy ${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib/libmkldnn.so.0 ${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib/libmkldnn.so
 )
 
-ExternalProject_Get_Property(extern_paddle BINARY_DIR)
-SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_RPATH}" "${BINARY_DIR}/fluid_install_dir/third_party/install/mklml/lib")
-LINK_DIRECTORIES(${BINARY_DIR}/fluid_install_dir/third_party/install/mklml/lib)
-
-ADD_LIBRARY(paddle_fluid STATIC IMPORTED GLOBAL)
-SET_PROPERTY(TARGET paddle_fluid PROPERTY IMPORTED_LOCATION ${BINARY_DIR}/fluid_install_dir/paddle/fluid/inference/libpaddle_fluid.a)
+INCLUDE_DIRECTORIES(${PADDLE_INCLUDE_DIR})
+SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_RPATH}" "${PADDLE_INSTALL_DIR}/third_party/install/mklml/lib")
+LINK_DIRECTORIES(${PADDLE_INSTALL_DIR}/third_party/install/mklml/lib)
 
-LIST(APPEND external_project_dependencies paddle)
+SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_RPATH}" "${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib")
+LINK_DIRECTORIES(${PADDLE_INSTALL_DIR}/third_party/install/mkldnn/lib)
 
-ADD_LIBRARY(snappystream STATIC IMPORTED GLOBAL)
-SET_PROPERTY(TARGET snappystream PROPERTY IMPORTED_LOCATION ${BINARY_DIR}/fluid_install_dir/third_party/install/snappystream/lib/libsnappystream.a)
+ADD_LIBRARY(paddle_fluid STATIC IMPORTED GLOBAL)
+SET_PROPERTY(TARGET paddle_fluid PROPERTY IMPORTED_LOCATION ${PADDLE_INSTALL_DIR}/lib/libpaddle_fluid.a)
 
 ADD_LIBRARY(xxhash STATIC IMPORTED GLOBAL)
-SET_PROPERTY(TARGET xxhash PROPERTY IMPORTED_LOCATION ${BINARY_DIR}/fluid_install_dir/third_party/install/xxhash/lib/libxxhash.a)
+SET_PROPERTY(TARGET xxhash PROPERTY IMPORTED_LOCATION ${PADDLE_INSTALL_DIR}/third_party/install/xxhash/lib/libxxhash.a)
+
+LIST(APPEND external_project_dependencies paddle)
 
 LIST(APPEND paddle_depend_libs
-    snappystream
-    snappy
     xxhash)
 
diff --git a/demo-serving/CMakeLists.txt b/demo-serving/CMakeLists.txt
index 93650cf13a4e8c7fe3077e1780e15074081b2de0..82875d23d566cb97205cc2acb009f8da2642e460 100644
--- a/demo-serving/CMakeLists.txt
+++ b/demo-serving/CMakeLists.txt
@@ -59,7 +59,7 @@ target_link_libraries(serving kvdb rocksdb)
 if(WITH_GPU)
   target_link_libraries(serving ${CUDA_LIBRARIES})
 endif()
-target_link_libraries(serving -liomp5 -lmklml_intel -lpthread
+target_link_libraries(serving -liomp5 -lmklml_intel -lmkldnn -lpthread
                       -lcrypto -lm -lrt -lssl -ldl -lz -lbz2)
 
 install(TARGETS serving
@@ -75,7 +75,10 @@ install(FILES ${inc}
         DESTINATION ${PADDLE_SERVING_INSTALL_DIR}/include/serving)
 
 if (${WITH_MKL})
-  install(FILES ${CMAKE_BINARY_DIR}/Paddle/third_party/install/mklml/lib/libmklml_intel.so
-    ${CMAKE_BINARY_DIR}/Paddle/third_party/install/mklml/lib/libiomp5.so DESTINATION
+  install(FILES
+    ${CMAKE_BINARY_DIR}/third_party/install/Paddle/third_party/install/mklml/lib/libmklml_intel.so
+    ${CMAKE_BINARY_DIR}/third_party/install/Paddle/third_party/install/mklml/lib/libiomp5.so
+    ${CMAKE_BINARY_DIR}/third_party/install/Paddle/third_party/install/mkldnn/lib/libmkldnn.so
+    DESTINATION
     ${PADDLE_SERVING_INSTALL_DIR}/demo/serving/bin)
 endif()
diff --git a/demo-serving/op/classify_op.h b/demo-serving/op/classify_op.h
index 366793cc7c1ec38197912399b06b6e9e8db8e996..c381f032d0e3a7e19e1a711b1ebe1747ee3145d8 100644
--- a/demo-serving/op/classify_op.h
+++ b/demo-serving/op/classify_op.h
@@ -21,7 +21,7 @@
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 #endif
 #else
-#include "paddle/fluid/inference/paddle_inference_api.h"
+#include "paddle_inference_api.h"  // NOLINT
 #endif
 
 #include "demo-serving/image_class.pb.h"
diff --git a/demo-serving/op/ctr_prediction_op.h b/demo-serving/op/ctr_prediction_op.h
index a12cccab68c06c2238e7205b90b095318b28f3f0..6bec7c64c1f580ee10419ec4743776df9729ef51 100644
--- a/demo-serving/op/ctr_prediction_op.h
+++ b/demo-serving/op/ctr_prediction_op.h
@@ -21,7 +21,7 @@
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 #endif
 #else
-#include "paddle/fluid/inference/paddle_inference_api.h"
+#include "paddle_inference_api.h"  // NOLINT
 #endif
 
 #include "demo-serving/ctr_prediction.pb.h"
diff --git a/demo-serving/op/reader_op.h b/demo-serving/op/reader_op.h
index 484d6f62d0f64bd3efbf7f8de3b4068a344ae048..546ca19667af0161ddb62f354e32791d15d8ae4b 100644
--- a/demo-serving/op/reader_op.h
+++ b/demo-serving/op/reader_op.h
@@ -35,7 +35,7 @@
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 #endif
 #else
-#include "paddle/fluid/inference/paddle_inference_api.h"
+#include "paddle_inference_api.h"  // NOLINT
 #endif
 
 namespace baidu {
diff --git a/demo-serving/op/text_classification_op.h b/demo-serving/op/text_classification_op.h
index bef8ec520dc45e97def913715d714e2c46067429..21ac6991be1b47654618c52c4123a5f99f4bc185 100644
--- a/demo-serving/op/text_classification_op.h
+++ b/demo-serving/op/text_classification_op.h
@@ -21,7 +21,7 @@
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 #endif
 #else
-#include "paddle/fluid/inference/paddle_inference_api.h"
+#include "paddle_inference_api.h"  // NOLINT
 #endif
 
 #include "demo-serving/text_classification.pb.h"
diff --git a/inferencer-fluid-cpu/include/fluid_cpu_engine.h b/inferencer-fluid-cpu/include/fluid_cpu_engine.h
index 41ab63992361f70f1434efe2c4982342e34b5525..b951670ee6193f6ecd7bc88e2a6bc57134943fe9 100644
--- a/inferencer-fluid-cpu/include/fluid_cpu_engine.h
+++ b/inferencer-fluid-cpu/include/fluid_cpu_engine.h
@@ -28,7 +28,7 @@
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 #endif
 #else
-#include "paddle/fluid/inference/paddle_inference_api.h"
+#include "paddle_inference_api.h"  // NOLINT
 #endif
 
 #include "predictor/framework/infer.h"
diff --git a/inferencer-fluid-gpu/include/fluid_gpu_engine.h b/inferencer-fluid-gpu/include/fluid_gpu_engine.h
index 226e114f8cea32eda110cde6d7810506f7236d61..30bae22d2f8878d4a2351a7417eb927e7312c251 100644
--- a/inferencer-fluid-gpu/include/fluid_gpu_engine.h
+++ b/inferencer-fluid-gpu/include/fluid_gpu_engine.h
@@ -28,7 +28,7 @@
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 #endif
 #else
-#include "paddle/fluid/inference/paddle_inference_api.h"
+#include "paddle_inference_api.h"  // NOLINT
 #endif
 
 #include "predictor/framework/infer.h"