From 61cae53e7910465107d8978b348c8e6d70b44e51 Mon Sep 17 00:00:00 2001
From: luotao1
Date: Thu, 6 Sep 2018 14:16:25 +0800
Subject: [PATCH] support anakin for CPU-only environment

---
 CMakeLists.txt                               |  4 +++-
 cmake/external/anakin.cmake                  | 19 +++++-----------
 cmake/inference_lib.cmake                    |  2 +-
 paddle/fluid/inference/api/CMakeLists.txt    | 22 ++++++++++++++-----
 .../fluid/inference/api/api_anakin_engine.cc |  7 ++++++
 5 files changed, 34 insertions(+), 20 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index b1d0abdf2c..c2fa5420e9 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -213,9 +213,11 @@ include(configure)  # add paddle env configuration
 if(WITH_GPU)
   include(cuda)
   include(tensorrt)
+endif()
+if(WITH_MKL OR WITH_MKLML)
   include(external/anakin)
 elseif()
-  set(WITH_ANAKIN OFF CACHE STRING "Anakin is used in GPU only now." FORCE)
+  set(WITH_ANAKIN OFF CACHE STRING "Anakin is only used with MKL now." FORCE)
 endif()
 
 include(generic)  # simplify cmake module
diff --git a/cmake/external/anakin.cmake b/cmake/external/anakin.cmake
index dc6730662f..5a12c6490e 100644
--- a/cmake/external/anakin.cmake
+++ b/cmake/external/anakin.cmake
@@ -16,16 +16,6 @@
 set(ANAKIN_LIBRARY ${ANAKIN_INSTALL_DIR})
 set(ANAKIN_SHARED_LIB ${ANAKIN_LIBRARY}/libanakin.so)
 set(ANAKIN_SABER_LIB ${ANAKIN_LIBRARY}/libanakin_saber_common.so)
-# TODO(luotao): ANAKIN_MODLE_URL etc will move to demo ci later.
-set(INFERENCE_URL "http://paddle-inference-dist.bj.bcebos.com")
-set(ANAKIN_MODLE_URL "${INFERENCE_URL}/mobilenet_v2.anakin.bin")
-set(ANAKIN_RNN_MODLE_URL "${INFERENCE_URL}/anakin_test%2Fditu_rnn.anakin2.model.bin")
-set(ANAKIN_RNN_DATA_URL "${INFERENCE_URL}/anakin_test%2Fditu_rnn_data.txt")
-execute_process(COMMAND bash -c "mkdir -p ${ANAKIN_SOURCE_DIR}")
-execute_process(COMMAND bash -c "cd ${ANAKIN_SOURCE_DIR}; wget -q --no-check-certificate ${ANAKIN_MODLE_URL} -N")
-execute_process(COMMAND bash -c "cd ${ANAKIN_SOURCE_DIR}; wget -q --no-check-certificate ${ANAKIN_RNN_MODLE_URL} -N")
-execute_process(COMMAND bash -c "cd ${ANAKIN_SOURCE_DIR}; wget -q --no-check-certificate ${ANAKIN_RNN_DATA_URL} -N")
-
 include_directories(${ANAKIN_INCLUDE})
 include_directories(${ANAKIN_INCLUDE}/saber/)
 include_directories(${ANAKIN_INCLUDE}/saber/core/)
@@ -48,6 +38,11 @@ set(ANAKIN_COMPILE_EXTRA_FLAGS
     -Wno-reorder
     -Wno-error=cpp)
 
+if(WITH_GPU)
+  set(CMAKE_ARGS_PREFIX -DUSE_GPU_PLACE=YES -DCUDNN_ROOT=${CUDNN_ROOT} -DCUDNN_INCLUDE_DIR=${CUDNN_INCLUDE_DIR})
+else()
+  set(CMAKE_ARGS_PREFIX -DUSE_GPU_PLACE=NO)
+endif()
 ExternalProject_Add(
     extern_anakin
     ${EXTERNAL_PROJECT_LOG_ARGS}
@@ -56,13 +51,11 @@ ExternalProject_Add(
     GIT_TAG "9424277cf9ae180a14aff09560d3cd60a49c76d2"
     PREFIX ${ANAKIN_SOURCE_DIR}
     UPDATE_COMMAND ""
-    CMAKE_ARGS -DUSE_GPU_PLACE=YES
+    CMAKE_ARGS ${CMAKE_ARGS_PREFIX}
                -DUSE_X86_PLACE=YES
                -DBUILD_WITH_UNIT_TEST=NO
                -DPROTOBUF_ROOT=${THIRD_PARTY_PATH}/install/protobuf
                -DMKLML_ROOT=${THIRD_PARTY_PATH}/install/mklml
-               -DCUDNN_ROOT=${CUDNN_ROOT}
-               -DCUDNN_INCLUDE_DIR=${CUDNN_INCLUDE_DIR}
                -DENABLE_OP_TIMER=${ANAKIN_ENABLE_OP_TIMER}
                ${EXTERNAL_OPTIONAL_ARGS}
     CMAKE_CACHE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${ANAKIN_INSTALL_DIR}
diff --git a/cmake/inference_lib.cmake b/cmake/inference_lib.cmake
index f61770514e..6e66ba94ab 100644
--- a/cmake/inference_lib.cmake
+++ b/cmake/inference_lib.cmake
@@ -145,7 +145,7 @@ copy(memory_lib
 set(inference_deps paddle_fluid_shared paddle_fluid)
 
 set(module "inference/api")
-if (WITH_ANAKIN AND WITH_GPU)
+if (WITH_ANAKIN AND WITH_MKL)
   copy(anakin_inference_lib DEPS paddle_inference_api inference_anakin_api
    SRCS
    ${PADDLE_BINARY_DIR}/paddle/fluid/inference/api/libinference_anakin_api* # compiled anakin api
diff --git a/paddle/fluid/inference/api/CMakeLists.txt b/paddle/fluid/inference/api/CMakeLists.txt
index ea00bf3649..907d1163e7 100644
--- a/paddle/fluid/inference/api/CMakeLists.txt
+++ b/paddle/fluid/inference/api/CMakeLists.txt
@@ -73,7 +73,7 @@ cc_library(paddle_inference_tensorrt_subgraph_engine
   inference_api_test(test_api_tensorrt_subgraph_engine SRC api_tensorrt_subgraph_engine_tester.cc ARGS test_word2vec)
 endif()
 
-if (WITH_ANAKIN AND WITH_GPU) # only needed in CI
+if (WITH_ANAKIN AND WITH_MKL) # only needed in CI
   # compile the libinference_anakin_api.a and anakin.so.
   cc_library(inference_anakin_api SRCS api.cc api_anakin_engine.cc DEPS anakin_shared anakin_saber mklml)
   cc_library(inference_anakin_api_shared SHARED SRCS api.cc api_anakin_engine.cc DEPS anakin_shared anakin_saber)
@@ -83,12 +83,24 @@ if (WITH_ANAKIN AND WITH_GPU) # only needed in CI
   anakin_target(inference_anakin_api)
   anakin_target(inference_anakin_api_shared)
   if (WITH_TESTING)
-    cc_test(api_anakin_engine_tester SRCS api_anakin_engine_tester.cc
-            ARGS --model=${ANAKIN_SOURCE_DIR}/mobilenet_v2.anakin.bin
-            DEPS inference_anakin_api_shared dynload_cuda SERIAL)
+    # TODO(luotao): ANAKIN_MODLE_URL etc will move to demo ci later.
+    set(INFERENCE_URL "http://paddle-inference-dist.bj.bcebos.com")
+    set(ANAKIN_RNN_MODLE_URL "${INFERENCE_URL}/anakin_test%2Fditu_rnn.anakin2.model.bin")
+    set(ANAKIN_RNN_DATA_URL "${INFERENCE_URL}/anakin_test%2Fditu_rnn_data.txt")
+    execute_process(COMMAND bash -c "mkdir -p ${ANAKIN_SOURCE_DIR}")
+    execute_process(COMMAND bash -c "cd ${ANAKIN_SOURCE_DIR}; wget -q --no-check-certificate ${ANAKIN_RNN_MODLE_URL} -N")
+    execute_process(COMMAND bash -c "cd ${ANAKIN_SOURCE_DIR}; wget -q --no-check-certificate ${ANAKIN_RNN_DATA_URL} -N")
+    if(WITH_GPU)
+      set(anakin_test_extra_deps dynload_cuda)
+      set(ANAKIN_MODLE_URL "${INFERENCE_URL}/mobilenet_v2.anakin.bin")
+      execute_process(COMMAND bash -c "cd ${ANAKIN_SOURCE_DIR}; wget -q --no-check-certificate ${ANAKIN_MODLE_URL} -N")
+      cc_test(api_anakin_engine_tester SRCS api_anakin_engine_tester.cc
+              ARGS --model=${ANAKIN_SOURCE_DIR}/mobilenet_v2.anakin.bin
+              DEPS inference_anakin_api_shared ${anakin_test_extra_deps} SERIAL)
+    endif()
     cc_test(api_anakin_engine_rnn_tester SRCS api_anakin_engine_rnn_tester.cc
             ARGS --model=${ANAKIN_SOURCE_DIR}/anakin_test%2Fditu_rnn.anakin2.model.bin
                  --datapath=${ANAKIN_SOURCE_DIR}/anakin_test%2Fditu_rnn_data.txt
-            DEPS inference_anakin_api_shared dynload_cuda SERIAL)
+            DEPS inference_anakin_api_shared ${anakin_test_extra_deps} SERIAL)
   endif(WITH_TESTING)
 endif()
diff --git a/paddle/fluid/inference/api/api_anakin_engine.cc b/paddle/fluid/inference/api/api_anakin_engine.cc
index ea66aa89b8..43b31269d2 100644
--- a/paddle/fluid/inference/api/api_anakin_engine.cc
+++ b/paddle/fluid/inference/api/api_anakin_engine.cc
@@ -193,7 +193,9 @@ PaddleInferenceAnakinPredictor<Target>::Clone() {
   return std::move(cls);
 }
 
+#ifdef PADDLE_WITH_CUDA
 template class PaddleInferenceAnakinPredictor<anakin::NV>;
+#endif
 template class PaddleInferenceAnakinPredictor<anakin::X86>;
 
 // A factory to help create difference predictor.
@@ -202,10 +204,15 @@ std::unique_ptr<PaddlePredictor> CreatePaddlePredictor<
     AnakinConfig, PaddleEngineKind::kAnakin>(const AnakinConfig &config) {
   VLOG(3) << "Anakin Predictor create.";
   if (config.target_type == AnakinConfig::NVGPU) {
+#ifdef PADDLE_WITH_CUDA
     VLOG(3) << "Anakin Predictor create on [ NVIDIA GPU ].";
     std::unique_ptr<PaddlePredictor> x(
         new PaddleInferenceAnakinPredictor<anakin::NV>(config));
     return x;
+#else
+    LOG(ERROR) << "AnakinConfig::NVGPU cannot be used in a CPU-only environment";
+    return nullptr;
+#endif
   } else if (config.target_type == AnakinConfig::X86) {
     VLOG(3) << "Anakin Predictor create on [ Intel X86 ].";
     std::unique_ptr<PaddlePredictor> x(
-- 
GitLab
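
Usage note (appended after the patch trailer, not part of the commit): a minimal sketch of how a caller would exercise the CPU-only path this change enables. It assumes the AnakinConfig fields (target_type, model_file, max_batch_size) and the CreatePaddlePredictor<AnakinConfig, PaddleEngineKind::kAnakin> specialization declared in paddle_inference_api.h at this revision; the model path is illustrative.

// Illustrative CPU-only caller; a sketch, not part of the patch.
#include <iostream>

#include "paddle/fluid/inference/api/paddle_inference_api.h"

int main() {
  paddle::AnakinConfig config;
  // X86 is the target available in a CPU-only build; with the patch,
  // requesting NVGPU without PADDLE_WITH_CUDA logs an error and yields
  // nullptr at runtime instead of breaking the build.
  config.target_type = paddle::AnakinConfig::X86;
  config.model_file = "./mobilenet_v2.anakin.bin";  // illustrative path
  config.max_batch_size = 1;

  auto predictor =
      paddle::CreatePaddlePredictor<paddle::AnakinConfig,
                                    paddle::PaddleEngineKind::kAnakin>(config);
  if (!predictor) {  // e.g. NVGPU was requested in a CPU-only build
    std::cerr << "Anakin predictor creation failed" << std::endl;
    return 1;
  }
  std::cout << "Anakin predictor created on X86" << std::endl;
  return 0;
}

Such a program should build against a tree configured with -DWITH_GPU=OFF -DWITH_MKL=ON -DWITH_ANAKIN=ON, which, per the CMakeLists.txt change above, now keeps Anakin enabled in MKL builds instead of forcing WITH_ANAKIN off.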