Unverified · Commit 2c0a4a34 · authored by Wilber · committed by GitHub

call_stack is turned on by default when ON_INFER=ON (#29798)

Parent 067d7f1d
@@ -83,14 +83,24 @@ if (USE_TENSORRT AND WITH_GPU)
   endif()
   set(TENSORRT_INCLUDE_DIR ${TENSORRT_ROOT}/include)
   set(TENSORRT_LIB_DIR ${TENSORRT_ROOT}/lib)
-endif()
-
-if (NOT WIN32)
-  if (USE_TENSORRT AND WITH_GPU)
-    include_directories("${TENSORRT_INCLUDE_DIR}")
-    link_directories("${TENSORRT_LIB_DIR}")
-  endif()
-endif(NOT WIN32)
+  file(READ ${TENSORRT_INCLUDE_DIR}/NvInfer.h TENSORRT_VERSION_FILE_CONTENTS)
+  string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
+    "${TENSORRT_VERSION_FILE_CONTENTS}")
+  if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
+    file(READ ${TENSORRT_INCLUDE_DIR}/NvInferVersion.h TENSORRT_VERSION_FILE_CONTENTS)
+    string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
+      "${TENSORRT_VERSION_FILE_CONTENTS}")
+  endif()
+  if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
+    message(SEND_ERROR "Failed to detect TensorRT version.")
+  endif()
+  string(REGEX REPLACE "define NV_TENSORRT_MAJOR +([0-9]+)" "\\1"
+    TENSORRT_MAJOR_VERSION "${TENSORRT_MAJOR_VERSION}")
+  message(STATUS "Current TensorRT header is ${TENSORRT_INCLUDE_DIR}/NvInfer.h. "
+    "Current TensorRT version is v${TENSORRT_MAJOR_VERSION}. ")
+  include_directories("${TENSORRT_INCLUDE_DIR}")
+  link_directories("${TENSORRT_LIB_DIR}")
+endif()
 
 if(WITH_MKL)
   set(MATH_LIB_PATH "${PADDLE_LIB_THIRD_PARTY_PATH}mklml")
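Context note (not part of the commit): newer TensorRT releases define the version macros in NvInferVersion.h, which NvInfer.h includes, while older releases define them directly in NvInfer.h; that is presumably why the script falls back to the second header when the first regex match comes back empty. A minimal C++ sketch of the macro the regex extracts, assuming the TensorRT headers are on the include path (the printed value depends on the installed release):

    // minimal_trt_version.cc -- prints the same macro the CMake regex matches,
    // i.e. a line of the form "#define NV_TENSORRT_MAJOR 7" in the TRT headers.
    #include <NvInferVersion.h>  // on older TensorRT releases use <NvInfer.h> instead
    #include <cstdio>

    int main() {
      std::printf("TensorRT major version: %d\n", NV_TENSORRT_MAJOR);
      return 0;
    }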
@@ -147,14 +157,17 @@ endif(NOT WIN32)
 if(WITH_GPU)
   if(NOT WIN32)
     if (USE_TENSORRT)
-      set(DEPS ${DEPS} ${TENSORRT_LIB_DIR}/libnvinfer${CMAKE_STATIC_LIBRARY_SUFFIX})
-      set(DEPS ${DEPS} ${TENSORRT_LIB_DIR}/libnvinfer_plugin${CMAKE_STATIC_LIBRARY_SUFFIX})
+      set(DEPS ${DEPS} ${TENSORRT_LIB_DIR}/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
+      set(DEPS ${DEPS} ${TENSORRT_LIB_DIR}/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
     endif()
     set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX})
   else()
     if(USE_TENSORRT)
       set(DEPS ${DEPS} ${TENSORRT_LIB_DIR}/nvinfer${CMAKE_STATIC_LIBRARY_SUFFIX})
       set(DEPS ${DEPS} ${TENSORRT_LIB_DIR}/nvinfer_plugin${CMAKE_STATIC_LIBRARY_SUFFIX})
+      if(${TENSORRT_MAJOR_VERSION} GREATER_EQUAL 7)
+        set(DEPS ${DEPS} ${TENSORRT_LIB_DIR}/myelin64_1${CMAKE_STATIC_LIBRARY_SUFFIX})
+      endif()
     endif()
     set(DEPS ${DEPS} ${CUDA_LIB}/cudart${CMAKE_STATIC_LIBRARY_SUFFIX} )
     set(DEPS ${DEPS} ${CUDA_LIB}/cublas${CMAKE_STATIC_LIBRARY_SUFFIX} )
@@ -172,6 +185,11 @@ if(WIN32)
           COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_LIB_DIR}/nvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX}
             ${CMAKE_BINARY_DIR}/${CMAKE_BUILD_TYPE}
     )
+    if(${TENSORRT_MAJOR_VERSION} GREATER_EQUAL 7)
+      add_custom_command(TARGET ${DEMO_NAME} POST_BUILD
+              COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_LIB_DIR}/myelin64_1${CMAKE_SHARED_LIBRARY_SUFFIX}
+                ${CMAKE_BINARY_DIR}/${CMAKE_BUILD_TYPE})
+    endif()
   endif()
   if(WITH_MKL)
     add_custom_command(TARGET ${DEMO_NAME} POST_BUILD
......
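Reading of the CMake changes above (a hedged note, not text from the commit): on Linux the demo now links the shared libnvinfer/libnvinfer_plugin libraries instead of the static ones, and on Windows it additionally links myelin64_1 and copies the corresponding DLL next to the built ${DEMO_NAME} whenever the detected TENSORRT_MAJOR_VERSION is 7 or higher, apparently because the Windows TensorRT 7 distribution ships that extra library alongside nvinfer.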
@@ -498,8 +498,14 @@ DEFINE_bool(use_mkldnn, false, "Use MKLDNN to run");
  * If FLAGS_call_stack_level == 2, the python stack, c++ stack, and error
  * message summary will be shown.
  */
+#ifdef PADDLE_ON_INFERENCE
+static const int32_t kDefaultCallStackLevel = 2;
+#else
+static const int32_t kDefaultCallStackLevel = 1;
+#endif
 DEFINE_int32(
-    call_stack_level, 1,
+    call_stack_level, kDefaultCallStackLevel,
     "Determine the call stack to print when error or exeception happens."
     // TODO(zhiqiu): implement logic of FLAGS_call_stack_level==0
     // "If FLAGS_call_stack_level == 0, only the error message summary will be "
......
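Usage note (not part of the diff): kDefaultCallStackLevel only changes the compile-time default, so builds with PADDLE_ON_INFERENCE (ON_INFER=ON) start at level 2 while other builds keep level 1. Assuming the usual Paddle convention of mapping FLAGS_* environment variables onto these flags, the value can still be overridden at run time, e.g. launching the process with FLAGS_call_stack_level=1 set in the environment restores the quieter behavior for an inference build.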
@@ -47,6 +47,7 @@ static void *dlsym(void *handle, const char *symbol_name) {
   found_symbol = GetProcAddress((HMODULE)handle, symbol_name);
   if (found_symbol == NULL) {
+    LOG(ERROR) << "Load symbol " << symbol_name << " failed.";
     throw std::runtime_error(std::string(symbol_name) + " not found.");
   }
   return reinterpret_cast<void *>(found_symbol);
......