diff --git a/CMakeLists.txt b/CMakeLists.txt
index fb796103350ac4403d4151cf08eb4315bcde68fd..b1554fba5e1fa48b5cbdfe2e5b9f317a4f7fefb3 100755
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -63,8 +63,29 @@ if(WIN32)
         set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} /bigobj /MT")
         set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /bigobj /MTd")
         set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /bigobj /MT")
+        foreach(flag_var
+            CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
+            CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO
+            CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
+            CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO)
+            if(${flag_var} MATCHES "/MD")
+                string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
+            endif()
+        endforeach(flag_var)
     endif()
-
+
+    # windows build turn off warnings.
+    foreach(flag_var
+        CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
+        CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO
+        CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
+        CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO)
+        string(REGEX REPLACE "/W[1-4]" " /W0 " ${flag_var} "${${flag_var}}")
+    endforeach(flag_var)
+    foreach(flag_var CMAKE_CXX_FLAGS CMAKE_C_FLAGS)
+        set(${flag_var} "${${flag_var}} /w")
+    endforeach(flag_var)
+
     set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /wd4068 /wd4129 /wd4244 /wd4267 /wd4297 /wd4530 /wd4577 /wd4819 /wd4838 /MP")
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4068 /wd4129 /wd4244 /wd4267 /wd4297 /wd4530 /wd4577 /wd4819 /wd4838 /MP")
     message(STATUS "Using parallel compiling (/MP)")
diff --git a/cmake/external/cryptopp.cmake b/cmake/external/cryptopp.cmake
index af5dd0e2c9b2d19929f58363d08e7ff40d43b013..351ef1c7c7aebb698a5d41689352a913d0b950e8 100644
--- a/cmake/external/cryptopp.cmake
+++ b/cmake/external/cryptopp.cmake
@@ -22,23 +22,8 @@ SET(CRYPTOPP_TAG CRYPTOPP_8_2_0)
 
 IF(WIN32)
   SET(CRYPTOPP_LIBRARIES "${CRYPTOPP_INSTALL_DIR}/lib/cryptopp-static.lib" CACHE FILEPATH "cryptopp library." FORCE)
-  SET(CRYPTOPP_CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
-  set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /MT")
-  set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /MTd")
-  set(CompilerFlags
-      CMAKE_CXX_FLAGS
-      CMAKE_CXX_FLAGS_DEBUG
-      CMAKE_CXX_FLAGS_RELEASE
-      CMAKE_C_FLAGS
-      CMAKE_C_FLAGS_DEBUG
-      CMAKE_C_FLAGS_RELEASE
-      )
-  foreach(CompilerFlag ${CompilerFlags})
-    string(REPLACE "/MD" "/MT" ${CompilerFlag} "${${CompilerFlag}}")
-  endforeach()
 ELSE(WIN32)
   SET(CRYPTOPP_LIBRARIES "${CRYPTOPP_INSTALL_DIR}/lib/libcryptopp.a" CACHE FILEPATH "cryptopp library." FORCE)
-  SET(CRYPTOPP_CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
 ENDIF(WIN32)
 
 set(CRYPTOPP_CMAKE_ARGS ${COMMON_CMAKE_ARGS}
@@ -48,7 +33,7 @@ set(CRYPTOPP_CMAKE_ARGS ${COMMON_CMAKE_ARGS}
                         -DCMAKE_INSTALL_LIBDIR=${CRYPTOPP_INSTALL_DIR}/lib
                         -DCMAKE_INSTALL_PREFIX=${CRYPTOPP_INSTALL_DIR}
                         -DCMAKE_BUILD_TYPE=${THIRD_PARTY_BUILD_TYPE}
-                        -DCMAKE_CXX_FLAGS=${CRYPTOPP_CMAKE_CXX_FLAGS}
+                        -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
                         -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE}
                         -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
                         -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
diff --git a/cmake/flags.cmake b/cmake/flags.cmake
index 415e07c75425345f5f1ad29a8544e02a5bfb12e4..ed0bf8396b3faa22350811cf1711f5d1e5b89998 100644
--- a/cmake/flags.cmake
+++ b/cmake/flags.cmake
@@ -90,20 +90,6 @@ macro(safe_set_nvflag flag_name)
     endif()
 endmacro()
 
-macro(safe_set_static_flag) # set c_flags and cxx_flags to static or shared
-    if (BUILD_SHARED_LIBS)
-        return() # if build shared libs, the flags keep same with '/MD'
-    endif(BUILD_SHARED_LIBS)
-    foreach(flag_var
-        CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
-        CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO
-        CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
-        CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO)
-        if(${flag_var} MATCHES "/MD")
-            string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
-        endif(${flag_var} MATCHES "/MD")
-    endforeach(flag_var)
-endmacro()
 
 CHECK_CXX_SYMBOL_EXISTS(UINT64_MAX "stdint.h" UINT64_MAX_EXISTS)
 if(NOT UINT64_MAX_EXISTS)
@@ -229,20 +215,3 @@ endforeach()
 
 set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} ${SAFE_GPU_COMMON_FLAGS}")
 
-
-if(WIN32)
-    # windows build turn off warnings.
-    if(MSVC_STATIC_CRT)
-        safe_set_static_flag()
-    endif()
-    foreach(flag_var
-        CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
-        CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO
-        CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
-        CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO)
-        string(REGEX REPLACE "/W[1-4]" " /W0 " ${flag_var} "${${flag_var}}")
-    endforeach(flag_var)
-    foreach(flag_var CMAKE_CXX_FLAGS CMAKE_C_FLAGS)
-        set(${flag_var} "${${flag_var}} /w")
-    endforeach(flag_var)
-endif()
diff --git a/cmake/inference_lib.cmake b/cmake/inference_lib.cmake
index e3c2409f103d36befed29176b354f77257fea9ec..f19f0eb43d34bd0f3748d7beb1fcf403fa1c9037 100644
--- a/cmake/inference_lib.cmake
+++ b/cmake/inference_lib.cmake
@@ -24,7 +24,7 @@ set(PADDLE_INFERENCE_INSTALL_DIR "${CMAKE_BINARY_DIR}/paddle_inference_install_d
 # so the generation of static lib is temporarily turned off.
 if(WIN32)
     #todo: remove the option
-    option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." OFF)
+    option(WITH_STATIC_LIB "Compile demo with static/shared library, default use dynamic." OFF)
     if(NOT PYTHON_EXECUTABLE)
         FIND_PACKAGE(PythonInterp REQUIRED)
     endif()
@@ -165,25 +165,22 @@ copy_part_of_thrid_party(inference_lib_dist ${PADDLE_INFERENCE_INSTALL_DIR})
 set(src_dir "${PADDLE_SOURCE_DIR}/paddle/fluid")
 if(WIN32)
     if(WITH_STATIC_LIB)
-        set(paddle_fluid_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/libpaddle_fluid.lib)
+        set(paddle_fluid_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/libpaddle_fluid.lib
+            ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/paddle_fluid.*)
     else()
         set(paddle_fluid_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/paddle_fluid.dll
-             ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/paddle_fluid.lib)
+            ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/paddle_fluid.lib)
     endif()
+    copy(inference_lib_dist
+        SRCS ${src_dir}/inference/api/paddle_*.h ${paddle_fluid_lib}
+        DSTS ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/include ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/lib
+        ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/lib)
 else(WIN32)
     set(paddle_fluid_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/libpaddle_fluid.*)
-endif(WIN32)
-
-if(WIN32 AND NOT WITH_STATIC_LIB)
-    copy(inference_lib_dist
-        SRCS ${src_dir}/inference/api/paddle_*.h ${paddle_fluid_lib}
-        DSTS ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/include ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/lib
-        ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/lib)
-else()
-    copy(inference_lib_dist
+    copy(inference_lib_dist
         SRCS ${src_dir}/inference/api/paddle_*.h ${paddle_fluid_lib}
         DSTS ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/include ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/lib)
-endif()
+endif(WIN32)
 
 copy(inference_lib_dist
     SRCS ${CMAKE_BINARY_DIR}/paddle/fluid/framework/framework.pb.h
@@ -211,12 +208,12 @@ add_custom_target(fluid_lib_dist ALL DEPENDS ${fluid_lib_deps})
 
 set(dst_dir "${PADDLE_INSTALL_DIR}/paddle/fluid")
 set(module "inference")
-if(WIN32 AND NOT WITH_STATIC_LIB)
+if(WIN32)
     copy(fluid_lib_dist
         SRCS ${src_dir}/${module}/*.h ${src_dir}/${module}/api/paddle_*.h ${paddle_fluid_lib}
         DSTS ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
     )
-else()
+  else()
     copy(fluid_lib_dist
         SRCS ${src_dir}/${module}/*.h ${src_dir}/${module}/api/paddle_*.h ${paddle_fluid_lib}
         DSTS ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
diff --git a/paddle/fluid/inference/CMakeLists.txt b/paddle/fluid/inference/CMakeLists.txt
index 9dc96fdfe8622e3e78673664637ab50970fe93c6..cf6fcb7b64365b382c648dd83639e0c44670014d 100644
--- a/paddle/fluid/inference/CMakeLists.txt
+++ b/paddle/fluid/inference/CMakeLists.txt
@@ -44,10 +44,11 @@ add_subdirectory(api)
 set(STATIC_INFERENCE_API paddle_inference_api analysis_predictor
         zero_copy_tensor reset_tensor_array
         analysis_config paddle_pass_builder activation_functions ${mkldnn_quantizer_cfg})
-if(WIN32)
+# TODO(xingzhaolong, jiweibo): remove this and create_static_lib(paddle_fluid) on windows GPU
+if(WIN32 AND WITH_GPU)
     cc_library(paddle_fluid DEPS ${fluid_modules} ${STATIC_INFERENCE_API})
 else()
-    create_static_lib(paddle_fluid ${fluid_modules} ${STATIC_INFERENCE_API})
+    create_static_lib(paddle_fluid ${fluid_modules} ${STATIC_INFERENCE_API})
 endif()
 
 if(NOT APPLE AND NOT WIN32)
diff --git a/paddle/fluid/inference/api/demo_ci/CMakeLists.txt b/paddle/fluid/inference/api/demo_ci/CMakeLists.txt
index 08a1a5428193c2d506f511112e4a26d73c382ff1..6a3760e1f749b2b4875df00b01def57c979b3c93 100644
--- a/paddle/fluid/inference/api/demo_ci/CMakeLists.txt
+++ b/paddle/fluid/inference/api/demo_ci/CMakeLists.txt
@@ -51,8 +51,8 @@ if (WIN32)
     set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} /bigobj /MT")
     set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /bigobj /MTd")
     set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /bigobj /MT")
+    safe_set_static_flag()
     if (WITH_STATIC_LIB)
-        safe_set_static_flag()
         add_definitions(-DSTATIC_LIB)
     endif()
 endif()
@@ -136,7 +136,7 @@ else()
     set(DEPS ${DEPS}
         ${MATH_LIB} ${MKLDNN_LIB} glog gflags_static libprotobuf xxhash ${EXTERNAL_LIB})
-    set(DEPS ${DEPS} libcmt shlwapi.lib)
+    set(DEPS ${DEPS} shlwapi.lib)
 endif(NOT WIN32)
 
 if(WITH_GPU)
diff --git a/paddle/fluid/inference/api/demo_ci/run.sh b/paddle/fluid/inference/api/demo_ci/run.sh
index 6b7fb0f619a67cc01dac2b09525bb2bfa05207ba..a3e7bec398af7e193a75395ad40175336f5f7503 100755
--- a/paddle/fluid/inference/api/demo_ci/run.sh
+++ b/paddle/fluid/inference/api/demo_ci/run.sh
@@ -6,7 +6,7 @@ TEST_GPU_CPU=$3 # test both GPU/CPU mode or only CPU mode
 DATA_DIR=$4 # dataset
 TENSORRT_INCLUDE_DIR=$5 # TensorRT header file dir, default to /usr/local/TensorRT/include
 TENSORRT_LIB_DIR=$6 # TensorRT lib file dir, default to /usr/local/TensorRT/lib
-
+MSVC_STATIC_CRT=$7
 inference_install_dir=${PADDLE_ROOT}/build/paddle_inference_install_dir
 
 cd `dirname $0`
@@ -66,43 +66,54 @@
 mkdir -p build
 cd build
 rm -rf *
-if [ $(echo `uname` | grep "Win") != "" ]; then
-    # -----simple_on_word2vec on windows-----
-    cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \
-        -DWITH_MKL=$TURN_ON_MKL \
-        -DDEMO_NAME=simple_on_word2vec \
-        -DWITH_GPU=$TEST_GPU_CPU \
-        -DWITH_STATIC_LIB=OFF
-    msbuild /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
-    Release/simple_on_word2vec.exe \
-        --dirname=$DATA_DIR/word2vec/word2vec.inference.model \
-        --use_gpu=False
-    if [ $? -ne 0 ]; then
-        echo "simple_on_word2vec demo runs fail."
-        exit 1
-    fi
-
-    # -----vis_demo on windows-----
-    rm -rf *
-    cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \
-        -DWITH_MKL=$TURN_ON_MKL \
-        -DDEMO_NAME=vis_demo \
-        -DWITH_GPU=$TEST_GPU_CPU \
-        -DWITH_STATIC_LIB=OFF
-    msbuild /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
-    for vis_demo_name in $vis_demo_list; do
-        Release/vis_demo.exe \
-            --modeldir=$DATA_DIR/$vis_demo_name/model \
-            --data=$DATA_DIR/$vis_demo_name/data.txt \
-            --refer=$DATA_DIR/$vis_demo_name/result.txt \
-            --use_gpu=False
-        if [ $? -ne 0 ]; then
-            echo "vis demo $vis_demo_name runs fail."
-            exit 1
+for WITH_STATIC_LIB in ON OFF; do
+    if [ $(echo `uname` | grep "Win") != "" ]; then
+        # TODO(xingzhaolong, jiweibo): remove this if windows GPU library is ready.
+        if [ $TEST_GPU_CPU == ON ] && [ $WITH_STATIC_LIB == ON ]; then
+            return 0
         fi
-    done
-else
-    for WITH_STATIC_LIB in ON OFF; do
+
+        # -----simple_on_word2vec on windows-----
+        cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \
+            -DWITH_MKL=$TURN_ON_MKL \
+            -DDEMO_NAME=simple_on_word2vec \
+            -DWITH_GPU=$TEST_GPU_CPU \
+            -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
+            -DMSVC_STATIC_CRT=$MSVC_STATIC_CRT
+        msbuild /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
+        for use_gpu in $use_gpu_list; do
+            Release/simple_on_word2vec.exe \
+                --dirname=$DATA_DIR/word2vec/word2vec.inference.model \
+                --use_gpu=$use_gpu
+            if [ $? -ne 0 ]; then
+                echo "simple_on_word2vec demo runs fail."
+                exit 1
+            fi
+        done
+
+        # -----vis_demo on windows-----
+        rm -rf *
+        cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \
+            -DWITH_MKL=$TURN_ON_MKL \
+            -DDEMO_NAME=vis_demo \
+            -DWITH_GPU=$TEST_GPU_CPU \
+            -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
+            -DMSVC_STATIC_CRT=$MSVC_STATIC_CRT
+        msbuild /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
+        for use_gpu in $use_gpu_list; do
+            for vis_demo_name in $vis_demo_list; do
+                Release/vis_demo.exe \
+                    --modeldir=$DATA_DIR/$vis_demo_name/model \
+                    --data=$DATA_DIR/$vis_demo_name/data.txt \
+                    --refer=$DATA_DIR/$vis_demo_name/result.txt \
+                    --use_gpu=$use_gpu
+                if [ $? -ne 0 ]; then
+                    echo "vis demo $vis_demo_name runs fail."
+                    exit 1
+                fi
+            done
+        done
+    else
         # -----simple_on_word2vec on linux/mac-----
         rm -rf *
         cmake .. -DPADDLE_LIB=${inference_install_dir} \
@@ -123,7 +134,6 @@
                 fi
             done
         fi
-
         # ---------vis_demo on linux/mac---------
         rm -rf *
         cmake .. -DPADDLE_LIB=${inference_install_dir} \
@@ -145,7 +155,6 @@
                 fi
             done
         done
-
         # --------tensorrt mobilenet on linux/mac------
         if [ $USE_TENSORRT == ON -a $TEST_GPU_CPU == ON ]; then
             rm -rf *
@@ -167,6 +176,6 @@
                 exit 1
             fi
         fi
-    done
-fi
+    fi
+done
 set +x
diff --git a/paddle/fluid/inference/api/paddle_infer_declare.h b/paddle/fluid/inference/api/paddle_infer_declare.h
index 39c9653f16cefb71a9f2a0ddcc08723d189d411c..e8525f440fe7f2d54d045eedb79aed228513e550 100644
--- a/paddle/fluid/inference/api/paddle_infer_declare.h
+++ b/paddle/fluid/inference/api/paddle_infer_declare.h
@@ -17,11 +17,7 @@
 #if defined(_WIN32)
 #ifndef PD_INFER_DECL
 #ifdef PADDLE_DLL_INFERENCE
-#ifndef PADDLE_ON_INFERENCE
-#define PD_INFER_DECL
-#else
 #define PD_INFER_DECL __declspec(dllexport)
-#endif  // PADDLE_ON_INFERENCE
 #else
 #define PD_INFER_DECL __declspec(dllimport)
 #endif  // PADDLE_DLL_INFERENCE
diff --git a/paddle/scripts/paddle_build.bat b/paddle/scripts/paddle_build.bat
index 60e4496bc545759173b68efbf85922efe8976fa4..524c086c07925c880dfb46a70a1f930686bae867 100644
--- a/paddle/scripts/paddle_build.bat
+++ b/paddle/scripts/paddle_build.bat
@@ -26,6 +26,7 @@ wmic process where name="op_function_generator.exe" call terminate 2>NUL
 rem ------initialize common variable------
 if not defined CUDA_TOOLKIT_ROOT_DIR set CUDA_TOOLKIT_ROOT_DIR="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v10.0"
 if not defined BRANCH set BRANCH=develop
+if not defined TENSORRT_ROOT set TENSORRT_ROOT="C:/TensorRT-5.1.5.0"
 if not defined WITH_MKL set WITH_MKL=ON
 if not defined WITH_GPU set WITH_GPU=OFF
 if not defined WITH_AVX set WITH_AVX=ON
@@ -33,9 +34,11 @@ if not defined WITH_TESTING set WITH_TESTING=ON
 if not defined WITH_PYTHON set WITH_PYTHON=ON
 if not defined ON_INFER set ON_INFER=ON
 if not defined WITH_INFERENCE_API_TEST set WITH_INFERENCE_API_TEST=ON
+if not defined WITH_STATIC_LIB set WITH_STATIC_LIB=ON
 if not defined WITH_CACHE set WITH_CACHE=ON
 if not defined WITH_TPCACHE set WITH_TPCACHE=ON
+
 
 rem -------set cache build work directory-----------
 if "%WITH_CACHE%"=="OFF" (
     rmdir build /s/q
@@ -99,6 +102,7 @@ set CLCACHE_OBJECT_CACHE_TIMEOUT_MS=1000000
 :: set maximum cache size to 20G
 clcache.exe -M 21474836480
 
+
 rem ------set cache third_party------
 set cache_dir=%work_dir:Paddle=cache%
 dir %cache_dir%
@@ -138,6 +142,7 @@ exit /b 1
 :CASE_wincheck_mkl
 set WITH_MKL=ON
 set WITH_GPU=OFF
+set MSVC_STATIC_CRT=ON
 call :cmake || goto cmake_error
 call :build || goto build_error
 call :test_whl_pacakage || goto test_whl_pacakage_error
@@ -149,11 +154,13 @@ goto:success
 :CASE_wincheck_openblas
 set WITH_MKL=OFF
 set WITH_GPU=ON
+set MSVC_STATIC_CRT=OFF
 rem Temporarily turn off WITH_INFERENCE_API_TEST on GPU due to compile hang
 set WITH_INFERENCE_API_TEST=OFF
 call :cmake || goto cmake_error
 call :build || goto build_error
 call :test_whl_pacakage || goto test_whl_pacakage_error
+:: call :test_inference || goto test_inference_error
 goto:success
 
 rem "Other configurations are added here"
@@ -172,12 +179,14 @@ set start=%start:~4,10%
 echo cmake .. -G "Visual Studio 14 2015 Win64" -DWITH_AVX=%WITH_AVX% -DWITH_GPU=%WITH_GPU% -DWITH_MKL=%WITH_MKL% ^
 -DWITH_TESTING=%WITH_TESTING% -DWITH_PYTHON=%WITH_PYTHON% -DCUDA_TOOLKIT_ROOT_DIR=%CUDA_TOOLKIT_ROOT_DIR% ^
 -DON_INFER=%ON_INFER% -DWITH_INFERENCE_API_TEST=%WITH_INFERENCE_API_TEST% -DTHIRD_PARTY_PATH=%THIRD_PARTY_PATH% ^
--DINFERENCE_DEMO_INSTALL_DIR=%INFERENCE_DEMO_INSTALL_DIR%
+-DINFERENCE_DEMO_INSTALL_DIR=%INFERENCE_DEMO_INSTALL_DIR% -DWITH_STATIC_LIB=%WITH_STATIC_LIB% ^
+-DTENSORRT_ROOT=%TENSORRT_ROOT% -DMSVC_STATIC_CRT=%MSVC_STATIC_CRT%
 
 cmake .. -G "Visual Studio 14 2015 Win64" -DWITH_AVX=%WITH_AVX% -DWITH_GPU=%WITH_GPU% -DWITH_MKL=%WITH_MKL% ^
 -DWITH_TESTING=%WITH_TESTING% -DWITH_PYTHON=%WITH_PYTHON% -DCUDA_TOOLKIT_ROOT_DIR=%CUDA_TOOLKIT_ROOT_DIR% ^
 -DON_INFER=%ON_INFER% -DWITH_INFERENCE_API_TEST=%WITH_INFERENCE_API_TEST% -DTHIRD_PARTY_PATH=%THIRD_PARTY_PATH% ^
--DINFERENCE_DEMO_INSTALL_DIR=%INFERENCE_DEMO_INSTALL_DIR%
+-DINFERENCE_DEMO_INSTALL_DIR=%INFERENCE_DEMO_INSTALL_DIR% -DWITH_STATIC_LIB=%WITH_STATIC_LIB% ^
+-DTENSORRT_ROOT=%TENSORRT_ROOT% -DMSVC_STATIC_CRT=%MSVC_STATIC_CRT%
 goto:eof
 
 :cmake_error
@@ -282,7 +291,9 @@ dir %THIRD_PARTY_PATH:/=\%\install\mklml\lib
 dir %THIRD_PARTY_PATH:/=\%\install\mkldnn\bin
 dir %THIRD_PARTY_PATH:/=\%\install\warpctc\bin
 
-set PATH=%THIRD_PARTY_PATH:/=\%\install\openblas\lib;%THIRD_PARTY_PATH:/=\%\install\openblas\bin;%THIRD_PARTY_PATH:/=\%\install\zlib\bin;%THIRD_PARTY_PATH:/=\%\install\mklml\lib;%THIRD_PARTY_PATH:/=\%\install\mkldnn\bin;%THIRD_PARTY_PATH:/=\%\install\warpctc\bin;%PATH%
+set PATH=%THIRD_PARTY_PATH:/=\%\install\openblas\lib;%THIRD_PARTY_PATH:/=\%\install\openblas\bin;^
+%THIRD_PARTY_PATH:/=\%\install\zlib\bin;%THIRD_PARTY_PATH:/=\%\install\mklml\lib;^
+%THIRD_PARTY_PATH:/=\%\install\mkldnn\bin;%THIRD_PARTY_PATH:/=\%\install\warpctc\bin;%PATH%
 ctest.exe --output-on-failure -C Release -j 8 --repeat until-pass:4 after-timeout:4
 goto:eof
 
@@ -305,7 +316,7 @@ set end=%end:~4,10%
 call :timestamp "%start%" "%end%" "TestCases Total"
 
 cd %work_dir%\paddle\fluid\inference\api\demo_ci
-%cache_dir%\tools\busybox64.exe bash run.sh %work_dir:\=/% %WITH_MKL% %WITH_GPU% %cache_dir:\=/%/inference_demo
+%cache_dir%\tools\busybox64.exe bash run.sh %work_dir:\=/% %WITH_MKL% %WITH_GPU% %cache_dir:\=/%/inference_demo %TENSORRT_ROOT%/include %TENSORRT_ROOT%/lib %MSVC_STATIC_CRT%
 goto:eof
 
 :test_inference_error