diff --git a/paddle/fluid/inference/tests/test.cmake b/paddle/fluid/inference/tests/test.cmake index b35ea51833ff1ff3b5654b8ec53f4123ed3094e3..41b78d39a2594cbe39bc0d0defef7a24047674dc 100644 --- a/paddle/fluid/inference/tests/test.cmake +++ b/paddle/fluid/inference/tests/test.cmake @@ -27,7 +27,7 @@ function(inference_download_and_uncompress INSTALL_DIR URL FILENAME) message(STATUS "Download inference test stuff from ${URL}/${FILENAME}") string(REGEX REPLACE "[-%./\\]" "_" FILENAME_EX ${FILENAME}) string(REGEX MATCH "[^/\\]+$" DOWNLOAD_NAME ${FILENAME}) - set(EXTERNAL_PROJECT_NAME "extern_inference_download_${FILENAME_EX}") + set(EXTERNAL_PROJECT_NAME "extern_download_${FILENAME_EX}") set(UNPACK_DIR "${INSTALL_DIR}/src/${EXTERNAL_PROJECT_NAME}") ExternalProject_Add( ${EXTERNAL_PROJECT_NAME} diff --git a/paddle/scripts/paddle_build.bat b/paddle/scripts/paddle_build.bat index d516649e44e0baec594901b85bb4f397a7081e43..a50b764c1cf8b73a6b3a721f5ac8edfb4aa6acb2 100644 --- a/paddle/scripts/paddle_build.bat +++ b/paddle/scripts/paddle_build.bat @@ -33,21 +33,28 @@ rem ------initialize common variable------ if not defined GENERATOR set GENERATOR="Visual Studio 14 2015 Win64" if not defined BRANCH set BRANCH=develop if not defined WITH_TENSORRT set WITH_TENSORRT=ON -if not defined TENSORRT_ROOT set TENSORRT_ROOT="D:/TensorRT" +if not defined TENSORRT_ROOT set TENSORRT_ROOT=D:/TensorRT +if not defined CUDA_ARCH_NAME set CUDA_ARCH_NAME=Auto +if not defined WITH_GPU set WITH_GPU=ON if not defined WITH_MKL set WITH_MKL=ON if not defined WITH_AVX set WITH_AVX=ON if not defined WITH_TESTING set WITH_TESTING=ON +if not defined MSVC_STATIC_CRT set MSVC_STATIC_CRT=OFF if not defined WITH_PYTHON set WITH_PYTHON=ON if not defined ON_INFER set ON_INFER=ON if not defined WITH_INFERENCE_API_TEST set WITH_INFERENCE_API_TEST=ON if not defined WITH_STATIC_LIB set WITH_STATIC_LIB=ON -if not defined WITH_CACHE set WITH_CACHE=OFF if not defined WITH_TPCACHE set WITH_TPCACHE=ON +if not defined 
WITH_CLCACHE set WITH_CLCACHE=OFF +if not defined WITH_CACHE set WITH_CACHE=OFF if not defined WITH_UNITY_BUILD set WITH_UNITY_BUILD=OFF if not defined INFERENCE_DEMO_INSTALL_DIR set INFERENCE_DEMO_INSTALL_DIR=%cache_dir:\=/%/inference_demo +if not defined LOG_LEVEL set LOG_LEVEL=normal -rem -------set cache build work directory----------- +rem -------set cache build directory----------- rmdir build\python /s/q +rmdir build\paddle_install_dir /s/q +rmdir build\paddle_inference_install_dir /s/q del build\CMakeCache.txt : set CI_SKIP_CPP_TEST if only *.py changed @@ -149,12 +156,11 @@ echo "wincheck_mkl: run Windows MKL/GPU/UnitTest CI tasks on Windows" echo "wincheck_openbals: run Windows OPENBLAS/CPU CI tasks on Windows" exit /b 1 +rem ------PR CI windows check for MKL/GPU---------- :CASE_wincheck_mkl - -rem ------initialize cmake variable for mkl------ set WITH_MKL=ON -set WITH_GPU=OFF -set MSVC_STATIC_CRT=ON +set WITH_GPU=ON +set MSVC_STATIC_CRT=OFF call :cmake || goto cmake_error call :build || goto build_error @@ -164,12 +170,11 @@ call :test_inference || goto test_inference_error :: call :check_change_of_unittest || goto check_change_of_unittest_error goto:success +rem ------PR CI windows check for OPENBLAS/CPU------ :CASE_wincheck_openblas - -rem ------initialize cmake variable for openblas------ set WITH_MKL=ON -set WITH_GPU=ON -set MSVC_STATIC_CRT=OFF +set WITH_GPU=OFF +set MSVC_STATIC_CRT=ON call :cmake || goto cmake_error call :build || goto build_error @@ -179,6 +184,38 @@ call :test_inference || goto test_inference_error :: call :check_change_of_unittest || goto check_change_of_unittest_error goto:success +rem ------Build windows avx whl package------ +:CASE_build_avx_whl +set WITH_AVX=ON +set ON_INFER=OFF +set CUDA_ARCH_NAME=All + +call :cmake || goto cmake_error +call :build || goto build_error +call :test_whl_pacakage || goto test_whl_pacakage_error +goto:success + +rem ------Build windows no-avx whl package------ +:CASE_build_no_avx_whl +set 
WITH_AVX=OFF +set ON_INFER=OFF +set CUDA_ARCH_NAME=All + +call :cmake || goto cmake_error +call :build || goto build_error +call :test_whl_pacakage || goto test_whl_pacakage_error +goto:success + +rem ------Build windows inference library------ +:CASE_build_inference_lib +set WITH_PYTHON=OFF +set CUDA_ARCH_NAME=All + +call :cmake || goto cmake_error +call :build || goto build_error +call :zip_file || goto zip_file_error +goto:success + rem "Other configurations are added here" rem :CASE_wincheck_others rem call ... @@ -196,7 +233,7 @@ set start=%start:~4,10% @ECHO ON if not defined CUDA_TOOLKIT_ROOT_DIR set CUDA_TOOLKIT_ROOT_DIR=C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v10.0 -set PATH=%CUDA_TOOLKIT_ROOT_DIR%\bin;%CUDA_TOOLKIT_ROOT_DIR%\libnvvp;%PATH% +set PATH=%TENSORRT_ROOT:/=\%\lib;%CUDA_TOOLKIT_ROOT_DIR%\bin;%CUDA_TOOLKIT_ROOT_DIR%\libnvvp;%PATH% set CUDA_PATH=%CUDA_TOOLKIT_ROOT_DIR% rem ------set third_party cache dir------ @@ -239,15 +276,15 @@ echo cmake .. -G %GENERATOR% -DWITH_AVX=%WITH_AVX% -DWITH_GPU=%WITH_GPU% -DWITH_ -DWITH_TESTING=%WITH_TESTING% -DWITH_PYTHON=%WITH_PYTHON% -DPYTHON_EXECUTABLE=%PYTHON_EXECUTABLE% -DON_INFER=%ON_INFER% ^ -DWITH_INFERENCE_API_TEST=%WITH_INFERENCE_API_TEST% -DTHIRD_PARTY_PATH=%THIRD_PARTY_PATH% ^ -DINFERENCE_DEMO_INSTALL_DIR=%INFERENCE_DEMO_INSTALL_DIR% -DWITH_STATIC_LIB=%WITH_STATIC_LIB% ^ --DWITH_TENSORRT=%WITH_TENSORRT% -DTENSORRT_ROOT=%TENSORRT_ROOT% -DMSVC_STATIC_CRT=%MSVC_STATIC_CRT% ^ --DWITH_UNITY_BUILD=%WITH_UNITY_BUILD% +-DWITH_TENSORRT=%WITH_TENSORRT% -DTENSORRT_ROOT="%TENSORRT_ROOT%" -DMSVC_STATIC_CRT=%MSVC_STATIC_CRT% ^ +-DWITH_UNITY_BUILD=%WITH_UNITY_BUILD% -DCUDA_ARCH_NAME=%CUDA_ARCH_NAME% cmake .. 
-G %GENERATOR% -DWITH_AVX=%WITH_AVX% -DWITH_GPU=%WITH_GPU% -DWITH_MKL=%WITH_MKL% ^ -DWITH_TESTING=%WITH_TESTING% -DWITH_PYTHON=%WITH_PYTHON% -DPYTHON_EXECUTABLE=%PYTHON_EXECUTABLE% -DON_INFER=%ON_INFER% ^ -DWITH_INFERENCE_API_TEST=%WITH_INFERENCE_API_TEST% -DTHIRD_PARTY_PATH=%THIRD_PARTY_PATH% ^ -DINFERENCE_DEMO_INSTALL_DIR=%INFERENCE_DEMO_INSTALL_DIR% -DWITH_STATIC_LIB=%WITH_STATIC_LIB% ^ --DWITH_TENSORRT=%WITH_TENSORRT% -DTENSORRT_ROOT=%TENSORRT_ROOT% -DMSVC_STATIC_CRT=%MSVC_STATIC_CRT% ^ --DWITH_UNITY_BUILD=%WITH_UNITY_BUILD% +-DWITH_TENSORRT=%WITH_TENSORRT% -DTENSORRT_ROOT="%TENSORRT_ROOT%" -DMSVC_STATIC_CRT=%MSVC_STATIC_CRT% ^ +-DWITH_UNITY_BUILD=%WITH_UNITY_BUILD% -DCUDA_ARCH_NAME=%CUDA_ARCH_NAME% goto:eof :cmake_error @@ -286,9 +323,9 @@ rem clcache.exe -z echo Build Paddle the %build_times% time: if "%WITH_CLCACHE%"=="OFF" ( - msbuild /m:%PARALLEL_PROJECT_COUNT% /p:Configuration=Release /verbosity:normal paddle.sln + msbuild /m:%PARALLEL_PROJECT_COUNT% /p:Configuration=Release /verbosity:%LOG_LEVEL% paddle.sln ) else ( - msbuild /m:%PARALLEL_PROJECT_COUNT% /p:TrackFileAccess=false /p:CLToolExe=clcache.exe /p:CLToolPath=%PYTHON_ROOT%\Scripts /p:Configuration=Release /verbosity:normal paddle.sln + msbuild /m:%PARALLEL_PROJECT_COUNT% /p:TrackFileAccess=false /p:CLToolExe=clcache.exe /p:CLToolPath=%PYTHON_ROOT%\Scripts /p:Configuration=Release /verbosity:%LOG_LEVEL% paddle.sln ) if %ERRORLEVEL% NEQ 0 ( @@ -328,19 +365,21 @@ setlocal enabledelayedexpansion for /F %%# in ('wmic os get localdatetime^|findstr 20') do set end=%%# set end=%end:~4,10% call :timestamp "%start%" "%end%" "Build" + tree /F %cd%\paddle_inference_install_dir\paddle %cache_dir%\tools\busybox64.exe du -h -d 0 -k %cd%\paddle_inference_install_dir\paddle\lib > lib_size.txt set /p libsize=< lib_size.txt - for /F %%i in ("%libsize%") do ( set /a libsize_m=%%i/1024 echo "Windows Paddle_Inference Size: !libsize_m!M" echo ipipe_log_param_Windows_Paddle_Inference_Size: !libsize_m!M ) + 
%cache_dir%\tools\busybox64.exe du -h -d 0 %cd%\python\dist > whl_size.txt set /p whlsize=< whl_size.txt for /F %%i in ("%whlsize%") do echo "Windows PR whl Size: %%i" for /F %%i in ("%whlsize%") do echo ipipe_log_param_Windows_PR_whl_Size: %%i + dir /s /b python\dist\*.whl > whl_file.txt set /p PADDLE_WHL_FILE_WIN=< whl_file.txt @@ -551,6 +590,23 @@ goto:eof :check_change_of_unittest_error exit /b 1 +rem --------------------------------------------------------------------------------------------- +:zip_file +tree /F %cd%\paddle_inference_install_dir\paddle +if exist paddle_inference.zip del paddle_inference.zip +python -c "import shutil;shutil.make_archive('paddle_inference', 'zip', root_dir='paddle_inference_install_dir')" +%cache_dir%\tools\busybox64.exe du -h -k paddle_inference.zip > lib_size.txt +set /p libsize=< lib_size.txt +for /F %%i in ("%libsize%") do ( + set /a libsize_m=%%i/1024 + echo "Windows Paddle_Inference Size: !libsize_m!M" + echo ipipe_log_param_Windows_Paddle_Inference_Size: !libsize_m!M +) +goto:eof + +:zip_file_error +echo Zip inference library failed! +exit /b 1 :timestamp setlocal enabledelayedexpansion