diff --git a/cmake/generic.cmake b/cmake/generic.cmake index b0a6dfe29020781e57d57861137861366864abdb..3bdf7c209b42ba514017c4a9a8beaba7b299b481 100644 --- a/cmake/generic.cmake +++ b/cmake/generic.cmake @@ -446,6 +446,9 @@ function(nv_library TARGET_NAME) message(FATAL "Please specify source file or library in nv_library.") endif() endif(nv_library_SRCS) + if (WIN32) + set_target_properties(${TARGET_NAME} PROPERTIES VS_USER_PROPS ${WIN_PROPS}) + endif(WIN32) endif() endfunction(nv_library) @@ -461,6 +464,9 @@ function(nv_binary TARGET_NAME) add_dependencies(${TARGET_NAME} ${nv_binary_DEPS}) common_link(${TARGET_NAME}) endif() + if (WIN32) + set_target_properties(${TARGET_NAME} PROPERTIES VS_USER_PROPS ${WIN_PROPS}) + endif(WIN32) endif() endfunction(nv_binary) @@ -482,6 +488,9 @@ function(nv_test TARGET_NAME) set_property(TEST ${TARGET_NAME} PROPERTY ENVIRONMENT FLAGS_cpu_deterministic=true) set_property(TEST ${TARGET_NAME} PROPERTY ENVIRONMENT FLAGS_init_allocated_mem=true) set_property(TEST ${TARGET_NAME} PROPERTY ENVIRONMENT FLAGS_cudnn_deterministic=true) + if (WIN32) + set_target_properties(${TARGET_NAME} PROPERTIES VS_USER_PROPS ${WIN_PROPS}) + endif(WIN32) endif() endfunction(nv_test) diff --git a/cmake/inference_lib.cmake b/cmake/inference_lib.cmake index f19f0eb43d34bd0f3748d7beb1fcf403fa1c9037..f4603051a0e7e9f206d5344fd948f7750a09c173 100644 --- a/cmake/inference_lib.cmake +++ b/cmake/inference_lib.cmake @@ -19,9 +19,8 @@ set(PADDLE_INSTALL_DIR "${CMAKE_BINARY_DIR}/paddle_install_dir" CACHE STRING set(PADDLE_INFERENCE_INSTALL_DIR "${CMAKE_BINARY_DIR}/paddle_inference_install_dir" CACHE STRING "A path setting paddle inference shared and static libraries") -# TODO(zhaolong) -# At present, the size of static lib in Windows exceeds the system limit, -# so the generation of static lib is temporarily turned off. +# At present, the size of static lib on Windows is very large, +# so we need to reduce the library size. 
if(WIN32) #todo: remove the option option(WITH_STATIC_LIB "Compile demo with static/shared library, default use dynamic." OFF) @@ -196,7 +195,11 @@ set(PADDLE_INFERENCE_C_INSTALL_DIR "${CMAKE_BINARY_DIR}/paddle_inference_c_insta copy_part_of_thrid_party(inference_lib_dist ${PADDLE_INFERENCE_C_INSTALL_DIR}) set(src_dir "${PADDLE_SOURCE_DIR}/paddle/fluid") -set(paddle_fluid_c_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/capi/libpaddle_fluid_c.*) +if(WIN32) + set(paddle_fluid_c_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/capi/${CMAKE_BUILD_TYPE}/paddle_fluid_c.*) +else(WIN32) + set(paddle_fluid_c_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/capi/libpaddle_fluid_c.*) +endif(WIN32) copy(inference_lib_dist SRCS ${src_dir}/inference/capi/paddle_c_api.h ${paddle_fluid_c_lib} diff --git a/cmake/init.cmake b/cmake/init.cmake index 7dfe60f9dd8f021facba6925a465cb58bc5de25d..902dfb11fc0afa40ea5bd2b36543b2432a3bf384 100644 --- a/cmake/init.cmake +++ b/cmake/init.cmake @@ -26,4 +26,7 @@ if(WITH_GPU) set(CMAKE_CUDA_FLAGS_MINSIZEREL "-O1 -DNDEBUG") endif() +if(WIN32) + set(WIN_PROPS ${CMAKE_SOURCE_DIR}/cmake/paddle_win.props) +endif() diff --git a/cmake/paddle_win.props b/cmake/paddle_win.props new file mode 100644 index 0000000000000000000000000000000000000000..7e434c6d907cc40733a81d8a7cdbe7c285a2bd41 --- /dev/null +++ b/cmake/paddle_win.props @@ -0,0 +1,91 @@ + + + + + + true + false + true + false + false + InheritFromHost + InheritFromHost + InheritFromHost + InheritFromHost + InheritFromHost + + -ccbin "%(VCBinDir)" -x cu [GenerateRelocatableDeviceCode] [Include] [RequiredIncludes] [InterleaveSourceInPTX] [GPUDebugInfo] [GenerateLineInfo] [Keep] [KeepDir] [MaxRegCount] [PtxAsOptionV] [TargetMachinePlatform] [NvccCompilation] [CudaRuntime] [AdditionalOptions] + --use-local-env + [CodeGeneration] + -clean + + -Xcompiler "/EHsc [Warning] /nologo [Optimization] [ProgramDataBaseFileName] $(CudaForceSynchronousPdbWrites) [RuntimeChecks] [Runtime] [TypeInfo]" + + 
%(BaseCommandLineTemplate) [CompileOut] "%(FullPath)" + %(BaseCommandLineTemplate) [HostDebugInfo] [Emulation] [FastMath] [Defines] %(HostCommandLineTemplate) [CompileOut] "%(FullPath)" + + +# (Approximate command-line. Settings inherited from host are not visible below.) +# (Please see the output window after a build for the full command-line) + +# Driver API (NVCC Compilation Type is .cubin, .gpu, or .ptx) +set CUDAFE_FLAGS=--sdk_dir "$(WindowsSdkDir)" +"$(CudaToolkitNvccPath)" %(BuildCommandLineTemplate) %(DriverApiCommandLineTemplate) + +# Runtime API (NVCC Compilation Type is hybrid object or .c file) +set CUDAFE_FLAGS=--sdk_dir "$(WindowsSdkDir)" +"$(CudaToolkitNvccPath)" %(BuildCommandLineTemplate) %(RuntimeApiCommandLineTemplate) + + Compiling CUDA source file %(Identity)... + Skipping CUDA source file %(Identity) (excluded from build). + + + %(Filename)%(Extension).cache + $(IntDir)%(PropsCacheOutputFile) + + $(MSBuildProjectFullPath) + + + + true + $(IntDir)$(TargetName).device-link.obj + + + true + + true + + InheritFromProject + InheritFromProject + + + + + + + + + + + + + -Xcompiler "/EHsc [Warning] /nologo [Optimization] [RuntimeChecks] [Runtime] [TypeInfo]" + "$(CudaToolkitNvccPath)" -dlink [LinkOut] %(HostCommandLineTemplate) [AdditionalLibraryDirectories] [AdditionalDependencies] [AdditionalOptions] [CodeGeneration] [GPUDebugInfo] [TargetMachinePlatform] [Inputs] + +# (Approximate command-line. Settings inherited from host are not visible below.) 
+# (Please see the output window after a build for the full command-line) + +%(LinkCommandLineTemplate) + + + + + %(AdditionalLibraryDirectories);$(CudaToolkitLibDir) + + + + %(AdditionalIncludeDirectories);$(CudaToolkitIncludeDir) + + + + diff --git a/paddle/fluid/inference/CMakeLists.txt b/paddle/fluid/inference/CMakeLists.txt index cf6fcb7b64365b382c648dd83639e0c44670014d..f85e1f651165646095dae06032cdec962a37a2d1 100644 --- a/paddle/fluid/inference/CMakeLists.txt +++ b/paddle/fluid/inference/CMakeLists.txt @@ -44,14 +44,9 @@ add_subdirectory(api) set(STATIC_INFERENCE_API paddle_inference_api analysis_predictor zero_copy_tensor reset_tensor_array analysis_config paddle_pass_builder activation_functions ${mkldnn_quantizer_cfg}) -# TODO(xingzhaolong, jiweibo): remove this and create_static_lib(paddle_fluid) on windows GPU -if(WIN32 AND WITH_GPU) - cc_library(paddle_fluid DEPS ${fluid_modules} ${STATIC_INFERENCE_API}) -else() - create_static_lib(paddle_fluid ${fluid_modules} ${STATIC_INFERENCE_API}) -endif() +create_static_lib(paddle_fluid ${fluid_modules} ${STATIC_INFERENCE_API}) -if(NOT APPLE AND NOT WIN32) +if(NOT APPLE) # TODO(liuyiqu: Temporarily disable the link flag because it is not support on Mac. set(LINK_FLAGS "-Wl,--retain-symbols-file ${CMAKE_CURRENT_SOURCE_DIR}/paddle_fluid.sym") set_target_properties(paddle_fluid PROPERTIES LINK_FLAGS "${LINK_FLAGS}") diff --git a/paddle/fluid/inference/api/demo_ci/run.sh b/paddle/fluid/inference/api/demo_ci/run.sh index a3e7bec398af7e193a75395ad40175336f5f7503..6d283ca56cb652515b5ade923ea85e38142bf08c 100755 --- a/paddle/fluid/inference/api/demo_ci/run.sh +++ b/paddle/fluid/inference/api/demo_ci/run.sh @@ -68,11 +68,6 @@ rm -rf * for WITH_STATIC_LIB in ON OFF; do if [ $(echo `uname` | grep "Win") != "" ]; then - # TODO(xingzhaolong, jiweibo): remove this if windows GPU library is ready. - if [ $TEST_GPU_CPU == ON] && [ $WITH_STATIC_LIB ==ON ]; then - return 0 - fi - # -----simple_on_word2vec on windows----- cmake .. 
-G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \ -DWITH_MKL=$TURN_ON_MKL \