BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 292b24aa (unverified)
Authored by Zhou Wei on Sep 23, 2020; committed via GitHub on Sep 23, 2020.

fix bug MD of compile, And add MD/STATIC/OPENBLAS inference lib check on windows (#27051)

Parent: 41b59555
Showing 9 changed files with 105 additions and 116 deletions (+105 -116)
Changed files:
    CMakeLists.txt                                        +22  -1
    cmake/external/cryptopp.cmake                          +1  -16
    cmake/flags.cmake                                      +0  -31
    cmake/inference_lib.cmake                             +12  -15
    paddle/fluid/inference/CMakeLists.txt                  +3  -2
    paddle/fluid/inference/api/demo_ci/CMakeLists.txt      +2  -2
    paddle/fluid/inference/api/demo_ci/run.sh             +50  -41
    paddle/fluid/inference/api/paddle_infer_declare.h      +0  -4
    paddle/scripts/paddle_build.bat                       +15  -4
CMakeLists.txt
@@ -63,7 +63,28 @@ if(WIN32)
     set(CMAKE_C_FLAGS_RELEASE  "${CMAKE_C_FLAGS_RELEASE} /bigobj /MT")
     set(CMAKE_CXX_FLAGS_DEBUG  "${CMAKE_CXX_FLAGS_DEBUG} /bigobj /MTd")
     set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /bigobj /MT")
+    if (MSVC_STATIC_CRT)
+        foreach(flag_var
+            CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
+            CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO
+            CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
+            CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO)
+          if(${flag_var} MATCHES "/MD")
+            string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
+          endif()
+        endforeach(flag_var)
+    endif()
+
+    # windows build turn off warnings.
+    foreach(flag_var
+        CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
+        CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO
+        CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
+        CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO)
+      string(REGEX REPLACE "/W[1-4]" " /W0 " ${flag_var} "${${flag_var}}")
+    endforeach(flag_var)
+    foreach(flag_var CMAKE_CXX_FLAGS CMAKE_C_FLAGS)
+      set(${flag_var} "${${flag_var}} /w")
+    endforeach(flag_var)
+
     set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /wd4068 /wd4129 /wd4244 /wd4267 /wd4297 /wd4530 /wd4577 /wd4819 /wd4838 /MP")
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4068 /wd4129 /wd4244 /wd4267 /wd4297 /wd4530 /wd4577 /wd4819 /wd4838 /MP")
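The core of the hunk above is the classic MSVC runtime swap: every cached compiler-flag variable is scanned and /MD (dynamic CRT) is rewritten to /MT (static CRT). Below is a minimal standalone sketch of the same pattern; the project name, source file and the option default are illustrative, not taken from the commit.

    # Minimal sketch of the /MD -> /MT rewrite (illustrative names, not from Paddle).
    cmake_minimum_required(VERSION 3.10)
    project(crt_demo CXX)

    option(MSVC_STATIC_CRT "Link the MSVC C runtime statically (/MT, /MTd)" ON)

    if(WIN32 AND MSVC_STATIC_CRT)
      foreach(flag_var
          CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
          CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE)
        if(${flag_var} MATCHES "/MD")
          # Rewrite the dynamic-CRT flag to the static one in each cached flag string.
          string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
        endif()
      endforeach()
    endif()

    add_executable(crt_demo main.cpp)

The rewrite has to touch the per-configuration variables as well, because the Visual Studio generators inject /MD or /MDd through CMAKE_<LANG>_FLAGS_<CONFIG> rather than the base flag variables.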
cmake/external/cryptopp.cmake
@@ -22,23 +22,8 @@ SET(CRYPTOPP_TAG CRYPTOPP_8_2_0)
 IF(WIN32)
   SET(CRYPTOPP_LIBRARIES "${CRYPTOPP_INSTALL_DIR}/lib/cryptopp-static.lib" CACHE FILEPATH "cryptopp library." FORCE)
-  SET(CRYPTOPP_CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
-  set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /MT")
-  set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /MTd")
-  set(CompilerFlags
-        CMAKE_CXX_FLAGS
-        CMAKE_CXX_FLAGS_DEBUG
-        CMAKE_CXX_FLAGS_RELEASE
-        CMAKE_C_FLAGS
-        CMAKE_C_FLAGS_DEBUG
-        CMAKE_C_FLAGS_RELEASE)
-  foreach(CompilerFlag ${CompilerFlags})
-    string(REPLACE "/MD" "/MT" ${CompilerFlag} "${${CompilerFlag}}")
-  endforeach()
 ELSE(WIN32)
   SET(CRYPTOPP_LIBRARIES "${CRYPTOPP_INSTALL_DIR}/lib/libcryptopp.a" CACHE FILEPATH "cryptopp library." FORCE)
-  SET(CRYPTOPP_CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
 ENDIF(WIN32)

 set(CRYPTOPP_CMAKE_ARGS ${COMMON_CMAKE_ARGS}

@@ -48,7 +33,7 @@ set(CRYPTOPP_CMAKE_ARGS ${COMMON_CMAKE_ARGS}
                      -DCMAKE_INSTALL_LIBDIR=${CRYPTOPP_INSTALL_DIR}/lib
                      -DCMAKE_INSTALL_PREFIX=${CRYPTOPP_INSTALL_DIR}
                      -DCMAKE_BUILD_TYPE=${THIRD_PARTY_BUILD_TYPE}
-                     -DCMAKE_CXX_FLAGS=${CRYPTOPP_CMAKE_CXX_FLAGS}
+                     -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
                      -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE}
                      -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
                      -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
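The cryptopp change drops the intermediate CRYPTOPP_CMAKE_CXX_FLAGS variable and the per-project /MD to /MT rewrite, and simply forwards the parent project's (already adjusted) CMAKE_CXX_FLAGS into the external build. A generic sketch of that forwarding pattern follows; the dependency name, URL and install path are placeholders, not Paddle's cryptopp setup.

    # Illustrative only: forward the parent's compiler flags into an external build
    # so the third-party library is compiled with the same CRT setting.
    include(ExternalProject)
    ExternalProject_Add(demo_dep
      GIT_REPOSITORY https://example.com/demo_dep.git        # placeholder
      GIT_TAG        v1.0                                     # placeholder
      CMAKE_ARGS     -DCMAKE_INSTALL_PREFIX=${CMAKE_BINARY_DIR}/third_party/demo_dep
                     -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
                     -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
                     -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE}
                     -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG})

If the flag strings contain spaces, quoting or CMAKE_CACHE_ARGS may be needed; the sketch keeps the simple form used in the diff.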
cmake/flags.cmake
@@ -90,20 +90,6 @@ macro(safe_set_nvflag flag_name)
     endif()
 endmacro()

-macro(safe_set_static_flag) # set c_flags and cxx_flags to static or shared
-    if (BUILD_SHARED_LIBS)
-        return() # if build shared libs, the flags keep same with '/MD'
-    endif(BUILD_SHARED_LIBS)
-    foreach(flag_var
-        CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
-        CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO
-        CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
-        CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO)
-      if(${flag_var} MATCHES "/MD")
-        string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
-      endif(${flag_var} MATCHES "/MD")
-    endforeach(flag_var)
-endmacro()
-
 CHECK_CXX_SYMBOL_EXISTS(UINT64_MAX "stdint.h" UINT64_MAX_EXISTS)
 if(NOT UINT64_MAX_EXISTS)

@@ -229,20 +215,3 @@ endforeach()
 set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} ${SAFE_GPU_COMMON_FLAGS}")

-if(WIN32)
-# windows build turn off warnings.
-if(MSVC_STATIC_CRT)
-    safe_set_static_flag()
-endif()
-foreach(flag_var
-    CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
-    CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO
-    CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
-    CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO)
-  string(REGEX REPLACE "/W[1-4]" " /W0 " ${flag_var} "${${flag_var}}")
-endforeach(flag_var)
-foreach(flag_var CMAKE_CXX_FLAGS CMAKE_C_FLAGS)
-  set(${flag_var} "${${flag_var}} /w")
-endforeach(flag_var)
-endif()
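With safe_set_static_flag and the trailing WIN32 warning block deleted here (the equivalent logic now lives in the top-level CMakeLists.txt), it is worth noting that newer CMake has a first-class switch for the same CRT choice. This is an alternative sketch, not what the commit does, and it assumes CMake 3.15 or later:

    # Alternative sketch (assumes CMake >= 3.15, policy CMP0091): pick the MSVC
    # runtime directly instead of rewriting flag strings.
    cmake_minimum_required(VERSION 3.15)
    project(crt_policy_demo CXX)

    # Static CRT everywhere: /MT for optimized configs, /MTd for Debug.
    set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")

    add_executable(crt_policy_demo main.cpp)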
cmake/inference_lib.cmake
@@ -24,7 +24,7 @@ set(PADDLE_INFERENCE_INSTALL_DIR "${CMAKE_BINARY_DIR}/paddle_inference_install_d
 # so the generation of static lib is temporarily turned off.
 if(WIN32)
     #todo: remove the option
-    option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." OFF)
+    option(WITH_STATIC_LIB "Compile demo with static/shared library, default use dynamic." OFF)
     if(NOT PYTHON_EXECUTABLE)
         FIND_PACKAGE(PythonInterp REQUIRED)
     endif()

@@ -165,25 +165,22 @@ copy_part_of_thrid_party(inference_lib_dist ${PADDLE_INFERENCE_INSTALL_DIR})
 set(src_dir "${PADDLE_SOURCE_DIR}/paddle/fluid")
 if(WIN32)
     if(WITH_STATIC_LIB)
-        set(paddle_fluid_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/libpaddle_fluid.lib)
+        set(paddle_fluid_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/libpaddle_fluid.lib
+            ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/paddle_fluid.*)
     else()
         set(paddle_fluid_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/paddle_fluid.dll
             ${PADDLE_BINARY_DIR}/paddle/fluid/inference/${CMAKE_BUILD_TYPE}/paddle_fluid.lib)
     endif()
-else(WIN32)
-    set(paddle_fluid_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/libpaddle_fluid.*)
-endif(WIN32)
-if(WIN32 AND NOT WITH_STATIC_LIB)
     copy(inference_lib_dist
         SRCS  ${src_dir}/inference/api/paddle_*.h ${paddle_fluid_lib}
         DSTS  ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/include ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/lib
               ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/lib)
-else()
+else(WIN32)
+    set(paddle_fluid_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/libpaddle_fluid.*)
     copy(inference_lib_dist
         SRCS  ${src_dir}/inference/api/paddle_*.h ${paddle_fluid_lib}
         DSTS  ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/include ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/lib)
-endif()
+endif(WIN32)

 copy(inference_lib_dist
     SRCS ${CMAKE_BINARY_DIR}/paddle/fluid/framework/framework.pb.h

@@ -211,12 +208,12 @@ add_custom_target(fluid_lib_dist ALL DEPENDS ${fluid_lib_deps})
 set(dst_dir "${PADDLE_INSTALL_DIR}/paddle/fluid")
 set(module "inference")
-if(WIN32 AND NOT WITH_STATIC_LIB)
+if(WIN32)
     copy(fluid_lib_dist
         SRCS  ${src_dir}/${module}/*.h ${src_dir}/${module}/api/paddle_*.h ${paddle_fluid_lib}
         DSTS  ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
     )
 else()
     copy(fluid_lib_dist
         SRCS  ${src_dir}/${module}/*.h ${src_dir}/${module}/api/paddle_*.h ${paddle_fluid_lib}
         DSTS  ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module})
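The repackaging above boils down to copying different artifacts per build mode: with WITH_STATIC_LIB the archive libpaddle_fluid.lib (plus the paddle_fluid.* companions), otherwise paddle_fluid.dll together with its import library paddle_fluid.lib. With plain CMake targets the same outcome is usually expressed through install(TARGETS); a sketch with a placeholder target name, not Paddle's copy() helper:

    # Sketch only (placeholder target "demo_infer").
    if(WIN32)
      # For a SHARED target this installs the .dll (RUNTIME) and the import .lib (ARCHIVE);
      # for a STATIC target only the archive exists, so one call covers both modes.
      install(TARGETS demo_infer
              RUNTIME DESTINATION paddle/lib
              ARCHIVE DESTINATION paddle/lib
              LIBRARY DESTINATION paddle/lib)
      install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/include/ DESTINATION paddle/include)
    endif()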
paddle/fluid/inference/CMakeLists.txt
@@ -44,7 +44,8 @@ add_subdirectory(api)
 set(STATIC_INFERENCE_API paddle_inference_api analysis_predictor
         zero_copy_tensor reset_tensor_array
         analysis_config paddle_pass_builder activation_functions ${mkldnn_quantizer_cfg})
-if(WIN32)
+# TODO(xingzhaolong, jiweibo): remove this and create_static_lib(paddle_fluid) on windows GPU
+if(WIN32 AND WITH_GPU)
     cc_library(paddle_fluid DEPS ${fluid_modules} ${STATIC_INFERENCE_API})
 else()
     create_static_lib(paddle_fluid ${fluid_modules} ${STATIC_INFERENCE_API})
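The hunk narrows the cc_library fallback to Windows GPU builds only; everywhere else the fluid modules are merged into one static archive via create_static_lib. A rough generic equivalent of that choice with stock CMake is sketched below, assuming mod_a and mod_b are OBJECT libraries; all names here are placeholders and this is not Paddle's actual helper code.

    # Rough sketch of the same decision with stock CMake (placeholder names).
    if(WIN32 AND WITH_GPU)
      # Thin wrapper target: consumers still link the individual module libraries.
      add_library(demo_fluid INTERFACE)
      target_link_libraries(demo_fluid INTERFACE mod_a mod_b)
    else()
      # Merge the modules' objects into a single static archive.
      add_library(demo_fluid STATIC $<TARGET_OBJECTS:mod_a> $<TARGET_OBJECTS:mod_b>)
    endif()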
paddle/fluid/inference/api/demo_ci/CMakeLists.txt
@@ -51,8 +51,8 @@ if (WIN32)
   set(CMAKE_C_FLAGS_RELEASE  "${CMAKE_C_FLAGS_RELEASE} /bigobj /MT")
   set(CMAKE_CXX_FLAGS_DEBUG  "${CMAKE_CXX_FLAGS_DEBUG} /bigobj /MTd")
   set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /bigobj /MT")
+  safe_set_static_flag()
   if (WITH_STATIC_LIB)
-    safe_set_static_flag()
     add_definitions(-DSTATIC_LIB)
   endif()
 endif()

@@ -136,7 +136,7 @@ else()
   set(DEPS ${DEPS}
       ${MATH_LIB} ${MKLDNN_LIB}
       glog gflags_static libprotobuf xxhash ${EXTERNAL_LIB})
-  set(DEPS ${DEPS} libcmt shlwapi.lib)
+  set(DEPS ${DEPS} shlwapi.lib)
 endif(NOT WIN32)

 if(WITH_GPU)
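One plausible reading of the DEPS change: once the demo is compiled with /MT via safe_set_static_flag(), the static CRT (libcmt) is pulled in by the compiler flag itself, so it no longer needs to appear on the link line and only the real system dependency remains. A sketch with a placeholder target name:

    # Sketch (placeholder target "demo"): list only explicit Windows system libraries;
    # the CRT itself is selected by /MT or /MD, not by the link line.
    if(WIN32)
      target_link_libraries(demo shlwapi.lib)
    endif()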
paddle/fluid/inference/api/demo_ci/run.sh
@@ -6,7 +6,7 @@ TEST_GPU_CPU=$3 # test both GPU/CPU mode or only CPU mode
 DATA_DIR=$4 # dataset
 TENSORRT_INCLUDE_DIR=$5 # TensorRT header file dir, default to /usr/local/TensorRT/include
 TENSORRT_LIB_DIR=$6 # TensorRT lib file dir, default to /usr/local/TensorRT/lib
+MSVC_STATIC_CRT=$7
 inference_install_dir=${PADDLE_ROOT}/build/paddle_inference_install_dir

 cd `dirname $0`

@@ -66,21 +66,30 @@ mkdir -p build
 cd build
 rm -rf *
-if [ $(echo `uname` | grep "Win") != "" ]; then
+for WITH_STATIC_LIB in ON OFF; do
+  if [ $(echo `uname` | grep "Win") != "" ]; then
+    # TODO(xingzhaolong, jiweibo): remove this if windows GPU library is ready.
+    if [ $TEST_GPU_CPU == ON ] && [ $WITH_STATIC_LIB == ON ]; then
+      return 0
+    fi
     # -----simple_on_word2vec on windows-----
     cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \
       -DWITH_MKL=$TURN_ON_MKL \
       -DDEMO_NAME=simple_on_word2vec \
       -DWITH_GPU=$TEST_GPU_CPU \
-      -DWITH_STATIC_LIB=OFF
+      -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
+      -DMSVC_STATIC_CRT=$MSVC_STATIC_CRT
     msbuild /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
+    for use_gpu in $use_gpu_list; do
       Release/simple_on_word2vec.exe \
         --dirname=$DATA_DIR/word2vec/word2vec.inference.model \
-        --use_gpu=False
+        --use_gpu=$use_gpu
       if [ $? -ne 0 ]; then
         echo "simple_on_word2vec demo runs fail."
         exit 1
       fi
+    done

     # -----vis_demo on windows-----
     rm -rf *

@@ -88,21 +97,23 @@ if [ $(echo `uname` | grep "Win") != "" ]; then
       -DWITH_MKL=$TURN_ON_MKL \
       -DDEMO_NAME=vis_demo \
       -DWITH_GPU=$TEST_GPU_CPU \
-      -DWITH_STATIC_LIB=OFF
+      -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
+      -DMSVC_STATIC_CRT=$MSVC_STATIC_CRT
     msbuild /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
+    for use_gpu in $use_gpu_list; do
     for vis_demo_name in $vis_demo_list; do
       Release/vis_demo.exe \
         --modeldir=$DATA_DIR/$vis_demo_name/model \
         --data=$DATA_DIR/$vis_demo_name/data.txt \
         --refer=$DATA_DIR/$vis_demo_name/result.txt \
-        --use_gpu=False
+        --use_gpu=$use_gpu
       if [ $? -ne 0 ]; then
         echo "vis demo $vis_demo_name runs fail."
         exit 1
       fi
     done
-else
-  for WITH_STATIC_LIB in ON OFF; do
+    done
+  else
     # -----simple_on_word2vec on linux/mac-----
     rm -rf *
     cmake .. -DPADDLE_LIB=${inference_install_dir} \

@@ -123,7 +134,6 @@ else
       fi
     done
     fi
     # ---------vis_demo on linux/mac---------
     rm -rf *
     cmake .. -DPADDLE_LIB=${inference_install_dir} \

@@ -145,7 +155,6 @@ else
       fi
     done
   done
   # --------tensorrt mobilenet on linux/mac------
   if [ $USE_TENSORRT == ON -a $TEST_GPU_CPU == ON ]; then
     rm -rf *

@@ -167,6 +176,6 @@ else
       exit 1
     fi
   fi
-done
-fi
+fi
+done
 set +x
paddle/fluid/inference/api/paddle_infer_declare.h
@@ -17,11 +17,7 @@
 #if defined(_WIN32)
 #ifndef PD_INFER_DECL
 #ifdef PADDLE_DLL_INFERENCE
-#ifndef PADDLE_ON_INFERENCE
-#define PD_INFER_DECL
-#else
 #define PD_INFER_DECL __declspec(dllexport)
-#endif  // PADDLE_ON_INFERENCE
 #else
 #define PD_INFER_DECL __declspec(dllimport)
 #endif  // PADDLE_DLL_INFERENCE
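After this simplification, PD_INFER_DECL is dllexport whenever PADDLE_DLL_INFERENCE is defined and dllimport otherwise. On the build-system side, such an export macro is normally driven by a private compile definition on the DLL target. A sketch follows; PADDLE_DLL_INFERENCE is the real macro from the header, while the target and source names are illustrative.

    # Sketch: define the export macro only while building the DLL itself.
    add_library(demo_infer SHARED infer_api.cc)          # placeholder target/source
    target_compile_definitions(demo_infer PRIVATE PADDLE_DLL_INFERENCE)
    # Builders of demo_infer get __declspec(dllexport); consumers, which do not
    # define PADDLE_DLL_INFERENCE, get __declspec(dllimport).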
paddle/scripts/paddle_build.bat
@@ -26,6 +26,7 @@ wmic process where name="op_function_generator.exe" call terminate 2>NUL
 rem ------initialize common variable------
 if not defined CUDA_TOOLKIT_ROOT_DIR set CUDA_TOOLKIT_ROOT_DIR="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v10.0"
 if not defined BRANCH set BRANCH=develop
+if not defined TENSORRT_ROOT set TENSORRT_ROOT="C:/TensorRT-5.1.5.0"
 if not defined WITH_MKL set WITH_MKL=ON
 if not defined WITH_GPU set WITH_GPU=OFF
 if not defined WITH_AVX set WITH_AVX=ON

@@ -33,9 +34,11 @@ if not defined WITH_TESTING set WITH_TESTING=ON
 if not defined WITH_PYTHON set WITH_PYTHON=ON
 if not defined ON_INFER set ON_INFER=ON
 if not defined WITH_INFERENCE_API_TEST set WITH_INFERENCE_API_TEST=ON
+if not defined WITH_STATIC_LIB set WITH_STATIC_LIB=ON
 if not defined WITH_CACHE set WITH_CACHE=ON
 if not defined WITH_TPCACHE set WITH_TPCACHE=ON

 rem -------set cache build work directory-----------
 if "%WITH_CACHE%"=="OFF" (
     rmdir build /s/q

@@ -99,6 +102,7 @@ set CLCACHE_OBJECT_CACHE_TIMEOUT_MS=1000000
 :: set maximum cache size to 20G
 clcache.exe -M 21474836480

+rem ------set cache third_party------
 set cache_dir=%work_dir:Paddle=cache%
 dir %cache_dir%

@@ -138,6 +142,7 @@ exit /b 1
 :CASE_wincheck_mkl
 set WITH_MKL=ON
 set WITH_GPU=OFF
+set MSVC_STATIC_CRT=ON
 call :cmake || goto cmake_error
 call :build || goto build_error
 call :test_whl_pacakage || goto test_whl_pacakage_error

@@ -149,11 +154,13 @@ goto:success
 :CASE_wincheck_openblas
 set WITH_MKL=OFF
 set WITH_GPU=ON
+set MSVC_STATIC_CRT=OFF
 rem Temporarily turn off WITH_INFERENCE_API_TEST on GPU due to compile hang
 set WITH_INFERENCE_API_TEST=OFF
 call :cmake || goto cmake_error
 call :build || goto build_error
 call :test_whl_pacakage || goto test_whl_pacakage_error
+:: call :test_inference || goto test_inference_error
 goto :success
 rem "Other configurations are added here"

@@ -172,12 +179,14 @@ set start=%start:~4,10%
 echo cmake .. -G "Visual Studio 14 2015 Win64" -DWITH_AVX=%WITH_AVX% -DWITH_GPU=%WITH_GPU% -DWITH_MKL=%WITH_MKL% ^
 -DWITH_TESTING=%WITH_TESTING% -DWITH_PYTHON=%WITH_PYTHON% -DCUDA_TOOLKIT_ROOT_DIR=%CUDA_TOOLKIT_ROOT_DIR% ^
 -DON_INFER=%ON_INFER% -DWITH_INFERENCE_API_TEST=%WITH_INFERENCE_API_TEST% -DTHIRD_PARTY_PATH=%THIRD_PARTY_PATH% ^
--DINFERENCE_DEMO_INSTALL_DIR=%INFERENCE_DEMO_INSTALL_DIR%
+-DINFERENCE_DEMO_INSTALL_DIR=%INFERENCE_DEMO_INSTALL_DIR% -DWITH_STATIC_LIB=%WITH_STATIC_LIB% ^
+-DTENSORRT_ROOT=%TENSORRT_ROOT% -DMSVC_STATIC_CRT=%MSVC_STATIC_CRT%

 cmake .. -G "Visual Studio 14 2015 Win64" -DWITH_AVX=%WITH_AVX% -DWITH_GPU=%WITH_GPU% -DWITH_MKL=%WITH_MKL% ^
 -DWITH_TESTING=%WITH_TESTING% -DWITH_PYTHON=%WITH_PYTHON% -DCUDA_TOOLKIT_ROOT_DIR=%CUDA_TOOLKIT_ROOT_DIR% ^
 -DON_INFER=%ON_INFER% -DWITH_INFERENCE_API_TEST=%WITH_INFERENCE_API_TEST% -DTHIRD_PARTY_PATH=%THIRD_PARTY_PATH% ^
--DINFERENCE_DEMO_INSTALL_DIR=%INFERENCE_DEMO_INSTALL_DIR%
+-DINFERENCE_DEMO_INSTALL_DIR=%INFERENCE_DEMO_INSTALL_DIR% -DWITH_STATIC_LIB=%WITH_STATIC_LIB% ^
+-DTENSORRT_ROOT=%TENSORRT_ROOT% -DMSVC_STATIC_CRT=%MSVC_STATIC_CRT%
 goto :eof

 :cmake_error

@@ -282,7 +291,9 @@ dir %THIRD_PARTY_PATH:/=\%\install\mklml\lib
 dir %THIRD_PARTY_PATH:/=\%\install\mkldnn\bin
 dir %THIRD_PARTY_PATH:/=\%\install\warpctc\bin
-set PATH=%THIRD_PARTY_PATH:/=\%\install\openblas\lib;%THIRD_PARTY_PATH:/=\%\install\openblas\bin;%THIRD_PARTY_PATH:/=\%\install\zlib\bin;%THIRD_PARTY_PATH:/=\%\install\mklml\lib;%THIRD_PARTY_PATH:/=\%\install\mkldnn\bin;%THIRD_PARTY_PATH:/=\%\install\warpctc\bin;%PATH%
+set PATH=%THIRD_PARTY_PATH:/=\%\install\openblas\lib;%THIRD_PARTY_PATH:/=\%\install\openblas\bin;^
+%THIRD_PARTY_PATH:/=\%\install\zlib\bin;%THIRD_PARTY_PATH:/=\%\install\mklml\lib;^
+%THIRD_PARTY_PATH:/=\%\install\mkldnn\bin;%THIRD_PARTY_PATH:/=\%\install\warpctc\bin;%PATH%
 ctest.exe --output-on-failure -C Release -j 8 --repeat until-pass:4 after-timeout:4
 goto :eof

@@ -305,7 +316,7 @@ set end=%end:~4,10%
 call :timestamp "%start%" "%end%" "TestCases Total"
 cd %work_dir%\paddle\fluid\inference\api\demo_ci
-%cache_dir%\tools\busybox64.exe bash run.sh %work_dir:\=/% %WITH_MKL% %WITH_GPU% %cache_dir:\=/%/inference_demo
+%cache_dir%\tools\busybox64.exe bash run.sh %work_dir:\=/% %WITH_MKL% %WITH_GPU% %cache_dir:\=/%/inference_demo %TENSORRT_ROOT%/include %TENSORRT_ROOT%/lib %MSVC_STATIC_CRT%
 goto :eof

 :test_inference_error