Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
Crayon鑫
Paddle
提交
4b429c19
P
Paddle
项目概览
Crayon鑫
/
Paddle
与 Fork 源项目一致
Fork自
PaddlePaddle / Paddle
通知
1
Star
1
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
1
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
Paddle
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
1
Issue
1
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
4b429c19
编写于
11月 26, 2019
作者:
S
silingtong123
提交者:
liuwei1031
11月 26, 2019
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
package the CAPI inference library and third_party (#21299)
上级
f4cf028a
变更
1
隐藏空白更改
内联
并排
Showing
1 changed file
with
101 additions
and
73 deletions
+101
-73
cmake/inference_lib.cmake
cmake/inference_lib.cmake
+101
-73
未找到文件。
cmake/inference_lib.cmake
浏览文件 @
4b429c19
...
...
@@ -55,8 +55,81 @@ function(copy TARGET)
endforeach
()
endfunction
()
# Stage a subset of third_party dependencies (BLAS provider, MKL-DNN, gflags,
# glog, protobuf, nGraph, TensorRT, Anakin) into an inference package tree.
# Uses the copy() helper defined earlier in this file.
#
# NOTE(review): "thrid" is a typo for "third", but the function name is part
# of the public interface (it is invoked elsewhere in this file), so it is
# deliberately left unchanged.
#
# Arguments:
#   TARGET - custom target the copy rules are attached to
#   DST    - package root; files land under ${DST}/third_party/install/<dep>
function(copy_part_of_thrid_party TARGET DST)
  # Quote both sides of STREQUAL: unquoted ${CBLAS_PROVIDER} is re-dereferenced
  # by if() and breaks when the variable is empty or names another variable.
  if("${CBLAS_PROVIDER}" STREQUAL "MKLML")
    set(dst_dir "${DST}/third_party/install/mklml")
    if(WIN32)
      # Windows ships import libs plus shared libs and their runtime deps.
      copy(${TARGET}
        SRCS ${MKLML_LIB} ${MKLML_IOMP_LIB} ${MKLML_SHARED_LIB}
             ${MKLML_SHARED_LIB_DEPS} ${MKLML_SHARED_IOMP_LIB} ${MKLML_INC_DIR}
        DSTS ${dst_dir}/lib ${dst_dir}/lib ${dst_dir}/lib ${dst_dir}/lib
             ${dst_dir}/lib ${dst_dir})
    else()
      copy(${TARGET}
        SRCS ${MKLML_LIB} ${MKLML_IOMP_LIB} ${MKLML_INC_DIR}
        DSTS ${dst_dir}/lib ${dst_dir}/lib ${dst_dir})
    endif()
  elseif("${CBLAS_PROVIDER}" STREQUAL "EXTERN_OPENBLAS")
    set(dst_dir "${DST}/third_party/install/openblas")
    copy(${TARGET}
      SRCS ${CBLAS_INSTALL_DIR}/lib ${CBLAS_INSTALL_DIR}/include
      DSTS ${dst_dir} ${dst_dir})
  endif()

  if(WITH_MKLDNN)
    set(dst_dir "${DST}/third_party/install/mkldnn")
    if(WIN32)
      copy(${TARGET}
        SRCS ${MKLDNN_INC_DIR} ${MKLDNN_SHARED_LIB} ${MKLDNN_LIB}
        DSTS ${dst_dir} ${dst_dir}/lib ${dst_dir}/lib)
    else()
      copy(${TARGET}
        SRCS ${MKLDNN_INC_DIR} ${MKLDNN_SHARED_LIB}
        DSTS ${dst_dir} ${dst_dir}/lib)
    endif()
  endif()

  set(dst_dir "${DST}/third_party/install/gflags")
  copy(${TARGET}
    SRCS ${GFLAGS_INCLUDE_DIR} ${GFLAGS_LIBRARIES}
    DSTS ${dst_dir} ${dst_dir}/lib)

  set(dst_dir "${DST}/third_party/install/glog")
  copy(${TARGET}
    SRCS ${GLOG_INCLUDE_DIR} ${GLOG_LIBRARIES}
    DSTS ${dst_dir} ${dst_dir}/lib)

  # Protobuf is bundled when it was built from source (no system package) or
  # on Windows, where a system protobuf is not assumed.
  if(NOT PROTOBUF_FOUND OR WIN32)
    set(dst_dir "${DST}/third_party/install/protobuf")
    copy(${TARGET}
      SRCS ${PROTOBUF_INCLUDE_DIR} ${PROTOBUF_LIBRARY}
      DSTS ${dst_dir} ${dst_dir}/lib)
  endif()

  if(WITH_NGRAPH)
    set(dst_dir "${DST}/third_party/install/ngraph")
    copy(${TARGET}
      SRCS ${NGRAPH_INC_DIR} ${NGRAPH_LIB_DIR}
      DSTS ${dst_dir} ${dst_dir})
  endif()

  if(TENSORRT_FOUND)
    set(dst_dir "${DST}/third_party/install/tensorrt")
    # Only the Nv*.h public headers and nvinfer libraries are packaged.
    copy(${TARGET}
      SRCS ${TENSORRT_INCLUDE_DIR}/Nv*.h ${TENSORRT_LIBRARY_DIR}/*nvinfer*
      DSTS ${dst_dir}/include ${dst_dir}/lib)
  endif()

  if(ANAKIN_FOUND)
    set(dst_dir "${DST}/third_party/install/anakin")
    copy(${TARGET} SRCS ${ANAKIN_ROOT}/* DSTS ${dst_dir})
  endif()
endfunction()
# inference library for only inference
# Dependency list for the inference distribution target; paddle_fluid_c and
# paddle_fluid_c_shared are the CAPI inference targets.
# (The previous assignment without the CAPI targets was dead code — it was
# immediately overwritten — and has been removed.)
set(inference_lib_deps third_party paddle_fluid paddle_fluid_shared
    paddle_fluid_c paddle_fluid_c_shared)
add_custom_target(inference_lib_dist DEPENDS ${inference_lib_deps})

set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/eigen3")
...
...
@@ -74,50 +147,6 @@ copy(inference_lib_dist
SRCS
${
DLPACK_INCLUDE_DIR
}
/dlpack
DSTS
${
dst_dir
}
)
# Stage the BLAS provider (MKLML or external OpenBLAS) into the inference
# package. Quote both sides of STREQUAL so an empty or variable-named
# ${CBLAS_PROVIDER} cannot be re-dereferenced by if().
if("${CBLAS_PROVIDER}" STREQUAL "MKLML")
  set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/mklml")
  if(WIN32)
    copy(inference_lib_dist
      SRCS ${MKLML_LIB} ${MKLML_IOMP_LIB} ${MKLML_SHARED_LIB}
           ${MKLML_SHARED_LIB_DEPS} ${MKLML_SHARED_IOMP_LIB} ${MKLML_INC_DIR}
      DSTS ${dst_dir}/lib ${dst_dir}/lib ${dst_dir}/lib ${dst_dir}/lib
           ${dst_dir}/lib ${dst_dir})
  else()
    copy(inference_lib_dist
      SRCS ${MKLML_LIB} ${MKLML_IOMP_LIB} ${MKLML_INC_DIR}
      DSTS ${dst_dir}/lib ${dst_dir}/lib ${dst_dir})
  endif()
elseif("${CBLAS_PROVIDER}" STREQUAL "EXTERN_OPENBLAS")
  set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/openblas")
  copy(inference_lib_dist
    SRCS ${CBLAS_INSTALL_DIR}/lib ${CBLAS_INSTALL_DIR}/include
    DSTS ${dst_dir} ${dst_dir})
endif()
# Stage MKL-DNN headers and libraries; Windows additionally needs the import
# library (${MKLDNN_LIB}) next to the shared library.
if(WITH_MKLDNN)
  set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/mkldnn")
  if(WIN32)
    copy(inference_lib_dist
      SRCS ${MKLDNN_INC_DIR} ${MKLDNN_SHARED_LIB} ${MKLDNN_LIB}
      DSTS ${dst_dir} ${dst_dir}/lib ${dst_dir}/lib)
  else()
    copy(inference_lib_dist
      SRCS ${MKLDNN_INC_DIR} ${MKLDNN_SHARED_LIB}
      DSTS ${dst_dir} ${dst_dir}/lib)
  endif()
endif()
# gflags: headers to the package root, libraries under lib/.
set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/gflags")
copy(inference_lib_dist
  SRCS ${GFLAGS_INCLUDE_DIR} ${GFLAGS_LIBRARIES}
  DSTS ${dst_dir} ${dst_dir}/lib)

# glog: same layout as gflags.
set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/glog")
copy(inference_lib_dist
  SRCS ${GLOG_INCLUDE_DIR} ${GLOG_LIBRARIES}
  DSTS ${dst_dir} ${dst_dir}/lib)

# xxhash destination directory (its copy() call follows).
set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/xxhash")
copy
(
inference_lib_dist
SRCS
${
XXHASH_INCLUDE_DIR
}
${
XXHASH_LIBRARIES
}
...
...
@@ -128,38 +157,12 @@ copy(inference_lib_dist
SRCS
${
ZLIB_INCLUDE_DIR
}
${
ZLIB_LIBRARIES
}
DSTS
${
dst_dir
}
${
dst_dir
}
/lib
)
# Bundle protobuf when it was built from source (not found on the system) or
# when targeting Windows.
if(NOT PROTOBUF_FOUND OR WIN32)
  set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/protobuf")
  copy(inference_lib_dist
    SRCS ${PROTOBUF_INCLUDE_DIR} ${PROTOBUF_LIBRARY}
    DSTS ${dst_dir} ${dst_dir}/lib)
endif()
# Stage nGraph include and library directories into the package.
if(WITH_NGRAPH)
  set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/ngraph")
  copy(inference_lib_dist
    SRCS ${NGRAPH_INC_DIR} ${NGRAPH_LIB_DIR}
    DSTS ${dst_dir} ${dst_dir})
endif()
# Stage TensorRT: only the public Nv*.h headers and the nvinfer libraries.
if(TENSORRT_FOUND)
  set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/tensorrt")
  copy(inference_lib_dist
    SRCS ${TENSORRT_INCLUDE_DIR}/Nv*.h ${TENSORRT_LIBRARY_DIR}/*nvinfer*
    DSTS ${dst_dir}/include ${dst_dir}/lib)
endif()
# Stage the entire Anakin tree into the package when it is available.
if(ANAKIN_FOUND)
  set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/anakin")
  copy(inference_lib_dist SRCS ${ANAKIN_ROOT}/* DSTS ${dst_dir})
endif()
# Ship CMakeCache.txt with the package so the exact build configuration is
# recorded alongside the artifacts.
copy(inference_lib_dist
  SRCS ${CMAKE_CURRENT_BINARY_DIR}/CMakeCache.txt
  DSTS ${FLUID_INFERENCE_INSTALL_DIR})

# Stage the shared third_party subset into the C++ inference package.
copy_part_of_thrid_party(inference_lib_dist ${FLUID_INFERENCE_INSTALL_DIR})

set(src_dir "${PADDLE_SOURCE_DIR}/paddle/fluid")
if
(
WIN32
)
set
(
paddle_fluid_lib
${
PADDLE_BINARY_DIR
}
/paddle/fluid/inference/
${
CMAKE_BUILD_TYPE
}
/libpaddle_fluid.*
)
...
...
@@ -172,6 +175,30 @@ copy(inference_lib_dist
DSTS
${
FLUID_INFERENCE_INSTALL_DIR
}
/paddle/include
${
FLUID_INFERENCE_INSTALL_DIR
}
/paddle/lib
)
# CAPI inference library for only inference
# Destination for the C-API (paddle_fluid_c) inference package; overridable
# via the cache.
set(FLUID_INFERENCE_C_INSTALL_DIR
    "${CMAKE_BINARY_DIR}/fluid_inference_c_install_dir"
    CACHE STRING "A path setting CAPI fluid inference shared")
# Reuse the same third_party subset for the CAPI package.
copy_part_of_thrid_party(inference_lib_dist ${FLUID_INFERENCE_C_INSTALL_DIR})

# NOTE(review): src_dir is also set earlier in this file to the same value;
# the re-assignment is redundant but harmless.
set(src_dir "${PADDLE_SOURCE_DIR}/paddle/fluid")
# Select the CAPI library artifacts to package. On Windows both the DLL and
# the import .lib are needed; elsewhere a glob covers .so/.dylib variants.
# Legacy else(WIN32)/endif(WIN32) repeated-argument forms replaced with the
# bare else()/endif() — the repeated argument is deprecated noise and a
# maintenance hazard if the condition ever changes.
if(WIN32)
  set(paddle_fluid_c_lib
      ${PADDLE_BINARY_DIR}/paddle/fluid/inference/capi/${CMAKE_BUILD_TYPE}/paddle_fluid_c.dll
      ${PADDLE_BINARY_DIR}/paddle/fluid/inference/capi/${CMAKE_BUILD_TYPE}/paddle_fluid_c.lib)
else()
  # NOTE(review): this globs libpaddle_fluid.* (the C++ library), not a
  # libpaddle_fluid_c.* CAPI library as the Windows branch does — looks like
  # it should point at the capi/ output; confirm against the build layout.
  set(paddle_fluid_c_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/libpaddle_fluid.*)
endif()
# Copy the CAPI header and library into the CAPI package. The Windows branch
# lists lib/ twice because paddle_fluid_c_lib expands to two files there
# (DLL + import library).
if(WIN32)
  copy(inference_lib_dist
    SRCS ${src_dir}/inference/capi/c_api.h ${paddle_fluid_c_lib}
    DSTS ${FLUID_INFERENCE_C_INSTALL_DIR}/paddle/include
         ${FLUID_INFERENCE_C_INSTALL_DIR}/paddle/lib
         ${FLUID_INFERENCE_C_INSTALL_DIR}/paddle/lib)
else()
  copy(inference_lib_dist
    SRCS ${src_dir}/inference/capi/c_api.h ${paddle_fluid_c_lib}
    DSTS ${FLUID_INFERENCE_C_INSTALL_DIR}/paddle/include
         ${FLUID_INFERENCE_C_INSTALL_DIR}/paddle/lib)
endif()
# fluid library for both train and inference
# fluid_lib_dist builds on top of the inference distribution target.
set(fluid_lib_deps inference_lib_dist)
add_custom_target(fluid_lib_dist ALL DEPENDS ${fluid_lib_deps})
...
...
@@ -254,3 +281,4 @@ function(version version_file)
endfunction
()
# Emit version.txt into each install tree via the version() helper defined
# earlier in this file.
version(${FLUID_INSTALL_DIR}/version.txt)
version(${FLUID_INFERENCE_INSTALL_DIR}/version.txt)
version(${FLUID_INFERENCE_C_INSTALL_DIR}/version.txt)
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录