机器未来 / Paddle
Forked from PaddlePaddle / Paddle
Commit fc63aa72
Authored on Oct 16, 2018 by Tao Luo
add inference-only fluid library
Parent: dcfb6875
Showing 2 changed files with 40 additions and 17 deletions (+40 -17)
CMakeLists.txt (+3 -0)
cmake/inference_lib.cmake (+37 -17)
CMakeLists.txt

@@ -127,6 +127,9 @@ set(THIRD_PARTY_PATH "${CMAKE_BINARY_DIR}/third_party" CACHE STRING
 set(FLUID_INSTALL_DIR "${CMAKE_BINARY_DIR}/fluid_install_dir" CACHE STRING
   "A path setting fluid shared and static libraries")
 
+set(FLUID_INFERENCE_INSTALL_DIR "${CMAKE_BINARY_DIR}/fluid_inference_install_dir" CACHE STRING
+  "A path setting fluid inference shared and static libraries")
+
 if (WITH_C_API AND WITH_PYTHON)
   message(WARNING "It is suggest not embedded a python interpreter in Paddle "
     "when using C-API. It will give an unpredictable behavior when using a "
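Because FLUID_INFERENCE_INSTALL_DIR is declared as a CACHE STRING, the destination of the inference-only package can be changed at configure time rather than by editing this file. A minimal sketch, assuming a hypothetical install prefix /opt/paddle that is not part of this commit:

# Hypothetical configure-time override (shown as a comment; run from the build dir):
#   cmake .. -DFLUID_INFERENCE_INSTALL_DIR=/opt/paddle/fluid_inference
# The same override expressed in CMake script, e.g. from a wrapper script that
# runs before Paddle's CMakeLists.txt:
set(FLUID_INFERENCE_INSTALL_DIR "/opt/paddle/fluid_inference"
    CACHE STRING "A path setting fluid inference shared and static libraries" FORCE)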
cmake/inference_lib.cmake

@@ -150,16 +150,16 @@ if (WITH_ANAKIN AND WITH_MKL)
     SRCS ${PADDLE_BINARY_DIR}/paddle/fluid/inference/api/libinference_anakin_api* # compiled anakin api
          ${ANAKIN_INSTALL_DIR} # anakin release
-    DSTS ${dst_dir}/inference/anakin ${FLUID_INSTALL_DIR}/third_party/install/anakin)
+    DSTS ${FLUID_INSTALL_DIR}/third_party/install/anakin ${FLUID_INSTALL_DIR}/third_party/install/anakin)
   list(APPEND inference_deps anakin_inference_lib)
 endif()
 
 set(module "inference")
 copy(inference_lib DEPS ${inference_deps}
   SRCS ${src_dir}/${module}/*.h ${PADDLE_BINARY_DIR}/paddle/fluid/inference/libpaddle_fluid.*
-       ${src_dir}/${module}/api/paddle_inference_api.h ${src_dir}/${module}/api/demo_ci
+       ${src_dir}/${module}/api/paddle_inference_api.h
        ${PADDLE_BINARY_DIR}/paddle/fluid/inference/api/paddle_inference_pass.h
-  DSTS ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
+  DSTS ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
 )
 
 set(module "platform")
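For context on why the DSTS list shrinks along with SRCS: the copy() helper defined earlier in inference_lib.cmake pairs each SRCS entry with the DSTS entry at the same index, so removing ${src_dir}/${module}/api/demo_ci from SRCS also removes one ${dst_dir}/${module} from DSTS. A minimal sketch of such a helper, assuming a Unix shell for glob expansion; this illustrates the pairing and is not Paddle's exact implementation:

function(copy TARGET)
  # Parse copy(<target> DEPS <deps...> SRCS <srcs...> DSTS <dsts...>).
  cmake_parse_arguments(copy_lib "" "" "SRCS;DSTS;DEPS" ${ARGN})
  add_custom_target(${TARGET} DEPENDS ${copy_lib_DEPS})
  list(LENGTH copy_lib_SRCS num_srcs)
  math(EXPR last_idx "${num_srcs} - 1")
  foreach(i RANGE ${last_idx})
    # SRCS[i] is copied into DSTS[i]; the two lists must stay the same length.
    list(GET copy_lib_SRCS ${i} src)
    list(GET copy_lib_DSTS ${i} dst)
    add_custom_command(TARGET ${TARGET} POST_BUILD
      COMMAND mkdir -p "${dst}"      # ensure the destination directory exists
      COMMAND cp -r ${src} "${dst}") # src left unquoted so the shell expands globs like *.h
  endforeach()
endfunction()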
@@ -188,18 +188,38 @@ copy(cmake_cache
 # This command generates a complete fluid library for both train and inference
 add_custom_target(fluid_lib_dist DEPENDS ${fluid_lib_dist_dep})
 
+# Following commands generate a inference-only fluid library
+# third_party, version.txt and CMakeCache.txt are the same position with ${FLUID_INSTALL_DIR}
+copy(third_party DEPS fluid_lib_dist
+  SRCS ${FLUID_INSTALL_DIR}/third_party ${FLUID_INSTALL_DIR}/CMakeCache.txt
+  DSTS ${FLUID_INFERENCE_INSTALL_DIR} ${FLUID_INFERENCE_INSTALL_DIR}
+)
+
+# only need libpaddle_fluid.so/a and paddle_inference_api.h for inference-only library
+copy(inference_api_lib DEPS fluid_lib_dist
+  SRCS ${FLUID_INSTALL_DIR}/paddle/fluid/inference/libpaddle_fluid.*
+       ${FLUID_INSTALL_DIR}/paddle/fluid/inference/paddle_inference_api.h
+  DSTS ${FLUID_INFERENCE_INSTALL_DIR}/paddle/lib ${FLUID_INFERENCE_INSTALL_DIR}/paddle/include
+)
+
+add_custom_target(inference_lib_dist DEPENDS third_party inference_api_lib)
+
 # paddle fluid version
-execute_process(
-  COMMAND ${GIT_EXECUTABLE} log --pretty=format:%H -1
-  WORKING_DIRECTORY ${PADDLE_SOURCE_DIR}
-  OUTPUT_VARIABLE PADDLE_GIT_COMMIT)
-set(version_file ${FLUID_INSTALL_DIR}/version.txt)
-file(WRITE ${version_file}
-  "GIT COMMIT ID: ${PADDLE_GIT_COMMIT}\n"
-  "WITH_MKL: ${WITH_MKL}\n"
-  "WITH_GPU: ${WITH_GPU}\n")
-if(WITH_GPU)
-  file(APPEND ${version_file}
-    "CUDA version: ${CUDA_VERSION}\n"
-    "CUDNN version: v${CUDNN_MAJOR_VERSION}\n")
-endif()
+function(version version_file)
+  execute_process(
+    COMMAND ${GIT_EXECUTABLE} log --pretty=format:%H -1
+    WORKING_DIRECTORY ${PADDLE_SOURCE_DIR}
+    OUTPUT_VARIABLE PADDLE_GIT_COMMIT)
+  file(WRITE ${version_file}
+    "GIT COMMIT ID: ${PADDLE_GIT_COMMIT}\n"
+    "WITH_MKL: ${WITH_MKL}\n"
+    "WITH_MKLDNN: ${WITH_MKLDNN}\n"
+    "WITH_GPU: ${WITH_GPU}\n")
+  if(WITH_GPU)
+    file(APPEND ${version_file}
+      "CUDA version: ${CUDA_VERSION}\n"
+      "CUDNN version: v${CUDNN_MAJOR_VERSION}\n")
+  endif()
+endfunction()
+version(${FLUID_INSTALL_DIR}/version.txt)
+version(${FLUID_INFERENCE_INSTALL_DIR}/version.txt)
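Taken together, the new rules mean the inference-only package is produced by building the new target, e.g. with "cmake --build . --target inference_lib_dist" (or "make inference_lib_dist" under the Makefile generator). Based on the DSTS paths above, the resulting ${FLUID_INFERENCE_INSTALL_DIR} layout should look roughly like this sketch (third_party contents vary with build options):

fluid_inference_install_dir/
├── CMakeCache.txt
├── version.txt
├── paddle/
│   ├── include/paddle_inference_api.h
│   └── lib/libpaddle_fluid.so (and/or libpaddle_fluid.a)
└── third_party/...

The version.txt written by the new version() function contains lines of this form (values illustrative):

GIT COMMIT ID: <current commit hash>
WITH_MKL: ON
WITH_MKLDNN: ON
WITH_GPU: OFF

with the CUDA and CUDNN version lines appended only when WITH_GPU is ON.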