Commit f8874b3c (unverified)

Authored Oct 16, 2018 by Tao Luo; committed by GitHub on Oct 16, 2018

Merge pull request #13884 from luotao1/rename_inference_lib_dist

rename inference_lib_dist to fluid_lib_dist

Parents: a9d79225, 3d976f3f

Showing 3 changed files with 16 additions and 15 deletions (+16 −15):
cmake/inference_lib.cmake (+3 −2)
paddle/fluid/train/demo/README.md (+1 −1)
paddle/scripts/paddle_build.sh (+12 −12)
cmake/inference_lib.cmake
@@ -18,7 +18,7 @@ function(copy TARGET)
     set(oneValueArgs "")
     set(multiValueArgs SRCS DSTS DEPS)
     cmake_parse_arguments(copy_lib "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
-    set(inference_lib_dist_dep ${TARGET} ${inference_lib_dist_dep} PARENT_SCOPE)
+    set(fluid_lib_dist_dep ${TARGET} ${fluid_lib_dist_dep} PARENT_SCOPE)
     list(LENGTH copy_lib_SRCS copy_lib_SRCS_len)
     list(LENGTH copy_lib_DSTS copy_lib_DSTS_len)
@@ -185,7 +185,8 @@ copy(cmake_cache
         SRCS ${CMAKE_CURRENT_BINARY_DIR}/CMakeCache.txt
         DSTS ${FLUID_INSTALL_DIR})
-add_custom_target(inference_lib_dist DEPENDS ${inference_lib_dist_dep})
+# This command generates a complete fluid library for both train and inference
+add_custom_target(fluid_lib_dist DEPENDS ${fluid_lib_dist_dep})

 # paddle fluid version
 execute_process(
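For context, the renamed custom target is driven exactly like the old one; only the target name changes. A minimal sketch, assuming an already checked-out Paddle source tree and reusing the cmake flag that appears in this commit's paddle_build.sh hunk:

```bash
# Configure and build the renamed library target (paths and flags assumed).
mkdir -p build && cd build
cmake .. -DWITH_DISTRIBUTE=OFF
make -j`nproc` fluid_lib_dist   # previously: make -j`nproc` inference_lib_dist
```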
paddle/fluid/train/demo/README.md

@@ -15,7 +15,7 @@ cmake .. -DFLUID_INSTALL_DIR=$PADDLE_LIB \
          -DWITH_MKL=OFF \
          -DWITH_MKLDNN=OFF
 make -j8
-make -j8 inference_lib_dist
+make -j8 fluid_lib_dist
 ```

 ### step 2. generate program desc
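Put together, step 1 of the train demo now reads as below; a sketch assembled from the hunk above, with $PADDLE_LIB assumed to be the install prefix the user chooses (the target installs the fluid headers and libraries there via FLUID_INSTALL_DIR, per inference_lib.cmake):

```bash
# Train-demo step 1 after the rename: only the make target name changes.
cmake .. -DFLUID_INSTALL_DIR=$PADDLE_LIB \
         -DWITH_MKL=OFF \
         -DWITH_MKLDNN=OFF
make -j8
make -j8 fluid_lib_dist   # was: make -j8 inference_lib_dist
```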
paddle/scripts/paddle_build.sh
@@ -648,25 +648,25 @@ function gen_capi_package() {
     fi
 }

-function gen_fluid_inference_lib() {
+function gen_fluid_lib() {
     mkdir -p ${PADDLE_ROOT}/build
     cd ${PADDLE_ROOT}/build
     if [[ ${WITH_C_API:-OFF} == "OFF" && ${WITH_INFERENCE:-ON} == "ON" ]] ; then
         cat <<EOF
     ========================================
-    Generating fluid inference library ...
+    Generating fluid library for train and inference ...
     ========================================
 EOF
         cmake .. -DWITH_DISTRIBUTE=OFF
-        make -j `nproc` inference_lib_dist
+        make -j `nproc` fluid_lib_dist
     fi
 }

-function tar_fluid_inference_lib() {
+function tar_fluid_lib() {
     if [[ ${WITH_C_API:-OFF} == "OFF" && ${WITH_INFERENCE:-ON} == "ON" ]] ; then
         cat <<EOF
     ========================================
-    Taring fluid inference library ...
+    Taring fluid library for train and inference ...
     ========================================
 EOF
         cd ${PADDLE_ROOT}/build
@@ -675,11 +675,11 @@ EOF
     fi
 }

-function test_fluid_inference_lib() {
+function test_fluid_lib() {
     if [[ ${WITH_C_API:-OFF} == "OFF" && ${WITH_INFERENCE:-ON} == "ON" ]] ; then
         cat <<EOF
     ========================================
-    Testing fluid inference library ...
+    Testing fluid library for inference ...
     ========================================
 EOF
         cd ${PADDLE_ROOT}/paddle/fluid/inference/api/demo_ci
@@ -731,9 +731,9 @@ function main() {
       ;;
     fluid_inference_lib)
       cmake_gen ${PYTHON_ABI:-""}
-      gen_fluid_inference_lib
-      tar_fluid_inference_lib
-      test_fluid_inference_lib
+      gen_fluid_lib
+      tar_fluid_lib
+      test_fluid_lib
       ;;
     check_style)
       check_style
@@ -744,8 +744,8 @@ function main() {
       assert_api_not_changed ${PYTHON_ABI:-""}
       run_test
       gen_capi_package
-      gen_fluid_inference_lib
-      test_fluid_inference_lib
+      gen_fluid_lib
+      test_fluid_lib
       assert_api_spec_approvals
       ;;
     maccheck)
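With these renames in place, the CI entry point itself does not change: the `fluid_inference_lib` case of `main()` still exists and simply dispatches to the renamed helpers. A hypothetical local invocation, assuming the script is run from the repository root the way the CI pipeline does (the environment CI sets up is not shown in this diff):

```bash
# Hypothetical local run of this pipeline stage; the subcommand keeps its old
# name while the helpers it calls are now gen_fluid_lib, tar_fluid_lib and
# test_fluid_lib.
bash paddle/scripts/paddle_build.sh fluid_inference_lib
```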