Commit afe94903 in PaddlePaddle / Paddle (unverified)
Authored by Wilber on Sep 22, 2020; committed via GitHub on Sep 22, 2020.
Rename fluid_inference to paddle_inference. (#27422)
Parent commit: 81823370
Showing 10 changed files with 50 additions and 50 deletions (+50, -50).
cmake/inference_lib.cmake                                  +27  -27
go/README_cn.md                                             +1   -1
paddle/fluid/inference/api/demo_ci/run.sh                   +1   -1
paddle/fluid/inference/api/demo_ci/run_windows_demo.bat     +1   -1
paddle/fluid/train/demo/README.md                           +2   -2
paddle/fluid/train/demo/run.sh                              +2   -2
paddle/fluid/train/imdb_demo/README.md                      +2   -2
paddle/scripts/paddle_build.bat                             +3   -3
paddle/scripts/paddle_build.sh                              +9   -9
paddle/scripts/windows_build/build.bat                      +2   -2
cmake/inference_lib.cmake

@@ -13,11 +13,11 @@
 # limitations under the License.
 # make package for paddle fluid shared and static library
-set(FLUID_INSTALL_DIR "${CMAKE_BINARY_DIR}/fluid_install_dir" CACHE STRING
-  "A path setting fluid shared and static libraries")
+set(PADDLE_INSTALL_DIR "${CMAKE_BINARY_DIR}/paddle_install_dir" CACHE STRING
+  "A path setting paddle shared and static libraries")
-set(FLUID_INFERENCE_INSTALL_DIR "${CMAKE_BINARY_DIR}/fluid_inference_install_dir" CACHE STRING
-  "A path setting fluid inference shared and static libraries")
+set(PADDLE_INFERENCE_INSTALL_DIR "${CMAKE_BINARY_DIR}/paddle_inference_install_dir" CACHE STRING
+  "A path setting paddle inference shared and static libraries")
 # TODO(zhaolong)
 # At present, the size of static lib in Windows exceeds the system limit,

@@ -142,14 +142,14 @@ set(inference_lib_deps third_party paddle_fluid paddle_fluid_c paddle_fluid_shar
 add_custom_target(inference_lib_dist DEPENDS ${inference_lib_deps})
-set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/threadpool")
+set(dst_dir "${PADDLE_INFERENCE_INSTALL_DIR}/third_party/threadpool")
 copy(inference_lib_dist
         SRCS ${THREADPOOL_INCLUDE_DIR}/ThreadPool.h
         DSTS ${dst_dir})
 # Only GPU need cudaErrorMessage.pb
 IF(WITH_GPU)
-   set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/cudaerror/data")
+   set(dst_dir "${PADDLE_INFERENCE_INSTALL_DIR}/third_party/cudaerror/data")
    copy(inference_lib_dist
            SRCS ${cudaerror_INCLUDE_DIR}
            DSTS ${dst_dir})

@@ -158,9 +158,9 @@ ENDIF()
 # CMakeCache Info
 copy(inference_lib_dist
         SRCS ${CMAKE_CURRENT_BINARY_DIR}/CMakeCache.txt
-        DSTS ${FLUID_INFERENCE_INSTALL_DIR})
+        DSTS ${PADDLE_INFERENCE_INSTALL_DIR})
-copy_part_of_thrid_party(inference_lib_dist ${FLUID_INFERENCE_INSTALL_DIR})
+copy_part_of_thrid_party(inference_lib_dist ${PADDLE_INFERENCE_INSTALL_DIR})
 set(src_dir "${PADDLE_SOURCE_DIR}/paddle/fluid")
 if(WIN32)

@@ -177,39 +177,39 @@ endif(WIN32)
 if(WIN32 AND NOT WITH_STATIC_LIB)
     copy(inference_lib_dist
             SRCS ${src_dir}/inference/api/paddle_*.h ${paddle_fluid_lib}
-            DSTS ${FLUID_INFERENCE_INSTALL_DIR}/paddle/include ${FLUID_INFERENCE_INSTALL_DIR}/paddle/lib
-                 ${FLUID_INFERENCE_INSTALL_DIR}/paddle/lib)
+            DSTS ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/include ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/lib
+                 ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/lib)
 else()
     copy(inference_lib_dist
             SRCS ${src_dir}/inference/api/paddle_*.h ${paddle_fluid_lib}
-            DSTS ${FLUID_INFERENCE_INSTALL_DIR}/paddle/include ${FLUID_INFERENCE_INSTALL_DIR}/paddle/lib)
+            DSTS ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/include ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/lib)
 endif()
 copy(inference_lib_dist
         SRCS ${CMAKE_BINARY_DIR}/paddle/fluid/framework/framework.pb.h
-        DSTS ${FLUID_INFERENCE_INSTALL_DIR}/paddle/include/internal)
+        DSTS ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/include/internal)
 copy(inference_lib_dist
         SRCS ${PADDLE_SOURCE_DIR}/paddle/fluid/framework/io/crypto/cipher.h
-        DSTS ${FLUID_INFERENCE_INSTALL_DIR}/paddle/include/crypto/)
+        DSTS ${PADDLE_INFERENCE_INSTALL_DIR}/paddle/include/crypto/)
 include_directories(${CMAKE_BINARY_DIR}/../paddle/fluid/framework/io)
 # CAPI inference library for only inference
-set(FLUID_INFERENCE_C_INSTALL_DIR "${CMAKE_BINARY_DIR}/fluid_inference_c_install_dir" CACHE STRING
-  "A path setting CAPI fluid inference shared")
+set(PADDLE_INFERENCE_C_INSTALL_DIR "${CMAKE_BINARY_DIR}/paddle_inference_c_install_dir" CACHE STRING
+  "A path setting CAPI paddle inference shared")
-copy_part_of_thrid_party(inference_lib_dist ${FLUID_INFERENCE_C_INSTALL_DIR})
+copy_part_of_thrid_party(inference_lib_dist ${PADDLE_INFERENCE_C_INSTALL_DIR})
 set(src_dir "${PADDLE_SOURCE_DIR}/paddle/fluid")
 set(paddle_fluid_c_lib ${PADDLE_BINARY_DIR}/paddle/fluid/inference/capi/libpaddle_fluid_c.*)
 copy(inference_lib_dist
         SRCS ${src_dir}/inference/capi/paddle_c_api.h ${paddle_fluid_c_lib}
-        DSTS ${FLUID_INFERENCE_C_INSTALL_DIR}/paddle/include ${FLUID_INFERENCE_C_INSTALL_DIR}/paddle/lib)
+        DSTS ${PADDLE_INFERENCE_C_INSTALL_DIR}/paddle/include ${PADDLE_INFERENCE_C_INSTALL_DIR}/paddle/lib)
 # fluid library for both train and inference
 set(fluid_lib_deps inference_lib_dist)
 add_custom_target(fluid_lib_dist ALL DEPENDS ${fluid_lib_deps})
-set(dst_dir "${FLUID_INSTALL_DIR}/paddle/fluid")
+set(dst_dir "${PADDLE_INSTALL_DIR}/paddle/fluid")
 set(module "inference")
 if(WIN32 AND NOT WITH_STATIC_LIB)
     copy(fluid_lib_dist

@@ -273,22 +273,22 @@ copy(fluid_lib_dist
         DSTS ${dst_dir}/${module}
 )
-set(dst_dir "${FLUID_INSTALL_DIR}/third_party/eigen3")
+set(dst_dir "${PADDLE_INSTALL_DIR}/third_party/eigen3")
 copy(inference_lib_dist
         SRCS ${EIGEN_INCLUDE_DIR}/Eigen/Core ${EIGEN_INCLUDE_DIR}/Eigen/src ${EIGEN_INCLUDE_DIR}/unsupported/Eigen
         DSTS ${dst_dir}/Eigen ${dst_dir}/Eigen ${dst_dir}/unsupported)
-set(dst_dir "${FLUID_INSTALL_DIR}/third_party/boost")
+set(dst_dir "${PADDLE_INSTALL_DIR}/third_party/boost")
 copy(inference_lib_dist
         SRCS ${BOOST_INCLUDE_DIR}/boost
         DSTS ${dst_dir})
-set(dst_dir "${FLUID_INSTALL_DIR}/third_party/dlpack")
+set(dst_dir "${PADDLE_INSTALL_DIR}/third_party/dlpack")
 copy(inference_lib_dist
         SRCS ${DLPACK_INCLUDE_DIR}/dlpack
         DSTS ${dst_dir})
-set(dst_dir "${FLUID_INSTALL_DIR}/third_party/install/zlib")
+set(dst_dir "${PADDLE_INSTALL_DIR}/third_party/install/zlib")
 copy(inference_lib_dist
         SRCS ${ZLIB_INCLUDE_DIR} ${ZLIB_LIBRARIES}
         DSTS ${dst_dir} ${dst_dir}/lib)

@@ -296,8 +296,8 @@ copy(inference_lib_dist
 # CMakeCache Info
 copy(fluid_lib_dist
-        SRCS ${FLUID_INFERENCE_INSTALL_DIR}/third_party ${CMAKE_CURRENT_BINARY_DIR}/CMakeCache.txt
-        DSTS ${FLUID_INSTALL_DIR} ${FLUID_INSTALL_DIR}
+        SRCS ${PADDLE_INFERENCE_INSTALL_DIR}/third_party ${CMAKE_CURRENT_BINARY_DIR}/CMakeCache.txt
+        DSTS ${PADDLE_INSTALL_DIR} ${PADDLE_INSTALL_DIR}
 )
 # paddle fluid version

@@ -323,6 +323,6 @@ function(version version_file)
   endif()
 endfunction()
-version(${FLUID_INSTALL_DIR}/version.txt)
-version(${FLUID_INFERENCE_INSTALL_DIR}/version.txt)
-version(${FLUID_INFERENCE_C_INSTALL_DIR}/version.txt)
+version(${PADDLE_INSTALL_DIR}/version.txt)
+version(${PADDLE_INFERENCE_INSTALL_DIR}/version.txt)
+version(${PADDLE_INFERENCE_C_INSTALL_DIR}/version.txt)
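The CMake changes above only rename the cached install-directory variables and the directories they point to (`fluid_*` becomes `paddle_*`); the `inference_lib_dist` and `fluid_lib_dist` targets keep their names. A minimal sketch of building the inference package and finding it under the new name; the configure flags below are illustrative assumptions, not requirements of this commit:

```bash
# Hypothetical local build; only the renamed directory names are taken from this commit.
git clone https://github.com/PaddlePaddle/Paddle.git && cd Paddle
mkdir -p build && cd build
cmake .. -DON_INFER=ON -DWITH_GPU=OFF
make -j"$(nproc)" inference_lib_dist              # target defined in cmake/inference_lib.cmake

# After this commit the packaged library lands here (previously fluid_inference_install_dir):
ls paddle_inference_install_dir/paddle/include    # paddle_*.h headers copied above
ls paddle_inference_install_dir/paddle/lib        # shared/static libraries
cat paddle_inference_install_dir/version.txt      # written by the version() helper
```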
go/README_cn.md

 # Paddle 预测golang API
 ## 安装
-首先cmake编译时打开`-DON_INFER=ON`,在编译目录下得到``fluid_inference_c_install_dir``,将该目录移动到当前目录中并重命名为`paddle_c`
+首先cmake编译时打开`-DON_INFER=ON`,在编译目录下得到``paddle_inference_c_install_dir``,将该目录移动到当前目录中并重命名为`paddle_c`
 ## 在Go中使用Paddle预测
 首先创建预测配置
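In English, the changed installation step says: build with `-DON_INFER=ON`, take `paddle_inference_c_install_dir` (previously `fluid_inference_c_install_dir`) from the build directory, move it into the current directory, and rename it to `paddle_c`. A small sketch of that step, with illustrative paths:

```bash
# Assumes Paddle was configured with -DON_INFER=ON and built under ./Paddle/build.
cd Paddle/go
cp -r ../build/paddle_inference_c_install_dir ./paddle_c   # the README says move + rename to paddle_c
```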
paddle/fluid/inference/api/demo_ci/run.sh

@@ -7,7 +7,7 @@ DATA_DIR=$4 # dataset
 TENSORRT_INCLUDE_DIR=$5 # TensorRT header file dir, default to /usr/local/TensorRT/include
 TENSORRT_LIB_DIR=$6 # TensorRT lib file dir, default to /usr/local/TensorRT/lib
-inference_install_dir=${PADDLE_ROOT}/build/fluid_inference_install_dir
+inference_install_dir=${PADDLE_ROOT}/build/paddle_inference_install_dir
 cd `dirname $0`
 current_dir=`pwd`
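Any out-of-tree script that still hard-codes `${PADDLE_ROOT}/build/fluid_inference_install_dir` breaks after this rename. One possible stopgap, which is an assumption on my part and not something this commit adds, is to symlink the old name to the new one inside the build tree:

```bash
# Optional compatibility shim (not part of this commit): keep old callers working
# while they migrate to the renamed directory.
cd "${PADDLE_ROOT}/build"
ln -sfn paddle_inference_install_dir fluid_inference_install_dir
```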
paddle/fluid/inference/api/demo_ci/run_windows_demo.bat

@@ -21,7 +21,7 @@ if /i "%use_mkl%"=="N" (
 )
 :set_paddle_infernece_lib
-SET /P paddle_infernece_lib="Please input the path of paddle inference library, such as D:\fluid_inference_install_dir =======>"
+SET /P paddle_infernece_lib="Please input the path of paddle inference library, such as D:\paddle_inference_install_dir =======>"
 set tmp_var=!paddle_infernece_lib!
 call:remove_space
 set paddle_infernece_lib=!tmp_var!
paddle/fluid/train/demo/README.md

@@ -7,7 +7,7 @@
 # WITH_MKLDNN=ON|OFF
 PADDLE_LIB=/paddle/lib/dir
-cmake .. -DFLUID_INSTALL_DIR=$PADDLE_LIB \
+cmake .. -DPADDLE_INSTALL_DIR=$PADDLE_LIB \
   -DCMAKE_BUILD_TYPE=Release \
   -DWITH_GPU=OFF \
   -DWITH_STYLE_CHECK=OFF \

@@ -41,7 +41,7 @@ cd build
 # WITH_MKLDNN=ON|OFF
 PADDLE_LIB=/paddle/lib/dir
-# PADDLE_LIB is the same with FLUID_INSTALL_DIR when building the lib
+# PADDLE_LIB is the same with PADDLE_INSTALL_DIR when building the lib
 cmake .. -DPADDLE_LIB=$PADDLE_LIB \
   -DWITH_MKLDNN=OFF \
   -DWITH_MKL=OFF
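Taken together, the updated README builds the library with the renamed `PADDLE_INSTALL_DIR` cache variable and then reuses the same path as `PADDLE_LIB` for the demo. A condensed sketch under those assumptions; the path and extra flags are placeholders, not values fixed by this commit:

```bash
# Step 1 (in Paddle/build): install the libraries into a custom location via the renamed variable.
PADDLE_LIB=/paddle/lib/dir                  # placeholder path from the README
cmake .. -DPADDLE_INSTALL_DIR=$PADDLE_LIB \
  -DCMAKE_BUILD_TYPE=Release \
  -DWITH_GPU=OFF \
  -DWITH_STYLE_CHECK=OFF
make -j"$(nproc)" fluid_lib_dist            # install target; its name is unchanged by this commit

# Step 2 (in the demo's own build directory): point the demo trainer at the same directory.
cmake .. -DPADDLE_LIB=$PADDLE_LIB -DWITH_MKLDNN=OFF -DWITH_MKL=OFF
make
```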
paddle/fluid/train/demo/run.sh

@@ -14,12 +14,12 @@ function download() {
 download
 # build demo trainer
-fluid_install_dir=${PADDLE_ROOT}/build/fluid_install_dir
+paddle_install_dir=${PADDLE_ROOT}/build/paddle_install_dir
 mkdir -p build
 cd build
 rm -rf *
-cmake .. -DPADDLE_LIB=$fluid_install_dir \
+cmake .. -DPADDLE_LIB=$paddle_install_dir \
   -DWITH_MKLDNN=$TURN_ON_MKL \
   -DWITH_MKL=$TURN_ON_MKL
 make
paddle/fluid/train/imdb_demo/README.md

@@ -11,7 +11,7 @@ PADDLE_ROOT=./Paddle
 cd Paddle
 mkdir build
 cd build
-cmake -DFLUID_INFERENCE_INSTALL_DIR=$PADDLE_ROOT \
+cmake -DPADDLE_INFERENCE_INSTALL_DIR=$PADDLE_ROOT \
   -DCMAKE_BUILD_TYPE=Release \
   -DWITH_PYTHON=OFF \
   -DWITH_MKL=OFF \

@@ -40,7 +40,7 @@ see: [IMDB Dataset of 50K Movie Reviews | Kaggle](https://www.kaggle.com/lakshmi
 mkdir build
 cd build
 rm -rf *
-PADDLE_LIB=path/to/Paddle/build/fluid_install_dir
+PADDLE_LIB=path/to/Paddle/build/paddle_install_dir
 cmake .. -DPADDLE_LIB=$PADDLE_LIB -DWITH_MKLDNN=OFF -DWITH_MKL=OFF
 make
 ```
paddle/scripts/paddle_build.bat

@@ -213,10 +213,10 @@ echo ========================================
 for /F %%# in ('wmic os get localdatetime ^| findstr 20') do set end=%%#
 set end=%end:~4,10%
 call :timestamp "%start%" "%end%" "Build"
-tree /F %cd%\fluid_inference_install_dir\paddle
-%cache_dir%\tools\busybox64.exe du -h -d 0 %cd%\fluid_inference_install_dir\paddle\lib > lib_size.txt
+tree /F %cd%\paddle_inference_install_dir\paddle
+%cache_dir%\tools\busybox64.exe du -h -d 0 %cd%\paddle_inference_install_dir\paddle\lib > lib_size.txt
 set /p libsize=< lib_size.txt
-for /F %%i in ("%libsize%") do echo "Windows FLuid_Inference Size: %%i"
+for /F %%i in ("%libsize%") do echo "Windows Paddle_Inference Size: %%i"
 %cache_dir%\tools\busybox64.exe du -h -d 0 %cd%\python\dist > whl_size.txt
 set /p whlsize=< whl_size.txt
 for /F %%i in ("%whlsize%") do echo "Windows PR whl Size: %%i"
paddle/scripts/paddle_build.sh

@@ -362,12 +362,12 @@ function build_size() {
     Calculate /paddle/build size and PR whl size
     ============================================
 EOF
-    if [ "$1" == "fluid_inference" ]; then
+    if [ "$1" == "paddle_inference" ]; then
         cd ${PADDLE_ROOT}/build
-        cp -r fluid_inference_install_dir fluid_inference
-        tar -czf fluid_inference.tgz fluid_inference
-        buildSize=$(du -h --max-depth=0 ${PADDLE_ROOT}/build/fluid_inference.tgz |awk '{print $1}')
-        echo "FLuid_Inference Size: $buildSize"
+        cp -r paddle_inference_install_dir paddle_inference
+        tar -czf paddle_inference.tgz paddle_inference
+        buildSize=$(du -h --max-depth=0 ${PADDLE_ROOT}/build/paddle_inference.tgz |awk '{print $1}')
+        echo "Paddle_Inference Size: $buildSize"
     else
         SYSTEM=`uname -s`
         if [ "$SYSTEM" == "Darwin" ]; then

@@ -1446,7 +1446,7 @@ EOF
     fi
     endTime_s=`date +%s`
     echo "Build Time: $[ $endTime_s - $startTime_s ]s"
-    build_size "fluid_inference"
+    build_size "paddle_inference"
 }
 function tar_fluid_lib() {

@@ -1456,10 +1456,10 @@ function tar_fluid_lib() {
     ========================================
 EOF
     cd ${PADDLE_ROOT}/build
-    cp -r fluid_install_dir fluid
+    cp -r paddle_install_dir fluid
     tar -czf fluid.tgz fluid
-    cp -r fluid_inference_install_dir fluid_inference
-    tar -czf fluid_inference.tgz fluid_inference
+    cp -r paddle_inference_install_dir paddle_inference
+    tar -czf paddle_inference.tgz paddle_inference
 }
 function test_fluid_lib() {
paddle/scripts/windows_build/build.bat

@@ -118,8 +118,8 @@ call:Build
 echo PACKAGE INFERENCE LIBRARY
 mkdir inference_dist
-%PYTHON_DIR%\python.exe -c "import shutil;shutil.make_archive('inference_dist/fluid_inference_install_dir', 'zip', root_dir='fluid_inference_install_dir')"
+%PYTHON_DIR%\python.exe -c "import shutil;shutil.make_archive('inference_dist/paddle_inference_install_dir', 'zip', root_dir='paddle_inference_install_dir')"
-%PYTHON_DIR%\python.exe -c "import shutil;shutil.make_archive('inference_dist/fluid_install_dir', 'zip', root_dir='fluid_install_dir')"
+%PYTHON_DIR%\python.exe -c "import shutil;shutil.make_archive('inference_dist/paddle_install_dir', 'zip', root_dir='paddle_install_dir')"
 echo BUILD INFERENCE LIBRARY COMPLETE
 goto :END
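A rename like this is easy to leave half-finished, so a quick sanity check, using generic tooling rather than anything in this commit, is to search the tree for leftover references to the old directory names:

```bash
# Run from the repository root; prints any stale references to the old directory names.
grep -RIn --exclude-dir=.git \
  -e 'fluid_inference_install_dir' \
  -e 'fluid_inference_c_install_dir' \
  -e 'fluid_install_dir' \
  . || echo "no stale references found"
```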