Commit 40a6b203
Authored on Jul 01, 2019 by sangoly

add cxx demo readme & refine deployment CMakeList.txt

Parent: b275e16a
Showing 8 changed files with 164 additions and 61 deletions (+164, -61)
paddle/fluid/lite/CMakeLists.txt (+74, -57)
paddle/fluid/lite/demo/cxx/README.md (+43, -0)
paddle/fluid/lite/demo/cxx/makefiles/mobile_full/Makefile.android.armv7 (+22, -0)
paddle/fluid/lite/demo/cxx/makefiles/mobile_full/Makefile.android.armv8 (+0, -0)
paddle/fluid/lite/demo/cxx/makefiles/mobile_light/Makefile.android.armv7 (+22, -0)
paddle/fluid/lite/demo/cxx/makefiles/mobile_light/Makefile.android.armv8 (+0, -0)
paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc (+2, -3)
paddle/fluid/lite/tools/build.sh (+1, -1)
paddle/fluid/lite/CMakeLists.txt

```diff
@@ -222,13 +222,10 @@ add_subdirectory(api)
 add_subdirectory(gen_code)
 add_subdirectory(tools)
 
-# Deployment required
-lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1.tar.gz")
-
 if (WITH_TESTING)
     lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "lite_naive_model.tar.gz")
     if(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
         lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1.tar.gz")
         lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v2_relu.tar.gz")
         lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "resnet50.tar.gz")
         lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "inception_v4_simple.tar.gz")
@@ -238,62 +235,82 @@ if (WITH_TESTING)
     endif()
 endif()
 
-# for publish
-set(INFER_LITE_PUBLISH_ROOT "${CMAKE_BINARY_DIR}/inference_lite_lib.${ARM_TARGET_OS}.${ARM_TARGET_ARCH_ABI}")
-message(STATUS "publish inference lib to ${INFER_LITE_PUBLISH_ROOT}")
-# The final target for publish lite lib
-add_custom_target(publish_inference_lite)
-#cc_library(inference_cxx_lib DEPS cxx_api_lite)
-add_custom_target(publish_inference_cxx_lib ${TARGET}
-    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
-    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/bin"
-    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
-    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/third_party"
-    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
-    COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/paddle_*.h" "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
-    COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_full_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
-    COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/model_optimize_tool" "${INFER_LITE_PUBLISH_ROOT}/bin"
-    COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/gen_code/paddle_code_generator" "${INFER_LITE_PUBLISH_ROOT}/bin"
-    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/glog" "${INFER_LITE_PUBLISH_ROOT}/third_party"
-    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/gflags" "${INFER_LITE_PUBLISH_ROOT}/third_party"
-    COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_full" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
-    COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/Makefile.def" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
-    )
-add_dependencies(publish_inference_cxx_lib model_optimize_tool)
-add_dependencies(publish_inference_cxx_lib paddle_code_generator)
-add_dependencies(publish_inference_cxx_lib bundle_full_api)
-add_dependencies(publish_inference_lite publish_inference_cxx_lib)
-
-if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
-    #cc_library(inference_mobile_lib DEPS light_api_lite)
-    # copy cpp mobile_light demo/lib
-    add_custom_target(publish_inference_mobile_lib ${TARGET}
+if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND LITE_WITH_ARM)
+    # for publish
+    set(INFER_LITE_PUBLISH_ROOT "${CMAKE_BINARY_DIR}/inference_lite_lib.${ARM_TARGET_OS}.${ARM_TARGET_ARCH_ABI}")
+    if (LITE_WITH_OPENCL)
+        set(INFER_LITE_PUBLISH_ROOT "${INFER_LITE_PUBLISH_ROOT}.opencl")
+    endif(LITE_WITH_OPENCL)
+    message(STATUS "publish inference lib to ${INFER_LITE_PUBLISH_ROOT}")
+    # The final target for publish lite lib
+    add_custom_target(publish_inference_lite)
+    # add cxx lib
+    add_custom_target(publish_inference_cxx_lib ${TARGET}
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/bin"
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
+        COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/paddle_*.h" "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
+        COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_full_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
+        COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_light_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
+        COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/model_optimize_tool" "${INFER_LITE_PUBLISH_ROOT}/bin"
+        COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/gen_code/paddle_code_generator" "${INFER_LITE_PUBLISH_ROOT}/bin"
+        )
+    add_dependencies(publish_inference_cxx_lib model_optimize_tool)
+    add_dependencies(publish_inference_cxx_lib paddle_code_generator)
+    add_dependencies(publish_inference_cxx_lib bundle_full_api)
+    add_dependencies(publish_inference_cxx_lib bundle_light_api)
+    add_dependencies(publish_inference_lite publish_inference_cxx_lib)
+
+    if (LITE_WITH_JAVA)
+        # add java lib
+        add_custom_target(publish_inference_java_lib ${TARGET}
+            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/java/so"
+            COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so" "${INFER_LITE_PUBLISH_ROOT}/java/so"
+            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/android/jni/src" "${INFER_LITE_PUBLISH_ROOT}/java"
+            )
+        add_dependencies(publish_inference_java_lib paddle_lite_jni)
+        add_dependencies(publish_inference_lite publish_inference_java_lib)
+    endif()
+
+    if ((ARM_TARGET_OS STREQUAL "android") AND (NOT LITE_WITH_OPENCL) AND
+        ((ARM_TARGET_ARCH_ABI STREQUAL armv7) OR (ARM_TARGET_ARCH_ABI STREQUAL armv8)))
+        # copy
+        add_custom_target(publish_inference_android_cxx_demos ${TARGET}
+            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/third_party"
+            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
+            COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/glog" "${INFER_LITE_PUBLISH_ROOT}/third_party"
+            COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/gflags" "${INFER_LITE_PUBLISH_ROOT}/third_party"
+            COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/Makefile.def" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
+            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_full" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
+            COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/makefiles/mobile_full/Makefile.${ARM_TARGET_OS}.${ARM_TARGET_ARCH_ABI}" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx/mobile_full/Makefile"
+            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_light" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
+            COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/makefiles/mobile_light/Makefile.${ARM_TARGET_OS}.${ARM_TARGET_ARCH_ABI}" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx/mobile_light/Makefile"
+            )
+        add_dependencies(publish_inference_android_cxx_demos glog gflags)
+        add_dependencies(publish_inference_cxx_lib publish_inference_android_cxx_demos)
+
+        if (LITE_WITH_JAVA)
+            # copy java mobile_light demo/lib
+            add_custom_target(publish_inference_android_java_demo ${TARGET}
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java"
+                COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/java/android" "${INFER_LITE_PUBLISH_ROOT}/demo/java"
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm7"
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm8"
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm64-v8a"
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/armeabi-v7a"
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/x86"
+                )
-    add_dependencies(publish_inference_mobile_lib paddle_api_light bundle_light_api)
-    add_dependencies(publish_inference_lite publish_inference_mobile_lib)
-    if (LITE_WITH_JAVA AND LITE_WITH_ARM)
-        # copy java mobile_light demo/lib
-        add_custom_target(publish_java_inference_mobile_lib ${TARGET}
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/java/so"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java"
-            COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so" "${INFER_LITE_PUBLISH_ROOT}/java/so"
-            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/android/jni/src" "${INFER_LITE_PUBLISH_ROOT}/java"
-            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/java/android" "${INFER_LITE_PUBLISH_ROOT}/demo/java"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm7"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm8"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm64-v8a"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/armeabi-v7a"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/x86"
-            )
-        add_dependencies(publish_java_inference_mobile_lib paddle_lite_jni)
-        add_dependencies(publish_inference_lite publish_java_inference_mobile_lib)
+            add_dependencies(publish_inference_java_lib publish_inference_android_java_demo)
+        endif()
     endif()
-endif()
+
+    if (LITE_WITH_OPENCL)
+        add_custom_target(publish_inference_opencl ${TARGET}
+            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/opencl"
+            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/opencl/cl_kernel" "${INFER_LITE_PUBLISH_ROOT}/opencl"
+            )
+        add_dependencies(publish_inference_cxx_lib publish_inference_opencl)
+    endif()
 endif()
```
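With this change the packaging step is decoupled from the regular test build (see the paddle/fluid/lite/tools/build.sh hunk at the bottom of this commit, where `make publish_inference_lite` is commented out). A minimal sketch of invoking the target by hand, assuming a cross-compile build directory already configured by `paddle/fluid/lite/tools/build.sh`; the directory name and core count here are hypothetical:

```shell
# Hypothetical manual invocation from an ARM build directory.
cd build.lite.android.armv8   # your build dir name may differ
make publish_inference_lite -j8
# On success the bundle lands in inference_lite_lib.<os>.<abi>/
# (with a .opencl suffix when LITE_WITH_OPENCL is ON).
```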
paddle/fluid/lite/demo/cxx/README.md
new file (0 → 100644)
# C++ Android Demo

1. Build a Docker image from `paddle/fluid/lite/tools/Dockerfile.mobile`.

2. Start and enter the Docker container, then run `wget http://paddle-inference-dist.bj.bcebos.com/inference_lite_lib.android.armv8.tar.gz` to download the prebuilt demo environment. (For the armv7 demo, download `wget http://paddle-inference-dist.bj.bcebos.com/inference_lite_lib.android.armv7.tar.gz` instead.)

3. Unpack the archive: `tar zxvf inference_lite_lib.android.armv8.tar.gz`.

4. Run the following commands to prepare the emulator environment:

```shell
# armv8
adb kill-server
adb devices | grep emulator | cut -f1 | while read line; do adb -s $line emu kill; done
echo n | avdmanager create avd -f -n paddle-armv8 -k "system-images;android-24;google_apis;arm64-v8a"
echo -ne '\n' | ${ANDROID_HOME}/emulator/emulator -avd paddle-armv8 -noaudio -no-window -gpu off -port 5554 &
sleep 1m
```

```shell
# armv7
adb kill-server
adb devices | grep emulator | cut -f1 | while read line; do adb -s $line emu kill; done
echo n | avdmanager create avd -f -n paddle-armv7 -k "system-images;android-24;google_apis;armeabi-v7a"
echo -ne '\n' | ${ANDROID_HOME}/emulator/emulator -avd paddle-armv7 -noaudio -no-window -gpu off -port 5554 &
sleep 1m
```

5. Prepare the model, then build and run the full-API demo:

```shell
cd inference_lite_lib.android.armv8/demo/cxx/mobile_full
wget http://paddle-inference-dist.bj.bcebos.com/mobilenet_v1.tar.gz
tar zxvf mobilenet_v1.tar.gz
make
adb -s emulator-5554 push mobilenet_v1 /data/local/tmp/
adb -s emulator-5554 push mobilenetv1_full_api /data/local/tmp/
adb -s emulator-5554 shell chmod +x /data/local/tmp/mobilenetv1_full_api
adb -s emulator-5554 shell "/data/local/tmp/mobilenetv1_full_api --model_dir=/data/local/tmp/mobilenet_v1 --optimized_model_dir=/data/local/tmp/mobilenet_v1.opt"
```

On success, the predicted probabilities of the top 10 classes are printed to the console.

6. Build and run the light-API demo:

```shell
cd ../mobile_light
make
adb -s emulator-5554 push mobilenetv1_light_api /data/local/tmp/
adb -s emulator-5554 shell chmod +x /data/local/tmp/mobilenetv1_light_api
adb -s emulator-5554 shell "/data/local/tmp/mobilenetv1_light_api --model_dir=/data/local/tmp/mobilenet_v1.opt"
```

On success, the predicted probabilities of the top 10 classes are printed to the console.
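A note on step 4 above: `sleep 1m` is a fixed guess at emulator boot time. A more robust variant, offered here as a sketch rather than part of the README itself (it assumes the emulator from step 4 is listening on port 5554), polls Android's standard `sys.boot_completed` property before pushing files in step 5:

```shell
# Wait until the emulator reports a completed boot instead of sleeping blindly.
adb -s emulator-5554 wait-for-device
until [ "$(adb -s emulator-5554 shell getprop sys.boot_completed | tr -d '\r')" = "1" ]; do
  sleep 5
done
```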
paddle/fluid/lite/demo/cxx/makefiles/mobile_full/Makefile.android.armv7
new file (0 → 100644)
```makefile
ARM_ABI = arm7
export ARM_ABI

include ../Makefile.def

LITE_ROOT = ../../../

CXX_INCLUDES = $(INCLUDES) -I$(LITE_ROOT)/cxx/include
CXX_LIBS = $(THIRD_PARTY_LIBS) $(LITE_ROOT)/cxx/lib/libpaddle_api_full_bundled.a $(SYSTEM_LIBS)

mobilenetv1_full_api: mobilenetv1_full_api.o
	$(CC) $(SYSROOT_LINK) $(CXXFLAGS_LINK) mobilenetv1_full_api.o -o mobilenetv1_full_api $(CXX_LIBS) $(LDFLAGS)

mobilenetv1_full_api.o: mobilenetv1_full_api.cc
	$(CC) $(SYSROOT_COMPLILE) $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o mobilenetv1_full_api.o -c mobilenetv1_full_api.cc

.PHONY: clean
clean:
	rm mobilenetv1_full_api.o
	rm mobilenetv1_full_api
```
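This armv7 Makefile parallels the armv8 Makefile moved below, selecting the target ABI via `ARM_ABI`; the publish target installs the matching variant as plain `Makefile`. As a hedged sketch, the armv7 demo flow is the same as README step 5 apart from the unpacked directory name (assuming the armv7 tarball from step 2 was downloaded):

```shell
# Hypothetical armv7 variant of README step 5; only the directory name changes.
tar zxvf inference_lite_lib.android.armv7.tar.gz
cd inference_lite_lib.android.armv7/demo/cxx/mobile_full
make  # picks up the Makefile installed from Makefile.android.armv7
```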
paddle/fluid/lite/demo/cxx/mobile_full/Makefile → paddle/fluid/lite/demo/cxx/makefiles/mobile_full/Makefile.android.armv8
File moved without changes.
paddle/fluid/lite/demo/cxx/makefiles/mobile_light/Makefile.android.armv7
new file (0 → 100644)
```makefile
ARM_ABI = arm7
export ARM_ABI

include ../Makefile.def

LITE_ROOT = ../../../

CXX_INCLUDES = $(INCLUDES) -I$(LITE_ROOT)/cxx/include
CXX_LIBS = $(THIRD_PARTY_LIBS) $(LITE_ROOT)/cxx/lib/libpaddle_api_light_bundled.a $(SYSTEM_LIBS)

mobilenetv1_light_api: mobilenetv1_light_api.o
	$(CC) $(SYSROOT_LINK) $(CXXFLAGS_LINK) mobilenetv1_light_api.o -o mobilenetv1_light_api $(CXX_LIBS) $(LDFLAGS)

mobilenetv1_light_api.o: mobilenetv1_light_api.cc
	$(CC) $(SYSROOT_COMPLILE) $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o mobilenetv1_light_api.o -c mobilenetv1_light_api.cc

.PHONY: clean
clean:
	rm mobilenetv1_light_api.o
	rm mobilenetv1_light_api
```
paddle/fluid/lite/demo/cxx/mobile_light/Makefile → paddle/fluid/lite/demo/cxx/makefiles/mobile_light/Makefile.android.armv8
File moved without changes.
paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc
The full-API demo now prefers the ARM place and accepts only ARM as a valid place (previously it preferred x86):
```diff
@@ -36,9 +36,8 @@ void RunModel() {
   // 1. Set CxxConfig
   CxxConfig config;
   config.set_model_dir(FLAGS_model_dir);
-  config.set_preferred_place(Place{TARGET(kX86), PRECISION(kFloat)});
-  config.set_valid_places({Place{TARGET(kX86), PRECISION(kFloat)},
-                           Place{TARGET(kARM), PRECISION(kFloat)}});
+  config.set_preferred_place(Place{TARGET(kARM), PRECISION(kFloat)});
+  config.set_valid_places({Place{TARGET(kARM), PRECISION(kFloat)}});
 
   // 2. Create PaddlePredictor by CxxConfig
   std::shared_ptr<PaddlePredictor> predictor =
```
paddle/fluid/lite/tools/build.sh
The publish step is no longer run as part of the test build:
```diff
@@ -130,7 +130,7 @@ function build {
     make lite_compile_deps -j$NUM_CORES_FOR_COMPILE
 
     # test publish inference lib
-    make publish_inference_lite
+    # make publish_inference_lite
 }
 
 # It will eagerly test all lite related unittests.
```