机器未来 / Paddle (forked from PaddlePaddle / Paddle)
Commit ec0ea5ca
Authored June 28, 2019 by sangoly

complete deployment & add cxx demo

Parent: cdddfe68
Showing 7 changed files with 251 additions and 8 deletions (+251, −8).
paddle/fluid/lite/CMakeLists.txt                                  +26  −7
paddle/fluid/lite/api/CMakeLists.txt                               +1  −1
paddle/fluid/lite/demo/cxx/Makefile.def                           +37  −0
paddle/fluid/lite/demo/cxx/mobile_full/Makefile                   +22  −0
paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc    +75  −0
paddle/fluid/lite/demo/cxx/mobile_light/Makefile                  +22  −0
paddle/fluid/lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc  +68  −0
paddle/fluid/lite/CMakeLists.txt

@@ -221,10 +221,12 @@ add_subdirectory(gen_code)

```cmake
add_subdirectory(tools)

# Deployment required
lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1.tar.gz")

if (WITH_TESTING)
    lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "lite_naive_model.tar.gz")
    if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
        lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1.tar.gz")
        lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v2_relu.tar.gz")
        lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "resnet50.tar.gz")
        lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "inception_v4_simple.tar.gz")
```

@@ -246,27 +248,44 @@ add_custom_target(publish_inference_cxx_lib ${TARGET}

```cmake
    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/bin"
    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/third_party"
    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/models"
    COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/paddle_*.h" "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
    COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_full_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
    COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/model_optimize_tool" "${INFER_LITE_PUBLISH_ROOT}/bin"
    COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/gen_code/paddle_code_generator" "${INFER_LITE_PUBLISH_ROOT}/bin"
    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/glog" "${INFER_LITE_PUBLISH_ROOT}/third_party"
    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/gflags" "${INFER_LITE_PUBLISH_ROOT}/third_party"
    COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_full" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
    COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/Makefile.def" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/mobilenet_v1" "${INFER_LITE_PUBLISH_ROOT}/demo/models")
add_dependencies(publish_inference_cxx_lib model_optimize_tool)
add_dependencies(publish_inference_cxx_lib paddle_code_generator)
add_dependencies(publish_inference_cxx_lib bundle_full_api)
add_dependencies(publish_inference_cxx_lib extern_lite_download_mobilenet_v1_tar_gz)
add_dependencies(publish_inference_lite publish_inference_cxx_lib)

if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
    #cc_library(inference_mobile_lib DEPS light_api_lite)
    # copy cpp mobile_light demo/lib
    add_custom_target(publish_inference_mobile_lib ${TARGET}
        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/mobile/lib"
        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/mobile/bin"
        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/mobile/include"
        COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/paddle_*.h" "${INFER_LITE_PUBLISH_ROOT}/mobile/include"
        COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_light_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/mobile/lib"
        COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_light_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
        COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_light" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx")
    add_dependencies(publish_inference_mobile_lib paddle_api_light bundle_light_api)
    add_dependencies(publish_inference_lite publish_inference_mobile_lib)

    if (LITE_WITH_JAVA AND LITE_WITH_ARM)
        # copy java mobile_light demo/lib
        add_custom_target(publish_java_inference_mobile_lib ${TARGET}
            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/java/so"
            COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so" "${INFER_LITE_PUBLISH_ROOT}/java/so"
            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/android/jni/src" "${INFER_LITE_PUBLISH_ROOT}/java")
        add_dependencies(publish_java_inference_mobile_lib paddle_lite_jni)
        add_dependencies(publish_inference_lite publish_java_inference_mobile_lib)
    endif()
endif()
```
paddle/fluid/lite/api/CMakeLists.txt

@@ -113,7 +113,7 @@ lite_cc_library(paddle_api_full SRCS cxx_api_impl.cc DEPS cxx_api_lite paddle_ap

```diff
     ARM_DEPS ${arm_kernels}
     CL_DEPS ${opencl_kernels})

 # The final inference library for just MobileConfig.
-lite_cc_library(paddle_api_light SRCS light_api_impl.cc DEPS light_api_lite paddle_api_lite)
+lite_cc_library(paddle_api_light SRCS light_api_impl.cc DEPS light_api_lite paddle_api_lite mir_passes)
 bundle_static_library(paddle_api_full paddle_api_full_bundled bundle_full_api)
 bundle_static_library(paddle_api_light paddle_api_light_bundled bundle_light_api)
```
paddle/fluid/lite/demo/cxx/Makefile.def (new file, mode 100644)

```makefile
CXX_DEFINES = -DARM_WITH_OMP -DHPPL_STUB_FUNC -DLITE_WITH_ARM -DLITE_WITH_LIGHT_WEIGHT_FRAMEWORK \
              -DLITE_WITH_LINUX -DPADDLE_DISABLE_PROFILER -DPADDLE_NO_PYTHON -DPADDLE_WITH_TESTING
LDFLAGS = -latomic -pthread -ldl

SYSROOT_COMPLILE = --sysroot=/opt/android-ndk-r17c/sysroot

THIRD_PARTY_LIBS = ../../../third_party/glog/lib/libglog.a \
                   ../../../third_party/gflags/lib/libgflags.a

SYSTEM_INCLUDES = -I/opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/include \
                  -I/opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++abi/include \
                  -I/opt/android-ndk-r17c/sources/android/support/include \
                  -I/opt/android-ndk-r17c/sysroot/usr/include

THIRD_PARTY_INCLUDES = -I../../../third_party/gflags/include \
                       -I../../../third_party/glog/include

ifeq ($(ARM_ABI), arm8)
CC = /opt/android-ndk-r17c/toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64/bin/aarch64-linux-android-g++
CXX_FLAGS = -funwind-tables -no-canonical-prefixes -D__ANDROID_API__=22 -fexceptions -frtti -std=c++11 -fopenmp -O3 -DNDEBUG -fPIE
CXXFLAGS_LINK = $(CXX_FLAGS) -pie -Wl,--gc-sections
SYSROOT_LINK = --sysroot=/opt/android-ndk-r17c/platforms/android-24/arch-arm64
SYSTEM_LIBS = /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/arm64-v8a/libc++_static.a \
              /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/arm64-v8a/libc++abi.a
INCLUDES = $(SYSTEM_INCLUDES) -I/opt/android-ndk-r17c/sysroot/usr/include/aarch64-linux-android $(THIRD_PARTY_INCLUDES)
else
CC = /opt/android-ndk-r17c/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/arm-linux-androideabi-g++
CXX_FLAGS = -march=armv7-a -mthumb -mfpu=neon -mfloat-abi=softfp -funwind-tables -no-canonical-prefixes \
            -D__ANDROID_API__=22 -fexceptions -frtti -std=c++11 -fopenmp -O3 -DNDEBUG -fPIE
CXXFLAGS_LINK = $(CXX_FLAGS) -pie -Wl,--fix-cortex-a8 -Wl,--gc-sections -Wl,-z,nocopyreloc
SYSROOT_LINK = --sysroot=/opt/android-ndk-r17c/platforms/android-22/arch-arm
SYSTEM_LIBS = /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a/libc++_static.a \
              /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a/libc++abi.a \
              /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a/libandroid_support.a \
              /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a/libunwind.a
INCLUDES = $(SYSTEM_INCLUDES) -I/opt/android-ndk-r17c/sysroot/usr/include/arm-linux-androideabi $(THIRD_PARTY_INCLUDES)
endif
```
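The `-D` switches in `CXX_DEFINES` become ordinary preprocessor symbols when the demo sources are compiled, which is how ARM-specific code paths inside the library headers get selected. A minimal illustrative sketch of that mechanism (the guarded bodies below are placeholders, not actual Paddle-Lite code):

```cpp
// Illustrative only: how a flag such as -DLITE_WITH_ARM from CXX_DEFINES
// is consumed at compile time. The real guarded code lives inside the
// Paddle-Lite headers/sources; these bodies are placeholders.
#include <cstdio>

void ReportBuildConfig() {
#ifdef LITE_WITH_ARM
  std::puts("built with ARM kernels enabled");
#else
  std::puts("built without ARM kernels");
#endif
}
```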
paddle/fluid/lite/demo/cxx/mobile_full/Makefile (new file, mode 100644)

```makefile
ARM_ABI = arm8
export ARM_ABI

include ../Makefile.def

LITE_ROOT = ../../../

CXX_INCLUDES = $(INCLUDES) -I$(LITE_ROOT)/cxx/include
CXX_LIBS = $(THIRD_PARTY_LIBS) $(LITE_ROOT)/cxx/lib/libpaddle_api_full_bundled.a $(SYSTEM_LIBS)

mobilenetv1_full_api: mobilenetv1_full_api.o
	$(CC) $(SYSROOT_LINK) $(CXXFLAGS_LINK) mobilenetv1_full_api.o -o mobilenetv1_full_api $(CXX_LIBS) $(LDFLAGS)

mobilenetv1_full_api.o: mobilenetv1_full_api.cc
	$(CC) $(SYSROOT_COMPLILE) $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o mobilenetv1_full_api.o -c mobilenetv1_full_api.cc

.PHONY: clean
clean:
	rm mobilenetv1_full_api.o
	rm mobilenetv1_full_api
```
paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc
0 → 100644
浏览文件 @
ec0ea5ca
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <gflags/gflags.h>
#include <glog/logging.h>
#include <iostream>
#include <vector>
#include "paddle_api.h" // NOLINT
#include "paddle_use_kernels.h" // NOLINT
#include "paddle_use_ops.h" // NOLINT
#include "paddle_use_passes.h" // NOLINT
using
namespace
paddle
::
lite_api
;
// NOLINT
DEFINE_string
(
model_dir
,
""
,
"Model dir path."
);
DEFINE_string
(
optimized_model_dir
,
""
,
"Optimized model dir."
);
int64_t
ShapeProduction
(
const
shape_t
&
shape
)
{
int64_t
res
=
1
;
for
(
auto
i
:
shape
)
res
*=
i
;
return
res
;
}
void
RunModel
()
{
// 1. Set CxxConfig
CxxConfig
config
;
config
.
set_model_dir
(
FLAGS_model_dir
);
config
.
set_preferred_place
(
Place
{
TARGET
(
kX86
),
PRECISION
(
kFloat
)});
config
.
set_valid_places
({
Place
{
TARGET
(
kX86
),
PRECISION
(
kFloat
)},
Place
{
TARGET
(
kARM
),
PRECISION
(
kFloat
)}});
// 2. Create PaddlePredictor by CxxConfig
std
::
shared_ptr
<
PaddlePredictor
>
predictor
=
CreatePaddlePredictor
<
CxxConfig
>
(
config
);
// 3. Prepare input data
std
::
unique_ptr
<
Tensor
>
input_tensor
(
std
::
move
(
predictor
->
GetInput
(
0
)));
input_tensor
->
Resize
(
shape_t
({
1
,
3
,
224
,
224
}));
auto
*
data
=
input_tensor
->
mutable_data
<
float
>
();
for
(
int
i
=
0
;
i
<
ShapeProduction
(
input_tensor
->
shape
());
++
i
)
{
data
[
i
]
=
1
;
}
// 4. Run predictor
predictor
->
Run
();
// 5. Get output
std
::
unique_ptr
<
const
Tensor
>
output_tensor
(
std
::
move
(
predictor
->
GetOutput
(
0
)));
LOG
(
INFO
)
<<
"Ouput dim: "
<<
output_tensor
->
shape
()[
1
]
<<
std
::
endl
;
for
(
int
i
=
0
;
i
<
ShapeProduction
(
output_tensor
->
shape
());
i
+=
100
)
{
LOG
(
INFO
)
<<
"Output["
<<
i
<<
"]: "
<<
output_tensor
->
data
<
float
>
()[
i
]
<<
std
::
endl
;
}
// 6. Save optimition model
predictor
->
SaveOptimizedModel
(
FLAGS_optimized_model_dir
);
}
int
main
(
int
argc
,
char
**
argv
)
{
google
::
ParseCommandLineFlags
(
&
argc
,
&
argv
,
true
);
RunModel
();
return
0
;
}
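The demo feeds an all-ones input and only logs every 100th output value. For a classifier like MobileNet-v1, the usual next step is an argmax over the scores; the sketch below uses only the `shape()` and `data<float>()` accessors already exercised above (`Argmax` itself is a hypothetical helper, not part of the published API):

```cpp
// Hypothetical helper (not part of the Paddle-Lite API): index of the
// highest score in a flat output buffer.
#include <cstdint>

int64_t Argmax(const float* scores, int64_t n) {
  int64_t best = 0;
  for (int64_t i = 1; i < n; ++i) {
    if (scores[i] > scores[best]) best = i;  // track the running maximum
  }
  return best;
}

// Usage inside RunModel(), after step 5:
//   int64_t n = ShapeProduction(output_tensor->shape());
//   LOG(INFO) << "Top-1 class: " << Argmax(output_tensor->data<float>(), n);
```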
paddle/fluid/lite/demo/cxx/mobile_light/Makefile (new file, mode 100644)

```makefile
ARM_ABI = arm8
export ARM_ABI

include ../Makefile.def

LITE_ROOT = ../../../

CXX_INCLUDES = $(INCLUDES) -I$(LITE_ROOT)/cxx/include
CXX_LIBS = $(THIRD_PARTY_LIBS) $(LITE_ROOT)/cxx/lib/libpaddle_api_light_bundled.a $(SYSTEM_LIBS)

mobilenetv1_light_api: mobilenetv1_light_api.o
	$(CC) $(SYSROOT_LINK) $(CXXFLAGS_LINK) mobilenetv1_light_api.o -o mobilenetv1_light_api $(CXX_LIBS) $(LDFLAGS)

mobilenetv1_light_api.o: mobilenetv1_light_api.cc
	$(CC) $(SYSROOT_COMPLILE) $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o mobilenetv1_light_api.o -c mobilenetv1_light_api.cc

.PHONY: clean
clean:
	rm mobilenetv1_light_api.o
	rm mobilenetv1_light_api
```
paddle/fluid/lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc (new file, mode 100644)

```cpp
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <gflags/gflags.h>
#include <glog/logging.h>
#include <iostream>
#include <vector>
#include "paddle_api.h"          // NOLINT
#include "paddle_use_kernels.h"  // NOLINT
#include "paddle_use_ops.h"      // NOLINT
#include "paddle_use_passes.h"   // NOLINT

using namespace paddle::lite_api;  // NOLINT

DEFINE_string(model_dir, "", "Model dir path.");

int64_t ShapeProduction(const shape_t& shape) {
  int64_t res = 1;
  for (auto i : shape) res *= i;
  return res;
}

void RunModel() {
  // 1. Set MobileConfig
  MobileConfig config;
  config.set_model_dir(FLAGS_model_dir);

  // 2. Create PaddlePredictor by MobileConfig
  std::shared_ptr<PaddlePredictor> predictor =
      CreatePaddlePredictor<MobileConfig>(config);

  // 3. Prepare input data
  std::unique_ptr<Tensor> input_tensor(std::move(predictor->GetInput(0)));
  input_tensor->Resize({1, 3, 224, 224});
  auto* data = input_tensor->mutable_data<float>();
  for (int i = 0; i < ShapeProduction(input_tensor->shape()); ++i) {
    data[i] = 1;
  }

  // 4. Run predictor
  predictor->Run();

  // 5. Get output
  std::unique_ptr<const Tensor> output_tensor(
      std::move(predictor->GetOutput(0)));
  LOG(INFO) << "Output dim: " << output_tensor->shape()[1] << std::endl;
  for (int i = 0; i < ShapeProduction(output_tensor->shape()); i += 100) {
    LOG(INFO) << "Output[" << i << "]: " << output_tensor->data<float>()[i]
              << std::endl;
  }
}

int main(int argc, char** argv) {
  google::ParseCommandLineFlags(&argc, &argv, true);
  RunModel();
  return 0;
}
```
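Taken together, the two demos imply a two-step deployment flow: the full API (`CxxConfig`, with analysis passes linked in) optimizes and saves the model offline, and the light API (`MobileConfig`) loads the optimized copy on device. A minimal sketch of that handoff, composed only of calls that appear in the two demos above; the directory paths are placeholders:

```cpp
// Sketch of the full -> light handoff, assembled from the two demos above.
// Paths are placeholders; only APIs used in the demos appear here.
#include <memory>
#include "paddle_api.h"  // NOLINT

using namespace paddle::lite_api;  // NOLINT

void OptimizeThenDeploy() {
  // Offline (host): the full API optimizes and saves the model.
  CxxConfig cxx_config;
  cxx_config.set_model_dir("./mobilenet_v1");  // placeholder input dir
  cxx_config.set_preferred_place(Place{TARGET(kX86), PRECISION(kFloat)});
  cxx_config.set_valid_places({Place{TARGET(kX86), PRECISION(kFloat)},
                               Place{TARGET(kARM), PRECISION(kFloat)}});
  auto full = CreatePaddlePredictor<CxxConfig>(cxx_config);
  full->SaveOptimizedModel("./mobilenet_v1_opt");  // placeholder output dir

  // On device: the light API loads the already-optimized model.
  MobileConfig mobile_config;
  mobile_config.set_model_dir("./mobilenet_v1_opt");
  auto light = CreatePaddlePredictor<MobileConfig>(mobile_config);
  // ... prepare input, Run(), and read output exactly as in the demos ...
}
```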