Commit 09f1ec4d

Authored on Mar 02, 2020 by huzhiqiang; committed via GitHub on Mar 02, 2020

fix cpp demo to apply new API test=develop (#3052)

Parent: 2995645a

Showing 6 changed files with 36 additions and 36 deletions (+36 -36)
Changed files:
  lite/demo/cxx/mask_detection/mask_detection.cc        +8 -8
  lite/demo/cxx/mobile_classify/mobile_classify.cc      +6 -6
  lite/demo/cxx/ssd_detection/ssd_detection.cc          +5 -5
  lite/demo/cxx/test_cv/test_img_prepross.cc            +6 -6
  lite/demo/cxx/test_cv/test_model_cv.cc                +6 -6
  lite/demo/cxx/yolov3_detection/yolov3_detection.cc    +5 -5
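Every file in this commit makes the same change: the demos stop calling MobileConfig::set_model_dir(), which pointed at a directory of model files, and instead call MobileConfig::set_model_from_file(), which loads a single optimized model file (typically a .nb file produced by Paddle-Lite's opt tool). Below is a minimal sketch of the new loading pattern; the model file name is illustrative and not taken from the commit.

#include <iostream>
#include <memory>
#include "paddle_api.h"  // NOLINT

using namespace paddle::lite_api;  // NOLINT

int main() {
  // Old API (removed by this commit): config.set_model_dir("mobilenet_v1");
  // New API: point the config at one optimized .nb model file instead.
  MobileConfig config;
  config.set_model_from_file("mobilenet_v1.nb");  // illustrative file name

  // Predictor creation itself is unchanged.
  std::shared_ptr<PaddlePredictor> predictor =
      CreatePaddlePredictor<MobileConfig>(config);
  std::cout << "predictor ready" << std::endl;
  return 0;
}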
lite/demo/cxx/mask_detection/mask_detection.cc

@@ -125,8 +125,8 @@ void pre_process(const cv::Mat& img,
   neon_mean_scale(dimg, data, width * height, mean, scale);
 }
 
-void RunModel(std::string det_model_dir,
-              std::string class_model_dir,
+void RunModel(std::string det_model_file,
+              std::string class_model_file,
               std::string img_path) {
   // Prepare
   cv::Mat img = imread(img_path, cv::IMREAD_COLOR);

@@ -138,7 +138,7 @@ void RunModel(std::string det_model_dir,
   // Detection
   MobileConfig config;
-  config.set_model_dir(det_model_dir);
+  config.set_model_from_file(det_model_file);
 
   // Create Predictor For Detction Model
   std::shared_ptr<PaddlePredictor> predictor =

@@ -185,7 +185,7 @@ void RunModel(std::string det_model_dir,
   }
 
   // Classification
-  config.set_model_dir(class_model_dir);
+  config.set_model_from_file(class_model_file);
 
   // Create Predictor For Classification Model
   predictor = CreatePaddlePredictor<MobileConfig>(config);

@@ -290,12 +290,12 @@ void RunModel(std::string det_model_dir,
 int main(int argc, char** argv) {
   if (argc < 3) {
     std::cerr << "[ERROR] usage: " << argv[0]
-              << " detction_model_dir classification_model_dir image_path\n";
+              << " detction_model_file classification_model_file image_path\n";
     exit(1);
   }
-  std::string detect_model_dir = argv[1];
-  std::string classify_model_dir = argv[2];
+  std::string detect_model_file = argv[1];
+  std::string classify_model_file = argv[2];
   std::string img_path = argv[3];
-  RunModel(detect_model_dir, classify_model_dir, img_path);
+  RunModel(detect_model_file, classify_model_file, img_path);
   return 0;
 }
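mask_detection.cc is the one demo that creates two predictors, one for face detection and one for mask classification, from the same MobileConfig object. A hedged sketch of that reuse pattern under the new API; the model file names are placeholders, not taken from the commit.

#include <memory>
#include "paddle_api.h"  // NOLINT

using namespace paddle::lite_api;  // NOLINT

int main() {
  MobileConfig config;

  // Detection predictor from the first optimized model file (placeholder name).
  config.set_model_from_file("face_detection.nb");
  std::shared_ptr<PaddlePredictor> det_predictor =
      CreatePaddlePredictor<MobileConfig>(config);

  // The same config object is re-pointed at the second model file
  // (placeholder name) and reused to create the classification predictor.
  config.set_model_from_file("mask_classification.nb");
  std::shared_ptr<PaddlePredictor> cls_predictor =
      CreatePaddlePredictor<MobileConfig>(config);

  return 0;
}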
lite/demo/cxx/mobile_classify/mobile_classify.cc

@@ -126,7 +126,7 @@ void pre_process(const cv::Mat& img,
   neon_mean_scale(dimg, data, width * height, means, scales);
 }
 
-void RunModel(std::string model_dir,
+void RunModel(std::string model_file,
               std::string img_path,
               const std::vector<std::string>& labels,
               const int topk,

@@ -134,7 +134,7 @@ void RunModel(std::string model_dir,
               int height) {
   // 1. Set MobileConfig
   MobileConfig config;
-  config.set_model_dir(model_dir);
+  config.set_model_from_file(model_file);
 
   // 2. Create PaddlePredictor by MobileConfig
   std::shared_ptr<PaddlePredictor> predictor =

@@ -169,12 +169,12 @@ void RunModel(std::string model_dir,
 int main(int argc, char** argv) {
   if (argc < 4) {
     std::cerr << "[ERROR] usage: " << argv[0]
-              << " model_dir image_path label_file\n";
+              << " model_file image_path label_file\n";
     exit(1);
   }
-  printf("parameter: model_dir, image_path and label_file are necessary\n");
+  printf("parameter: model_file, image_path and label_file are necessary\n");
   printf("parameter: topk, input_width, input_height, are optional\n");
-  std::string model_dir = argv[1];
+  std::string model_file = argv[1];
   std::string img_path = argv[2];
   std::string label_file = argv[3];
   std::vector<std::string> labels;

@@ -190,6 +190,6 @@ int main(int argc, char** argv) {
     height = atoi(argv[6]);
   }
-  RunModel(model_dir, img_path, labels, topk, width, height);
+  RunModel(model_file, img_path, labels, topk, width, height);
   return 0;
 }
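mobile_classify.cc wires the new model_file argument into a full classification run. A self-contained sketch of that flow, with a dummy input standing in for the demo's image preprocessing; the model file name and the 1x3x224x224 input shape are assumptions, not taken from the commit.

#include <iostream>
#include <memory>
#include "paddle_api.h"  // NOLINT

using namespace paddle::lite_api;  // NOLINT

int main() {
  // 1. Set MobileConfig with the optimized model file (illustrative name).
  MobileConfig config;
  config.set_model_from_file("mobilenet_v1.nb");

  // 2. Create PaddlePredictor by MobileConfig.
  std::shared_ptr<PaddlePredictor> predictor =
      CreatePaddlePredictor<MobileConfig>(config);

  // 3. Fill a dummy 1x3x224x224 input; the real demo writes the
  //    preprocessed image here via neon_mean_scale.
  auto input = predictor->GetInput(0);
  input->Resize({1, 3, 224, 224});
  float* in_data = input->mutable_data<float>();
  for (int i = 0; i < 1 * 3 * 224 * 224; ++i) in_data[i] = 0.f;

  // 4. Run and read back the class scores.
  predictor->Run();
  auto output = predictor->GetOutput(0);
  const float* scores = output->data<float>();
  std::cout << "first score: " << scores[0] << std::endl;
  return 0;
}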
lite/demo/cxx/ssd_detection/ssd_detection.cc

@@ -162,10 +162,10 @@ std::vector<Object> detect_object(const float* data,
   return rect_out;
 }
 
-void RunModel(std::string model_dir, std::string img_path) {
+void RunModel(std::string model_file, std::string img_path) {
   // 1. Set MobileConfig
   MobileConfig config;
-  config.set_model_dir(model_dir);
+  config.set_model_from_file(model_file);
 
   // 2. Create PaddlePredictor by MobileConfig
   std::shared_ptr<PaddlePredictor> predictor =

@@ -199,11 +199,11 @@ void RunModel(std::string model_dir, std::string img_path) {
 int main(int argc, char** argv) {
   if (argc < 3) {
-    std::cerr << "[ERROR] usage: " << argv[0] << " model_dir image_path\n";
+    std::cerr << "[ERROR] usage: " << argv[0] << " model_file image_path\n";
     exit(1);
   }
-  std::string model_dir = argv[1];
+  std::string model_file = argv[1];
   std::string img_path = argv[2];
-  RunModel(model_dir, img_path);
+  RunModel(model_file, img_path);
   return 0;
 }
lite/demo/cxx/test_cv/test_img_prepross.cc

@@ -50,7 +50,7 @@ void test_img(std::vector<int> cluster_id,
               float rotate,
               FlipParam flip,
               LayoutType layout,
-              std::string model_dir,
+              std::string model_file,
               int test_iter = 1) {
   // init
   // paddle::lite::DeviceInfo::Init();

@@ -65,10 +65,10 @@ void test_img(std::vector<int> cluster_id,
       std::cout << "cluster: " << cls << ", threads: " << th << std::endl;
       // 1. Set MobileConfig
       MobileConfig config;
-      config.set_model_dir(model_dir);
+      config.set_model_from_file(model_file);
       config.set_power_mode((PowerMode)cls);
       config.set_threads(th);
-      std::cout << "model: " << model_dir;
+      std::cout << "model: " << model_file;
 
       // 2. Create PaddlePredictor by MobileConfig
       std::shared_ptr<PaddlePredictor> predictor =

@@ -359,9 +359,9 @@ int main(int argc, char** argv) {
   int flip = -1;
   float rotate = 90;
   int layout = 1;
-  std::string model_dir = "mobilenet_v1";
+  std::string model_file = "mobilenet_v1.nb";
   if (argc > 7) {
-    model_dir = argv[7];
+    model_file = argv[7];
   }
   if (argc > 8) {
     flip = atoi(argv[8]);

@@ -383,7 +383,7 @@ int main(int argc, char** argv) {
            rotate,
            (FlipParam)flip,
            (LayoutType)layout,
-           model_dir,
+           model_file,
            20);
   return 0;
 }
lite/demo/cxx/test_cv/test_model_cv.cc

@@ -111,7 +111,7 @@ void pre_process(const cv::Mat& img, int width, int height, Tensor dstTensor) {
 #endif
 }
 
-void RunModel(std::string model_dir,
+void RunModel(std::string model_file,
               std::string img_path,
               std::vector<int> input_shape,
               PowerMode power_mode,

@@ -120,7 +120,7 @@ void RunModel(std::string model_dir,
               int warmup = 0) {
   // 1. Set MobileConfig
   MobileConfig config;
-  config.set_model_dir(model_dir);
+  config.set_model_from_file(model_file);
   config.set_power_mode(power_mode);
   config.set_threads(thread_num);

@@ -161,7 +161,7 @@ void RunModel(std::string model_dir,
   }
 
   std::cout << "================== Speed Report ==================="
             << std::endl;
-  std::cout << "Model: " << model_dir
+  std::cout << "Model: " << model_file
             << ", power_mode: " << static_cast<int>(power_mode)
             << ", threads num " << thread_num << ", warmup: " << warmup
             << ", repeats: " << test_iter << ", avg time: " << lps / test_iter

@@ -187,10 +187,10 @@ void RunModel(std::string model_dir,
 int main(int argc, char** argv) {
   if (argc < 7) {
     std::cerr << "[ERROR] usage: " << argv[0]
-              << " model_dir image_path input_shape\n";
+              << " model_file image_path input_shape\n";
     exit(1);
   }
-  std::string model_dir = argv[1];
+  std::string model_file = argv[1];
   std::string img_path = argv[2];
   std::vector<int> input_shape;
   input_shape.push_back(atoi(argv[3]));

@@ -213,7 +213,7 @@ int main(int argc, char** argv) {
   if (argc > 10) {
     warmup = atoi(argv[10]);
   }
-  RunModel(model_dir,
+  RunModel(model_file,
            img_path,
            input_shape,
            (PowerMode)power_mode,
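test_model_cv.cc combines the new file-based loading with the CPU tuning knobs it already used (power mode, thread count) and a warmup-plus-repeats loop for its speed report. A rough sketch of that benchmarking pattern; the model file name, input shape, power mode, and repeat count are all illustrative, not taken from the commit.

#include <chrono>
#include <iostream>
#include <memory>
#include "paddle_api.h"  // NOLINT

using namespace paddle::lite_api;  // NOLINT

int main() {
  // New file-based loading plus the demo's CPU tuning knobs (values assumed).
  MobileConfig config;
  config.set_model_from_file("mobilenet_v1.nb");
  config.set_power_mode(LITE_POWER_HIGH);  // prefer big cores
  config.set_threads(2);

  std::shared_ptr<PaddlePredictor> predictor =
      CreatePaddlePredictor<MobileConfig>(config);

  // Dummy 1x3x224x224 input; the demo fills this from the image instead.
  auto input = predictor->GetInput(0);
  input->Resize({1, 3, 224, 224});
  float* data = input->mutable_data<float>();
  for (int i = 0; i < 1 * 3 * 224 * 224; ++i) data[i] = 0.f;

  // Warm up once, then time a few repeats, as the demo's speed report does.
  predictor->Run();
  const int repeats = 10;
  auto start = std::chrono::steady_clock::now();
  for (int i = 0; i < repeats; ++i) predictor->Run();
  auto end = std::chrono::steady_clock::now();
  double ms = std::chrono::duration<double, std::milli>(end - start).count();
  std::cout << "avg time: " << ms / repeats << " ms" << std::endl;
  return 0;
}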
lite/demo/cxx/yolov3_detection/yolov3_detection.cc

@@ -182,10 +182,10 @@ std::vector<Object> detect_object(const float* data,
   return rect_out;
 }
 
-void RunModel(std::string model_dir, std::string img_path) {
+void RunModel(std::string model_file, std::string img_path) {
   // 1. Set MobileConfig
   MobileConfig config;
-  config.set_model_dir(model_dir);
+  config.set_model_from_file(model_file);
 
   // 2. Create PaddlePredictor by MobileConfig
   std::shared_ptr<PaddlePredictor> predictor =

@@ -228,11 +228,11 @@ void RunModel(std::string model_dir, std::string img_path) {
 int main(int argc, char** argv) {
   if (argc < 3) {
-    std::cerr << "[ERROR] usage: " << argv[0] << " model_dir image_path\n";
+    std::cerr << "[ERROR] usage: " << argv[0] << " model_file image_path\n";
     exit(1);
   }
-  std::string model_dir = argv[1];
+  std::string model_file = argv[1];
   std::string img_path = argv[2];
-  RunModel(model_dir, img_path);
+  RunModel(model_file, img_path);
   return 0;
 }