BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit f395075e
Authored Nov 07, 2018 by Sylwester Fraczek

rebased and stuff broke

Parent: a60957f3
Showing 2 changed files with 4 additions and 29 deletions
paddle/fluid/inference/tests/api/CMakeLists.txt (+1 -0)
paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc (+3 -29)
paddle/fluid/inference/tests/api/CMakeLists.txt
@@ -86,6 +86,7 @@ inference_analysis_api_test_with_fake_data(test_analyzer_resnet50
set(MOBILENET_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet")
if (NOT EXISTS ${MOBILENET_INSTALL_DIR})
  inference_download_and_uncompress(${MOBILENET_INSTALL_DIR} "http://paddle-inference-dist.bj.bcebos.com/tensorrt_test" "mobilenet.tar.gz")
  file(RENAME ${MOBILENET_INSTALL_DIR}/mobilenet/__model__ ${MOBILENET_INSTALL_DIR}/mobilenet/model)
endif()
inference_analysis_test(test_analyzer_mobilenet SRCS analyzer_mobilenet_tester.cc
  EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
  ARGS --infer_model=${MOBILENET_INSTALL_DIR}/mobilenet)
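The ARGS value registered above is how the model directory reaches the tester: CMake puts --infer_model=${MOBILENET_INSTALL_DIR}/mobilenet on the test command line, and the C++ side reads it as the gflags flag FLAGS_infer_model used in the diff below. As a rough, self-contained sketch of that wiring (the real tester relies on Paddle's shared tester_helper.h and test main, not this standalone stub):

#include <gflags/gflags.h>
#include <gtest/gtest.h>
#include <string>

// Stand-in for the flag the test binary receives from CMake's
// ARGS --infer_model=${MOBILENET_INSTALL_DIR}/mobilenet.
DEFINE_string(infer_model, "", "Directory that holds the downloaded model.");

TEST(Example, infer_model_flag_is_set) {
  // The analyzer tester passes FLAGS_infer_model to helpers such as
  // SetFakeImageInput to locate the model files on disk.
  ASSERT_FALSE(FLAGS_infer_model.empty()) << "--infer_model was not set";
}

int main(int argc, char **argv) {
  ::testing::InitGoogleTest(&argc, argv);
  ::gflags::ParseCommandLineFlags(&argc, &argv, true);
  return RUN_ALL_TESTS();
}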
paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc
@@ -29,25 +29,7 @@ void SetConfig(AnalysisConfig *cfg) {
 }

 void SetInput(std::vector<std::vector<PaddleTensor>> *inputs) {
-  PADDLE_ENFORCE_EQ(FLAGS_test_all_data, 0, "Only have single batch of data.");
-  PaddleTensor input;
-  // channel=3, height/width=318
-  std::vector<int> shape({FLAGS_batch_size, 3, 318, 318});
-  input.shape = shape;
-  input.dtype = PaddleDType::FLOAT32;
-  // fill input data, for profile easily, do not use random data here.
-  size_t size = FLAGS_batch_size * 3 * 318 * 318;
-  input.data.Resize(size * sizeof(float));
-  float *input_data = static_cast<float *>(input.data.data());
-  for (size_t i = 0; i < size; i++) {
-    *(input_data + i) = static_cast<float>(i) / size;
-  }
-  std::vector<PaddleTensor> input_slots;
-  input_slots.assign({input});
-  (*inputs).emplace_back(input_slots);
+  SetFakeImageInput(inputs, FLAGS_infer_model);
 }
// Easy for profiling independently.
@@ -60,13 +42,6 @@ void profile(bool use_mkldnn = false) {
   std::vector<std::vector<PaddleTensor>> input_slots_all;
   SetInput(&input_slots_all);
   TestPrediction(cfg, input_slots_all, &outputs, FLAGS_num_threads);
-  if (FLAGS_num_threads == 1 && !FLAGS_test_all_data) {
-    PADDLE_ENFORCE_EQ(outputs.size(), 1UL);
-    size_t size = GetSize(outputs[0]);
-    // output is a 1000-dimension feature
-    EXPECT_EQ(size, 1000 * FLAGS_batch_size);
-  }
 }

 TEST(Analyzer_mobilenet, profile) { profile(); }
@@ -74,7 +49,7 @@ TEST(Analyzer_mobilenet, profile) { profile(); }
 TEST(Analyzer_mobilenet, profile_mkldnn) { profile(true /* use_mkldnn */); }
 #endif

-// Check the depthwise_conv status
+// Check the depthwise_conv pass status
 TEST(Analyzer_mobilenet, depthwise_conv_statis) {
   AnalysisConfig cfg;
   SetConfig(&cfg);
@@ -83,8 +58,7 @@ TEST(Analyzer_mobilenet, depthwise_conv_statis) {
  auto predictor = CreatePaddlePredictor<AnalysisConfig>(cfg);
  auto fuse_statis = GetFuseStatis(static_cast<AnalysisPredictor *>(predictor.get()), &num_ops);
  ASSERT_TRUE(fuse_statis.count("depthwise_conv_mkldnn_pass"));
  EXPECT_EQ(fuse_statis.at("depthwise_conv_mkldnn_pass"), 13);
  LOG(INFO) << "num_ops: " << num_ops;
}
// Compare result of NativeConfig and AnalysisConfig
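For context on the expected value in the depthwise_conv_statis test above: MobileNet v1 stacks 13 depthwise separable blocks after its initial standard convolution, so if depthwise_conv_mkldnn_pass rewrites every depthwise convolution in the model, the fuse statistics entry should report exactly 13 hits. A small standalone sanity check of that count (layer strides taken from the MobileNet v1 paper, independent of the Paddle sources):

#include <array>
#include <cstdio>

int main() {
  // Strides of the 13 depthwise 3x3 convolutions in MobileNet v1, in
  // network order; the count is what EXPECT_EQ(..., 13) relies on above.
  constexpr std::array<int, 13> kDepthwiseStrides = {
      1, 2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1};
  static_assert(kDepthwiseStrides.size() == 13,
                "MobileNet v1 has 13 depthwise convolution layers");
  std::printf("depthwise conv layers: %zu\n", kDepthwiseStrides.size());
  return 0;
}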