BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)

Commit db1747a5
Authored Jun 07, 2018 by tensor-tang
Parent: 9dd99395

enable word2vec multi-threads ut

Showing 1 changed file with 69 additions and 0 deletions

paddle/contrib/inference/test_paddle_inference_api_impl.cc  (+69 −0)
@@ -15,6 +15,8 @@ limitations under the License. */

#include <glog/logging.h>
#include <gtest/gtest.h>
#include <thread>
#include "gflags/gflags.h"
#include "paddle/contrib/inference/paddle_inference_api_impl.h"
#include "paddle/fluid/inference/tests/test_helper.h"
@@ -45,7 +47,11 @@ NativeConfig GetConfig() {
  config.model_dir = FLAGS_dirname + "word2vec.inference.model";
  LOG(INFO) << "dirname " << config.model_dir;
  config.fraction_of_gpu_memory = 0.15;
#ifdef PADDLE_WITH_CUDA
  config.use_gpu = true;
#else
  config.use_gpu = false;
#endif
  config.device = 0;
  return config;
}
@@ -149,4 +155,67 @@ TEST(paddle_inference_api_impl, image_classification) {
  free(data);
}

TEST(paddle_inference_api_native_multithreads, word2vec) {
  NativeConfig config = GetConfig();
  config.use_gpu = false;
  auto main_predictor = CreatePaddlePredictor<NativeConfig>(config);

  // prepare inputs data
  constexpr int num_jobs = 3;
  std::vector<std::vector<framework::LoDTensor>> jobs(num_jobs);
  std::vector<std::vector<PaddleTensor>> paddle_tensor_feeds(num_jobs);
  std::vector<framework::LoDTensor> refs(num_jobs);
  for (size_t i = 0; i < jobs.size(); ++i) {
    // each job has 4 words
    jobs[i].resize(4);
    for (size_t j = 0; j < 4; ++j) {
      framework::LoD lod{{0, 1}};
      int64_t dict_size = 2073;  // The size of dictionary
      SetupLoDTensor(&jobs[i][j], lod, static_cast<int64_t>(0), dict_size - 1);
      paddle_tensor_feeds[i].push_back(LodTensorToPaddleTensor(&jobs[i][j]));
    }

    // get reference result of each job
    std::vector<paddle::framework::LoDTensor*> ref_feeds;
    std::vector<paddle::framework::LoDTensor*> ref_fetches(1, &refs[i]);
    for (auto& word : jobs[i]) {
      ref_feeds.push_back(&word);
    }
    TestInference<platform::CPUPlace>(config.model_dir, ref_feeds, ref_fetches);
  }

  // create threads and each thread run 1 job
  std::vector<std::thread> threads;
  for (int tid = 0; tid < num_jobs; ++tid) {
    threads.emplace_back([&, tid]() {
      auto predictor = main_predictor->Clone();
      auto& local_inputs = paddle_tensor_feeds[tid];
      std::vector<PaddleTensor> local_outputs;
      ASSERT_TRUE(predictor->Run(local_inputs, &local_outputs));

      // check outputs range
      ASSERT_EQ(local_outputs.size(), 1UL);
      const size_t len = local_outputs[0].data.length;
      float* data = static_cast<float*>(local_outputs[0].data.data);
      for (size_t j = 0; j < len / sizeof(float); ++j) {
        ASSERT_LT(data[j], 1.0);
        ASSERT_GT(data[j], -1.0);
      }

      // check outputs correctness
      float* ref_data = refs[tid].data<float>();
      EXPECT_EQ(refs[tid].numel(), len / sizeof(float));
      for (int i = 0; i < refs[tid].numel(); ++i) {
        EXPECT_LT(ref_data[i] - data[i], 1e-3);
        EXPECT_GT(ref_data[i] - data[i], -1e-3);
      }
      free(local_outputs[0].data.data);
    });
  }
  for (int i = 0; i < num_jobs; ++i) {
    threads[i].join();
  }
}

}  // namespace paddle
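
The pattern the new test exercises is: build one predictor from a NativeConfig, have each worker thread Clone() it and run inference on its own inputs, so no predictor instance is shared across threads, then compare each thread's outputs against a single-threaded reference. Below is a minimal sketch of that clone-per-thread usage outside the gtest harness, assuming only what is visible in the diff (CreatePaddlePredictor<NativeConfig>, Clone(), Run(), and output buffers freed by the caller via data.data); the function name RunConcurrently and the elided input preparation are illustrative, not part of the commit.

// Minimal sketch (not part of the commit): clone-per-thread inference.
#include <thread>
#include <vector>

#include "paddle/contrib/inference/paddle_inference_api_impl.h"

namespace paddle {

void RunConcurrently(const NativeConfig& config,
                     const std::vector<std::vector<PaddleTensor>>& per_thread_inputs) {
  // One "main" predictor owns the loaded model.
  auto main_predictor = CreatePaddlePredictor<NativeConfig>(config);

  std::vector<std::thread> threads;
  for (size_t tid = 0; tid < per_thread_inputs.size(); ++tid) {
    threads.emplace_back([&, tid]() {
      // Each thread runs on its own cloned predictor, never the shared one.
      auto predictor = main_predictor->Clone();
      std::vector<PaddleTensor> outputs;
      if (!predictor->Run(per_thread_inputs[tid], &outputs)) return;
      // As in the test's free(local_outputs[0].data.data), the caller owns
      // and releases the output buffers of this API.
      for (auto& out : outputs) free(out.data.data);
    });
  }
  for (auto& t : threads) t.join();
}

}  // namespace paddle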