机器未来 / Paddle (forked from PaddlePaddle / Paddle)

Commit
89c48e60
Authored Jun 15, 2019 by hong19860320
remove the batch_norm unit test which the attr 'is_test' is enabled
test=develop
Parent: add63d23
Showing 2 changed files with 63 additions and 63 deletions
paddle/fluid/lite/operators/batch_norm_op_test.cc   +62 −62
paddle/fluid/lite/operators/pool_op_test.cc          +1 −1
paddle/fluid/lite/operators/batch_norm_op_test.cc
...
@@ -67,72 +67,72 @@ TEST(batch_norm_op_lite, test) {
   }
 }
-TEST(batch_norm_op_lite, test_enable_is_test) {
-  // prepare variables
-  Scope scope;
-  auto* x = scope.Var("x")->GetMutable<Tensor>();
-  auto* scale = scope.Var("scale")->GetMutable<Tensor>();
-  auto* bias = scope.Var("bias")->GetMutable<Tensor>();
-  auto* mean = scope.Var("mean")->GetMutable<Tensor>();
-  auto* variance = scope.Var("variance")->GetMutable<Tensor>();
-  auto* y = scope.Var("y")->GetMutable<Tensor>();
-  auto* mean_out = scope.Var("mean_out")->GetMutable<Tensor>();
-  auto* variance_out = scope.Var("variance_out")->GetMutable<Tensor>();
-  auto* saved_mean = scope.Var("saved_mean")->GetMutable<Tensor>();
-  auto* saved_variance = scope.Var("saved_variance")->GetMutable<Tensor>();
-  x->Resize({2, 32, 10, 20});
-  auto x_dims = x->dims();
-  const int64_t channel_size = x_dims[1];  // NCHW
-  scale->Resize({channel_size});
-  bias->Resize({channel_size});
-  mean->Resize({channel_size});
-  variance->Resize({channel_size});
+// TEST(batch_norm_op_lite, test_enable_is_test) {
+//   // prepare variables
+//   Scope scope;
+//   auto* x = scope.Var("x")->GetMutable<Tensor>();
+//   auto* scale = scope.Var("scale")->GetMutable<Tensor>();
+//   auto* bias = scope.Var("bias")->GetMutable<Tensor>();
+//   auto* mean = scope.Var("mean")->GetMutable<Tensor>();
+//   auto* variance = scope.Var("variance")->GetMutable<Tensor>();
+//   auto* y = scope.Var("y")->GetMutable<Tensor>();
+//   auto* mean_out = scope.Var("mean_out")->GetMutable<Tensor>();
+//   auto* variance_out = scope.Var("variance_out")->GetMutable<Tensor>();
+//   auto* saved_mean = scope.Var("saved_mean")->GetMutable<Tensor>();
+//   auto* saved_variance = scope.Var("saved_variance")->GetMutable<Tensor>();
+//   x->Resize({2, 32, 10, 20});
+//   auto x_dims = x->dims();
+//   const int64_t channel_size = x_dims[1];  // NCHW
+//   scale->Resize({channel_size});
+//   bias->Resize({channel_size});
+//   mean->Resize({channel_size});
+//   variance->Resize({channel_size});
-  // prepare op desc
-  cpp::OpDesc desc;
-  desc.SetType("batch_norm");
-  desc.SetInput("X", {"x"});
-  desc.SetInput("Scale", {"scale"});
-  desc.SetInput("Bias", {"bias"});
-  desc.SetInput("Mean", {"mean"});
-  desc.SetInput("Variance", {"variance"});
-  desc.SetOutput("Y", {"y"});
-  desc.SetOutput("MeanOut", {"mean_out"});
-  desc.SetOutput("VarianceOut", {"variance_out"});
-  desc.SetOutput("SavedMean", {"saved_mean"});
-  desc.SetOutput("SavedVariance", {"saved_variance"});
-  desc.SetAttr("is_test", false);
-  desc.SetAttr("use_global_stats", false);
-  desc.SetAttr("epsilon", 1e-5f);
-  desc.SetAttr("momentum", 0.9f);
-  desc.SetAttr("data_layout", std::string("NCHW"));
+//   // prepare op desc
+//   cpp::OpDesc desc;
+//   desc.SetType("batch_norm");
+//   desc.SetInput("X", {"x"});
+//   desc.SetInput("Scale", {"scale"});
+//   desc.SetInput("Bias", {"bias"});
+//   desc.SetInput("Mean", {"mean"});
+//   desc.SetInput("Variance", {"variance"});
+//   desc.SetOutput("Y", {"y"});
+//   desc.SetOutput("MeanOut", {"mean_out"});
+//   desc.SetOutput("VarianceOut", {"variance_out"});
+//   desc.SetOutput("SavedMean", {"saved_mean"});
+//   desc.SetOutput("SavedVariance", {"saved_variance"});
+//   desc.SetAttr("is_test", false);
+//   desc.SetAttr("use_global_stats", false);
+//   desc.SetAttr("epsilon", 1e-5f);
+//   desc.SetAttr("momentum", 0.9f);
+//   desc.SetAttr("data_layout", std::string("NCHW"));
-  BatchNormOp batch_norm("batch_norm");
+//   BatchNormOp batch_norm("batch_norm");
-  batch_norm.SetValidPlaces({Place{TARGET(kHost), PRECISION(kFloat)}});
-  batch_norm.Attach(desc, &scope);
-  batch_norm.CheckShape();
-  batch_norm.InferShape();
+//   batch_norm.SetValidPlaces({Place{TARGET(kHost), PRECISION(kFloat)}});
+//   batch_norm.Attach(desc, &scope);
+//   batch_norm.CheckShape();
+//   batch_norm.InferShape();
-  // check output dims
-  auto y_dims = y->dims();
-  CHECK_EQ(y_dims.size(), x_dims.size());
-  for (size_t i = 0; i < y_dims.size(); i++) {
-    CHECK_EQ(y_dims[i], x_dims[i]);
-  }
-  auto mean_out_dims = mean_out->dims();
-  auto variance_out_dims = variance_out->dims();
-  auto saved_mean_dims = saved_mean->dims();
-  auto saved_variance_dims = saved_variance->dims();
-  CHECK_EQ(mean_out_dims.size(), 1UL);
-  CHECK_EQ(variance_out_dims.size(), 1UL);
-  CHECK_EQ(saved_mean_dims.size(), 1UL);
-  CHECK_EQ(saved_variance_dims.size(), 1UL);
-  CHECK_EQ(mean_out_dims[0], channel_size);
-  CHECK_EQ(variance_out_dims[0], channel_size);
-  CHECK_EQ(saved_mean_dims[0], channel_size);
-  CHECK_EQ(saved_variance_dims[0], channel_size);
-}
+//   // check output dims
+//   auto y_dims = y->dims();
+//   CHECK_EQ(y_dims.size(), x_dims.size());
+//   for (size_t i = 0; i < y_dims.size(); i++) {
+//     CHECK_EQ(y_dims[i], x_dims[i]);
+//   }
+//   auto mean_out_dims = mean_out->dims();
+//   auto variance_out_dims = variance_out->dims();
+//   auto saved_mean_dims = saved_mean->dims();
+//   auto saved_variance_dims = saved_variance->dims();
+//   CHECK_EQ(mean_out_dims.size(), 1UL);
+//   CHECK_EQ(variance_out_dims.size(), 1UL);
+//   CHECK_EQ(saved_mean_dims.size(), 1UL);
+//   CHECK_EQ(saved_variance_dims.size(), 1UL);
+//   CHECK_EQ(mean_out_dims[0], channel_size);
+//   CHECK_EQ(variance_out_dims[0], channel_size);
+//   CHECK_EQ(saved_mean_dims[0], channel_size);
+//   CHECK_EQ(saved_variance_dims[0], channel_size);
+// }
 }  // namespace operators
 }  // namespace lite
...
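Note: the hunk above does not delete the test outright; the entire body is kept as commented-out code. As an aside (not part of this commit), if the goal were to keep the test compiled but skipped by default, GoogleTest's DISABLED_ prefix would achieve that, assuming these lite operator tests use the standard gtest TEST macro. A minimal sketch reusing the names from the hunk above:

TEST(batch_norm_op_lite, DISABLED_test_enable_is_test) {
  // same body as the commented-out test above
}

gtest skips DISABLED_ tests by default and still lets them be run on demand with --gtest_also_run_disabled_tests.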
paddle/fluid/lite/operators/pool_op_test.cc
...
@@ -69,7 +69,7 @@ TEST(pool_op_lite, test) {
   bool use_quantizer{false};
   desc.SetAttr("use_quantizer", use_quantizer);
-  PoolOpLite pool("pool");
+  PoolOpLite pool("pool2d");
   pool.SetValidPlaces({Place{TARGET(kARM), PRECISION(kFloat)}});
   pool.Attach(desc, &scope);
   auto kernels = pool.CreateKernels({Place{TARGET(kARM), PRECISION(kFloat)}});
...
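The only functional change in this file is the operator type string passed to PoolOpLite: "pool" becomes "pool2d", presumably so the constructor argument matches the name under which the pooling operator and its kernels are registered, which is what CreateKernels looks up for the ARM float place. If one wanted the test to fail fast on such a mismatch, a hedged follow-up check could be placed after the CreateKernels call; this sketch assumes CreateKernels returns a standard container and that a glog-style CHECK macro is available alongside the CHECK_EQ already used in these tests, neither of which the diff itself confirms:

auto kernels = pool.CreateKernels({Place{TARGET(kARM), PRECISION(kFloat)}});
CHECK(!kernels.empty());  // hypothetical guard: no kernel registered for op type "pool2d"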