s920243400 / PaddleDetection (forked from PaddlePaddle / PaddleDetection)
Commit 44ed21ee
Authored Oct 15, 2017 by fengjiayi; committed by Yi Wang on Oct 15, 2017
Refine backward unit tests (#4817)
Parent: 88b9202c
Showing 3 changed files with 76 additions and 16 deletions (+76 -16):

paddle/framework/backward.cc       +0  -10
paddle/framework/backward.h        +9  -0
paddle/framework/backward_test.cc  +67 -6

In short, the commit deletes leftover merge-conflict comments from CreateGradVarInBlock in backward.cc, adds constructors and an equality operator to the GradVarInfo struct in backward.h, and extends backward_test.cc so that each test captures the variable-to-gradient map returned by AppendBackward and asserts on its contents.
paddle/framework/backward.cc (view file @ 44ed21ee)

@@ -281,15 +281,6 @@ static void CreateGradVarInBlock(
   auto ops = block_desc->AllOps();
   for (size_t op_index = grad_op_start_index; op_index < ops.size();
        ++op_index) {
-    // <<<<<<< HEAD
-    //    for (const auto& output : ops[op_index]->Outputs()) {
-    //      for (const auto& real_output : output.second) {
-    //        if (!block_desc->HasVar(real_output)) {
-    //          block_desc->Var(real_output);
-    //        }
-    //      }
-    //    }
-    // =======
     ForEachVarName(ops[op_index]->Outputs(),
                    [&](const std::string& grad_var_name) {
                      if (block_desc->HasVar(grad_var_name)) {
@@ -307,7 +298,6 @@ static void CreateGradVarInBlock(
       grad_record.op_idx_ = static_cast<int>(op_index);
       return false; /* not break */
     });
-    // >>>>>>> origin/develop
   }
 }
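For readers unfamiliar with the helper used above: ForEachVarName appears to walk every variable name in an op's {parameter -> variable names} map and invoke a callback for each one, stopping early when the callback returns true, which is why the lambda above ends with "return false; /* not break */". The following is a minimal, self-contained sketch of that pattern; the signature, name, and early-exit convention here are inferred from this diff rather than taken from the Paddle headers.

#include <iostream>
#include <map>
#include <string>
#include <vector>

// Hypothetical stand-in for the ForEachVarName helper used in backward.cc:
// visit every name in a {parameter -> names} map and stop as soon as the
// callback returns true (inferred from the "return false; /* not break */"
// comment in the hunk above).
template <typename Callback>
static void ForEachVarNameSketch(
    const std::map<std::string, std::vector<std::string>>& var_map,
    Callback callback) {
  for (const auto& kv : var_map) {
    for (const auto& name : kv.second) {
      if (callback(name)) {
        return;  // callback asked to stop early
      }
    }
  }
}

int main() {
  std::map<std::string, std::vector<std::string>> outputs = {
      {"Out", {"out", "out_2"}}};
  ForEachVarNameSketch(outputs, [](const std::string& name) {
    std::cout << name << "\n";
    return false;  // keep iterating, mirroring the usage in backward.cc
  });
  return 0;
}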
paddle/framework/backward.h (view file @ 44ed21ee)

@@ -31,6 +31,15 @@ extern std::unique_ptr<OperatorBase> Backward(
     const std::unordered_set<std::string>& no_grad_vars);
 
 struct GradVarInfo {
+  GradVarInfo() {}
+  GradVarInfo(const std::string& name, int block_idx, int op_idx)
+      : name_(name), block_idx_(block_idx), op_idx_(op_idx) {}
+
+  bool operator==(const GradVarInfo& b) const {
+    return name_ == b.name_ && block_idx_ == b.block_idx_ &&
+           op_idx_ == b.op_idx_;
+  }
+
   std::string name_;
   int block_idx_;
   int op_idx_;
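The operator== added above is what lets the refreshed tests below compare whole gradient-variable records with EXPECT_EQ. Here is a minimal, self-contained sketch of that value comparison, using plain asserts instead of gtest, a literal "@GRAD" suffix instead of f::GradVarName, and no Paddle headers; it is an illustration of the idea, not the project's build.

#include <cassert>
#include <string>
#include <unordered_map>

// Simplified copy of the GradVarInfo struct shown in the backward.h hunk.
struct GradVarInfo {
  GradVarInfo() {}
  GradVarInfo(const std::string& name, int block_idx, int op_idx)
      : name_(name), block_idx_(block_idx), op_idx_(op_idx) {}

  bool operator==(const GradVarInfo& b) const {
    return name_ == b.name_ && block_idx_ == b.block_idx_ &&
           op_idx_ == b.op_idx_;
  }

  std::string name_;
  int block_idx_;
  int op_idx_;
};

int main() {
  // A {forward var -> grad var info} map shaped like the one the tests below
  // receive from AppendBackward.
  std::unordered_map<std::string, GradVarInfo> var_to_grad;
  var_to_grad["x"] = GradVarInfo("x@GRAD", /*block_idx=*/0, /*op_idx=*/2);

  // Whole-record comparison, mirroring assertions such as
  // EXPECT_EQ(var_to_grad.at("x"), f::GradVarInfo(f::GradVarName("x"), 0, 2));
  assert(var_to_grad.at("x") == GradVarInfo("x@GRAD", 0, 2));
  return 0;
}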
paddle/framework/backward_test.cc (view file @ 44ed21ee)

@@ -470,7 +470,7 @@ TEST(Backward, simple_single_op) {
   op->SetOutput("Out", {"out"});
 
   auto target = f::VarDescBind("out");
-  AppendBackward(program, target, {});
+  auto var_to_grad = AppendBackward(program, target, {});
 
   ASSERT_EQ(block->AllOps().size(), 3UL);
   f::OpDescBind *fill_op = block->AllOps()[1];
@@ -486,6 +486,13 @@ TEST(Backward, simple_single_op) {
             std::vector<std::string>({f::GradVarName("x")}));
   EXPECT_EQ(grad_op->Output(f::GradVarName("b")),
             std::vector<std::string>({f::GradVarName("b")}));
+
+  EXPECT_EQ(var_to_grad.size(), 2UL);
+  EXPECT_EQ(var_to_grad.at("b"), f::GradVarInfo(f::GradVarName("b"), 0, 2));
+  EXPECT_EQ(var_to_grad.at("x"), f::GradVarInfo(f::GradVarName("x"), 0, 2));
+
+  EXPECT_TRUE(block->HasVar(f::GradVarName("b")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("x")));
 }
 
 TEST(Backward, default_attribute) {
@@ -539,7 +546,7 @@ TEST(Backward, simple_mult_op) {
 
   auto target = f::VarDescBind("out3");
   size_t forward_len = block->AllOps().size();
-  AppendBackward(program, target, {});
+  auto var_to_grad = AppendBackward(program, target, {});
 
   ASSERT_EQ(block->AllOps().size(), 6UL + 1);
   f::OpDescBind *fill_op = block->AllOps()[forward_len];
@@ -580,6 +587,23 @@ TEST(Backward, simple_mult_op) {
             std::vector<std::string>({f::GradVarName("out2")}));
   EXPECT_EQ(grad_op3->Output(f::GradVarName("b")),
             std::vector<std::string>({f::GradVarName("b3")}));
+
+  EXPECT_EQ(var_to_grad.size(), 6UL);
+  EXPECT_EQ(var_to_grad.at("x1"), f::GradVarInfo(f::GradVarName("x1"), 0, 6));
+  EXPECT_EQ(var_to_grad.at("b1"), f::GradVarInfo(f::GradVarName("b1"), 0, 6));
+  EXPECT_EQ(var_to_grad.at("out1"),
+            f::GradVarInfo(f::GradVarName("out1"), 0, 5));
+  EXPECT_EQ(var_to_grad.at("y2"), f::GradVarInfo(f::GradVarName("y2"), 0, 5));
+  EXPECT_EQ(var_to_grad.at("out2"),
+            f::GradVarInfo(f::GradVarName("out2"), 0, 4));
+  EXPECT_EQ(var_to_grad.at("b3"), f::GradVarInfo(f::GradVarName("b3"), 0, 4));
+
+  EXPECT_TRUE(block->HasVar(f::GradVarName("x1")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("b1")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("out1")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("y2")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("out2")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("b3")));
 }
 
 TEST(Backward, intermedia_var_no_grad) {
@@ -612,7 +636,7 @@ TEST(Backward, intermedia_var_no_grad) {
 
   auto target = f::VarDescBind("out4");
   size_t forward_len = block->AllOps().size();
-  AppendBackward(program, target, {"out3"});
+  auto var_to_grad = AppendBackward(program, target, {"out3"});
 
   ASSERT_EQ(block->AllOps().size(), 7UL);
   f::OpDescBind *fill_op = block->AllOps()[forward_len];
@@ -641,6 +665,16 @@ TEST(Backward, intermedia_var_no_grad) {
   EXPECT_EQ(grad_op4->Output(f::GradVarName("X")),
             std::vector<std::string>({f::GradVarName("out1")}));
   EXPECT_EQ(grad_op4->Output(f::GradVarName("Y")), std::vector<std::string>());
+
+  EXPECT_EQ(var_to_grad.size(), 3UL);
+  EXPECT_EQ(var_to_grad.at("x1"), f::GradVarInfo(f::GradVarName("x1"), 0, 6));
+  EXPECT_EQ(var_to_grad.at("b1"), f::GradVarInfo(f::GradVarName("b1"), 0, 6));
+  EXPECT_EQ(var_to_grad.at("out1"),
+            f::GradVarInfo(f::GradVarName("out1"), 0, 5));
+
+  EXPECT_TRUE(block->HasVar(f::GradVarName("x1")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("b1")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("out1")));
 }
 
 TEST(Backward, var_no_grad) {
@@ -663,7 +697,7 @@ TEST(Backward, var_no_grad) {
 
   auto target = f::VarDescBind("z2");
   size_t forward_len = block->AllOps().size();
-  AppendBackward(program, target, {"z1"});
+  auto var_to_grad = AppendBackward(program, target, {"z1"});
 
   ASSERT_EQ(block->AllOps().size(), 6UL);
   f::OpDescBind *fill_op = block->AllOps()[forward_len];
@@ -709,6 +743,15 @@ TEST(Backward, var_no_grad) {
             std::vector<std::string>({f::GradVarName("x1")}));
   EXPECT_EQ(grad_op1->Output(f::GradVarName("H")),
             std::vector<std::string>({f::GradVarName("h1")}));
+
+  EXPECT_EQ(var_to_grad.size(), 3UL);
+  EXPECT_EQ(var_to_grad.at("y1"), f::GradVarInfo(f::GradVarName("y1"), 0, 3));
+  EXPECT_EQ(var_to_grad.at("x1"), f::GradVarInfo(f::GradVarName("x1"), 0, 5));
+  EXPECT_EQ(var_to_grad.at("h1"), f::GradVarInfo(f::GradVarName("h1"), 0, 5));
+
+  EXPECT_TRUE(block->HasVar(f::GradVarName("y1")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("x1")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("h1")));
 }
 
 TEST(Backward, shared_var) {
@@ -735,7 +778,7 @@ TEST(Backward, shared_var) {
 
   auto target = f::VarDescBind("out3");
   size_t forward_len = block->AllOps().size();
-  AppendBackward(program, target, {});
+  auto var_to_grad = AppendBackward(program, target, {});
 
   ASSERT_EQ(block->AllOps().size(), 8UL);
   f::OpDescBind *fill_op = block->AllOps()[forward_len];
@@ -786,6 +829,20 @@ TEST(Backward, shared_var) {
             std::vector<std::string>({f::GradVarName("x1")}));
   EXPECT_EQ(grad_op1->Output(f::GradVarName("b")),
             std::vector<std::string>({f::GradVarName("b1")}));
+
+  EXPECT_EQ(var_to_grad.size(), 5UL);
+  EXPECT_EQ(var_to_grad.at("b3"), f::GradVarInfo(f::GradVarName("b3"), 0, 4));
+  EXPECT_EQ(var_to_grad.at("y2"), f::GradVarInfo(f::GradVarName("y2"), 0, 5));
+  EXPECT_EQ(var_to_grad.at("out1"),
+            f::GradVarInfo(f::GradVarName("out1"), 0, 6));
+  EXPECT_EQ(var_to_grad.at("x1"), f::GradVarInfo(f::GradVarName("x1"), 0, 7));
+  EXPECT_EQ(var_to_grad.at("b1"), f::GradVarInfo(f::GradVarName("b1"), 0, 7));
+
+  EXPECT_TRUE(block->HasVar(f::GradVarName("b3")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("y2")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("out1")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("x1")));
+  EXPECT_TRUE(block->HasVar(f::GradVarName("b1")));
 }
 
 TEST(Backward, half_backward) {
@@ -800,9 +857,13 @@ TEST(Backward, half_backward) {
 
   auto target = f::VarDescBind("out");
   size_t forward_len = block->AllOps().size();
-  AppendBackward(program, target, {"b"});
+  auto var_to_grad = AppendBackward(program, target, {"b"});
   f::OpDescBind *fill_op = block->AllOps()[forward_len];
   EXPECT_EQ(fill_op->Type(), "fill_constant");
   auto ops = block->AllOps();
   ASSERT_EQ(3UL, ops.size());
+
+  EXPECT_EQ(var_to_grad.size(), 1UL);
+  EXPECT_EQ(var_to_grad.at("a"),
+            f::GradVarInfo(f::GradVarName("a"), 0, forward_len + 1));
 }