机器未来 / Paddle (forked from PaddlePaddle / Paddle)
Commit dfb4ea76
Authored Aug 11, 2017 by qingqing01
make unit test of backward_test pass.
Parent: 88104905
Showing 3 changed files with 249 additions and 216 deletions
paddle/framework/backward.cc       +6   -6
paddle/framework/backward_test.cc  +242 -209
paddle/framework/operator.cc       +1   -1
paddle/framework/backward.cc
@@ -25,7 +25,7 @@ template <typename Map, typename T>
 static void ForEachVarName(Map& names, T callback) {
   for (auto& name : names) {
     for (auto& n : name.second) {
-      if (callback(n)) break;
+      if (callback(n)) return;
     }
   }
 }
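As a standalone illustration (not part of the commit, using only the standard library), the switch from `break` to `return` changes ForEachVarName from skipping the rest of one variable slot to stopping the whole traversal once the callback signals it is done; with the old `break`, the callback would still run once for every remaining map entry.

#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

// Same shape as the framework helper: visit every name in a map of name
// lists and stop as soon as the callback returns true.
template <typename Map, typename T>
static void ForEachVarName(Map& names, T callback) {
  for (auto& name : names) {
    for (auto& n : name.second) {
      if (callback(n)) return;  // `return` ends the whole walk; the old
                                // `break` only left the inner loop.
    }
  }
}

int main() {
  std::unordered_map<std::string, std::vector<std::string>> names = {
      {"X", {"x0", "x1"}}, {"b", {"b0"}}};
  int visited = 0;
  ForEachVarName(names, [&](const std::string& n) {
    ++visited;
    return true;  // ask to stop after the very first name
  });
  std::cout << "visited " << visited << " name(s)\n";  // prints 1
  return 0;
}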
@@ -33,12 +33,12 @@ static void ForEachVarName(Map& names, T callback) {
 static bool AllInSet(
     const std::unordered_map<std::string, std::vector<std::string>>& names,
     const std::string& suffix, const std::unordered_set<std::string>& set) {
-  bool ret_val = true;
-  ForEachVarName(names, [&ret_val, &set, &suffix](const std::string& n) {
-    ret_val = set.find(n + suffix) == set.end();
-    return !ret_val;
+  bool all_in_set = true;
+  ForEachVarName(names, [&all_in_set, &set, &suffix](const std::string& n) {
+    all_in_set = set.find(n + suffix) != set.end();
+    return !all_in_set;
   });
-  return ret_val;
+  return all_in_set;
 }

 static std::shared_ptr<OperatorBase> NOP() {
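For reference, a self-contained sketch (plain standard library, not the framework code) of what the corrected AllInSet computes: it is true only when every listed name, with `suffix` appended, is found in `set`; the old `==` comparison, combined with the early-return callback, inverted that meaning. The "@GRAD" suffix below is purely illustrative.

#include <cassert>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

// Simplified stand-in for the fixed helper: "are all names (+ suffix) in set?"
static bool AllInSet(
    const std::unordered_map<std::string, std::vector<std::string>>& names,
    const std::string& suffix, const std::unordered_set<std::string>& set) {
  for (const auto& entry : names) {
    for (const auto& n : entry.second) {
      if (set.find(n + suffix) == set.end()) {
        return false;  // one missing name is enough to answer "no"
      }
    }
  }
  return true;
}

int main() {
  const std::unordered_set<std::string> no_grad = {"x@GRAD", "b@GRAD"};
  assert(AllInSet({{"X", {"x"}}, {"b", {"b"}}}, "@GRAD", no_grad));
  assert(!AllInSet({{"X", {"x"}}, {"W", {"w"}}}, "@GRAD", no_grad));
  return 0;
}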
paddle/framework/backward_test.cc
@@ -82,11 +82,11 @@ class FcOp : public operators::NetOp {
     AddOp(OpRegistry::CreateOp("mul",
                                {{"X", {Input("X")}}, {"Y", {Input("W")}}},
                                {{"Out", {Output("mul_result")}}}, {}));
-    auto b_name = Input("b");
+    auto input_b = Inputs("b");
     std::string before_act = "mul_result";
-    if (b_name != kEmptyVarName) {
+    if (input_b.size() != 0) {
       AddOp(OpRegistry::CreateOp(
-          "rowwise_add", {{"X", {Output("mul_result")}}, {"b", {b_name}}},
+          "rowwise_add", {{"X", {Output("mul_result")}}, {"b", {input_b[0]}}},
           {{"Out", {Output("add_result")}}}, {}));
       before_act = "add_result";
     } else {
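An illustrative aside (not the framework API): with the map-of-name-lists input format used above, an optional input such as "b" is simply bound to an empty list when omitted, so the natural check is the size of Inputs("b") rather than a comparison against the kEmptyVarName sentinel. A minimal sketch of that idea:

#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

int main() {
  // Inputs as a map from slot name to a list of variable names;
  // the bias slot "b" is deliberately left empty here.
  std::unordered_map<std::string, std::vector<std::string>> inputs = {
      {"X", {"x"}}, {"W", {"w"}}, {"b", {}}};

  const auto& input_b = inputs["b"];
  if (input_b.size() != 0) {
    std::cout << "bias provided: " << input_b[0] << "\n";
  } else {
    std::cout << "no bias, skip the rowwise_add step\n";
  }
  return 0;
}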
@@ -166,209 +166,242 @@ REGISTER_OP(fc, f::FcOp, f::FcOpMaker);
 REGISTER_OP(many_output_op, f::EmptyOp, f::ManyOutputOpMaker);
 REGISTER_GRADIENT_OP(many_output_op, many_output_op_grad, f::EmptyOp);
// TEST(Backward, simple_op_grad) {
// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
// ASSERT_NE(fwd, nullptr);
// auto gop = f::OpRegistry::CreateGradOp(*fwd);
// ASSERT_EQ(4UL, gop->inputs_.size());
// ASSERT_EQ(f::kEmptyVarName, gop->inputs_[0]);
// ASSERT_EQ("rowwise_add_grad", gop->type_);
// ASSERT_EQ(f::GradVarName("X"), gop->outputs_[0]);
// ASSERT_EQ(f::GradVarName("b"), gop->outputs_[1]);
//
// ASSERT_EQ(f::GradVarName("X"), gop->Output(f::GradVarName("X")));
//}
//
// TEST(Backward, simple_op_not_need_grad) {
// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
// ASSERT_NE(fwd, nullptr);
// auto gop = f::Backward(*fwd, {"X"});
// ASSERT_EQ(std::find(gop->outputs_.begin(), gop->outputs_.end(),
// f::GradVarName("X")),
// gop->outputs_.end());
//
// auto no_input_gop = f::Backward(*fwd, {"X", "b"});
// ASSERT_NE(no_input_gop, nullptr);
// ASSERT_TRUE(no_input_gop->IsNetOp());
// ASSERT_EQ(0UL,
// std::static_pointer_cast<ops::NetOp>(no_input_gop)->ops_.size());
//}
//
// TEST(Backward, net_fc_backward_normal) {
// std::shared_ptr<f::OperatorBase> fwd = f::OpRegistry::CreateOp(
// "fc", {"X", "w", "b"}, {"mul_result", "add_result", "out"}, {});
// ASSERT_NE(fwd, nullptr);
// std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
// ASSERT_TRUE(gop->IsNetOp());
// auto net = static_cast<ops::NetOp *>(gop.get());
//
// ASSERT_NO_THROW(net->DebugString());
//
// ASSERT_EQ(3UL, net->ops_.size());
//
// f::OperatorBase &d_sigmoid = *net->ops_[0];
// ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);
//
// f::OperatorBase &d_add = *net->ops_[1];
// ASSERT_EQ("rowwise_add_grad", d_add.type_);
//
// f::OperatorBase &d_mul = *net->ops_[2];
// ASSERT_EQ("mul_grad", d_mul.type_);
//}
//
// TEST(Backward, net_fc_backward_not_have_b) {
// std::shared_ptr<f::OperatorBase> fwd =
// f::OpRegistry::CreateOp("fc", {"X", "w", f::kEmptyVarName},
// {"mul_result", "add_result", "tmp"}, {});
// ASSERT_NE(fwd, nullptr);
// std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
// ASSERT_TRUE(gop->IsNetOp());
// auto net = static_cast<ops::NetOp *>(gop.get());
//
// ASSERT_NO_THROW(net->DebugString());
//
// ASSERT_EQ(2UL, net->ops_.size());
//
// f::OperatorBase &d_sigmoid = *net->ops_[0];
// ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);
//
// f::OperatorBase &d_mul = *net->ops_[1];
// ASSERT_EQ("mul_grad", d_mul.type_);
//}
//
// TEST(Backward, net_input_of_network_not_need_grad) {
// ops::NetOp net;
// net.AddOp(f::OpRegistry::CreateOp("fc", {"X", "W1", "b1"},
// {"mul_tmp_0", "add_tmp_0", "hidden0"},
// {}));
// net.AddOp(f::OpRegistry::CreateOp("fc", {"hidden0", "W2", "b2"},
// {"mul_tmp_1", "add_tmp_1", "hidden1"},
// {}));
// net.CompleteAddOp();
// auto bwd = Backward(net, {"X"}); // X@GRAD is not need.
// ASSERT_TRUE(bwd->IsNetOp());
// auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
//
// std::unordered_set<std::string> all_output =
// std::unordered_set<std::string>(
// bwd_net->outputs_.begin(), bwd_net->outputs_.end());
// all_output.erase(f::kEmptyVarName);
//
// for (auto &out : {"W1", "b1", "hidden0", "W2", "b2"}) {
// ASSERT_NE(all_output.find(f::GradVarName(out)), all_output.end());
// }
//
// // Not Generated X
// ASSERT_EQ(all_output.find(f::GradVarName("X")), all_output.end());
//
// ASSERT_EQ(2UL, bwd_net->ops_.size());
// ASSERT_TRUE(bwd_net->ops_[1]->IsNetOp());
// auto first_fc_grad = static_cast<ops::NetOp *>(bwd_net->ops_[1].get());
// ASSERT_EQ(3UL, first_fc_grad->ops_.size());
// ASSERT_EQ(f::kEmptyVarName,
// first_fc_grad->ops_[2]->Output(f::GradVarName("A")));
//}
//
// TEST(Backward, net_shared_weight) {
// ops::NetOp net;
// net.AddOp(f::OpRegistry::CreateOp("mul", {"X", "W"}, {"Out"}, {}));
// net.AddOp(f::OpRegistry::CreateOp("mul", {"Out", "W"}, {"FinalOut"}, {}));
// net.CompleteAddOp();
//
// auto bwd = f::Backward(net, {});
// ASSERT_TRUE(bwd->IsNetOp());
// auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
// ASSERT_EQ(3UL, bwd_net->ops_.size());
// ASSERT_EQ("add", bwd_net->ops_[2]->type_);
//}
//
// TEST(Backward, op_register_grad_not_for_network) {
// auto fwd = f::OpRegistry::CreateOp(
// "fc", {"X", "W", "b"}, {"mul_out", "add_out", "out1"},
// {{"temporary_index", std::vector<int>{0, 1}}});
//
// ASSERT_THROW(f::OpRegistry::CreateGradOp(*fwd), EnforceNotMet);
//}
//
// TEST(Backward, op_all_input_are_not_need) {
// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
// auto backward = f::Backward(*fwd, {"X", "b"});
// ASSERT_TRUE(backward->IsNetOp());
// auto net = static_cast<ops::NetOp *>(backward.get());
// ASSERT_TRUE(net->ops_.empty());
//}
//
// TEST(Backward, op_all_output_are_not_need) {
// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
// auto backward = f::Backward(*fwd, {"Out"});
// ASSERT_TRUE(backward->IsNetOp());
// auto net = static_cast<ops::NetOp *>(backward.get());
// ASSERT_TRUE(net->ops_.empty());
//}
//
// TEST(Backward, op_part_of_output_are_not_need) {
// auto fwd = f::OpRegistry::CreateOp("many_output_op", {"X"}, {"Y", "Z"}, {});
// auto backward = f::Backward(*fwd, {"Z"});
// ASSERT_TRUE(backward->IsNetOp());
// auto net = static_cast<ops::NetOp *>(backward.get());
// ASSERT_EQ(net->ops_.size(), 2UL);
//
// auto &fill_zero = *net->ops_[0];
// ASSERT_EQ("fill_zeros_like", fill_zero.type_);
// ASSERT_EQ(1UL, fill_zero.inputs_.size());
// ASSERT_EQ("Z", fill_zero.inputs_[0]);
// ASSERT_EQ(1UL, fill_zero.outputs_.size());
// ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix, fill_zero.outputs_[0]);
//
// auto &d_many_out = *net->ops_[1];
// ASSERT_EQ("many_output_op_grad", d_many_out.type_);
// ASSERT_EQ(1UL + 2UL + 2UL, d_many_out.inputs_.size()); // I/O/OG
// ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix,
// d_many_out.Input(f::GradVarName("z")));
// ASSERT_EQ(f::GradVarName("Y"), d_many_out.Input(f::GradVarName("y")));
// ASSERT_EQ(f::GradVarName("X"), d_many_out.Output(f::GradVarName("x")));
//}
//
// TEST(Backward, op_part_of_input_are_not_need) {
// auto fwd = f::OpRegistry::CreateOp("mul", {"a", "b"}, {"out"}, {});
// auto backward = f::Backward(*fwd, {"a"});
// auto &grad_mul = *backward;
// ASSERT_EQ(grad_mul.type_, "mul_grad");
// ASSERT_EQ(grad_mul.inputs_.size(), 2UL + 1UL + 1UL);
// ASSERT_EQ(grad_mul.outputs_.size(), 2UL);
// ASSERT_EQ(grad_mul.Output(f::GradVarName("A")), f::kEmptyVarName);
// ASSERT_EQ(grad_mul.Output(f::GradVarName("B")), f::GradVarName("b"));
// ASSERT_EQ(grad_mul.Input(f::GradVarName("Out")), f::GradVarName("out"));
// ASSERT_EQ(grad_mul.Input("A"), "a");
// ASSERT_EQ(grad_mul.Input("B"), "b");
// ASSERT_EQ(grad_mul.Input("Out"), "out");
//}
//
// TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
// ops::NetOp net;
// net.AddOp(f::OpRegistry::CreateOp("fc", {"x1", "w1", "b1"},
// {"mul_out1", "add_out1", "out1"}, {}));
// net.AddOp(f::OpRegistry::CreateOp("fc", {"out1", "w2", "b2"},
// {"mul_out2", "tmp_out2", "out2"}, {}));
// net.AddOp(f::OpRegistry::CreateOp("fc", {"out2", "w3", "b3"},
// {"mul_out3", "tmp_out3", "out3"}, {}));
// net.CompleteAddOp();
// auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
// ASSERT_TRUE(backward->IsNetOp());
// auto bwd_net = static_cast<ops::NetOp *>(backward.get());
// ASSERT_EQ(bwd_net->ops_.size(), 3UL);
// auto &grad_fc = *bwd_net->ops_[0];
// EXPECT_EQ(grad_fc.inputs_.size(),
// 3UL /* external input number */
// + 1UL /* external output number*/
// + 1UL /* number of gradient of external output*/
// + 2U /* internal variable number*/);
// EXPECT_EQ(grad_fc.outputs_.size(), 2UL /* input number of mul*/
// + 2UL /* input number of rowwise_add
// */
// + 1UL /* input number of sigmod */);
// EXPECT_EQ(bwd_net->ops_[1]->inputs_.size(), 0UL);
// EXPECT_EQ(bwd_net->ops_[1]->outputs_.size(), 0UL);
// EXPECT_EQ(bwd_net->ops_[2]->inputs_.size(), 0UL);
// EXPECT_EQ(bwd_net->ops_[2]->outputs_.size(), 0UL);
//}
TEST(Backward, simple_op_grad) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  ASSERT_NE(fwd, nullptr);
  auto gop = f::OpRegistry::CreateGradOp(*fwd);
  ASSERT_EQ(1UL, gop->inputs_.size());
  ASSERT_EQ("rowwise_add_grad", gop->type_);
  ASSERT_EQ(f::GradVarName("x"), gop->Output(f::GradVarName("X")));
  ASSERT_EQ(f::GradVarName("b"), gop->Output(f::GradVarName("b")));
}
TEST(Backward, simple_op_not_need_grad) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  ASSERT_NE(fwd, nullptr);
  auto gop = f::Backward(*fwd, {"x"});
  ASSERT_EQ(gop->Output(f::GradVarName("X")), f::kEmptyVarName);

  auto no_input_gop = f::Backward(*fwd, {"x", "b"});
  ASSERT_NE(no_input_gop, nullptr);
  ASSERT_TRUE(no_input_gop->IsNetOp());
  ASSERT_EQ(0UL,
            std::static_pointer_cast<ops::NetOp>(no_input_gop)->ops_.size());
}

TEST(Backward, net_fc_backward_normal) {
  std::shared_ptr<f::OperatorBase> fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {"b"}}},
                              {{"mul_result", {"mul_res"}},
                               {"add_result", {"add_re"}},
                               {"Out", {"out"}}},
                              {});
  ASSERT_NE(fwd, nullptr);
  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
  ASSERT_TRUE(gop->IsNetOp());
  auto net = static_cast<ops::NetOp *>(gop.get());

  ASSERT_NO_THROW(net->DebugString());

  ASSERT_EQ(3UL, net->ops_.size());

  f::OperatorBase &d_sigmoid = *net->ops_[0];
  ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);

  f::OperatorBase &d_add = *net->ops_[1];
  ASSERT_EQ("rowwise_add_grad", d_add.type_);

  f::OperatorBase &d_mul = *net->ops_[2];
  ASSERT_EQ("mul_grad", d_mul.type_);
}

TEST(Backward, net_fc_backward_not_have_b) {
  std::shared_ptr<f::OperatorBase> fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {}}},
                              {{"mul_result", {"mul_res"}},
                               {"add_result", {"add_res"}},
                               {"Out", {"tmp"}}},
                              {});
  ASSERT_NE(fwd, nullptr);
  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
  ASSERT_TRUE(gop->IsNetOp());
  auto net = static_cast<ops::NetOp *>(gop.get());

  ASSERT_NO_THROW(net->DebugString());

  ASSERT_EQ(2UL, net->ops_.size());

  f::OperatorBase &d_sigmoid = *net->ops_[0];
  ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);

  f::OperatorBase &d_mul = *net->ops_[1];
  ASSERT_EQ("mul_grad", d_mul.type_);
}
TEST(Backward, net_input_of_network_not_need_grad) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"x"}}, {"W", {"W1"}}, {"b", {"b1"}}},
      {{"mul_result", {"mul_tmp_0"}},
       {"add_result", {"add_tmp_0"}},
       {"Out", {"hidden0"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"hidden0"}}, {"W", {"W2"}}, {"b", {"b2"}}},
      {{"mul_result", {"mul_tmp_1"}},
       {"add_result", {"add_tmp_1"}},
       {"Out", {"hidden1"}}},
      {}));
  net.CompleteAddOp();
  auto bwd = Backward(net, {"x"});  // x@GRAD is not need.
  ASSERT_TRUE(bwd->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
  auto output_vars = bwd_net->OutputVars(true);
  std::unordered_set<std::string> all_outputs = std::unordered_set<std::string>(
      output_vars.begin(), output_vars.end());
  all_outputs.erase(f::kEmptyVarName);

  for (auto &out : {"W1", "b1", "hidden0", "W2", "b2"}) {
    ASSERT_NE(all_outputs.find(f::GradVarName(out)), all_outputs.end());
  }

  // Not Generated X
  ASSERT_EQ(all_outputs.find(f::GradVarName("X")), all_outputs.end());

  ASSERT_EQ(2UL, bwd_net->ops_.size());
  ASSERT_TRUE(bwd_net->ops_[1]->IsNetOp());
  auto first_fc_grad = static_cast<ops::NetOp *>(bwd_net->ops_[1].get());
  ASSERT_EQ(3UL, first_fc_grad->ops_.size());
  ASSERT_EQ(f::kEmptyVarName,
            first_fc_grad->ops_[2]->Output(f::GradVarName("X")));
}

TEST(Backward, net_shared_weight) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp("mul", {{"X", {"x"}}, {"Y", {"w"}}},
                                    {{"Out", {"out"}}}, {}));
  net.AddOp(f::OpRegistry::CreateOp("mul", {{"X", {"out"}}, {"Y", {"w"}}},
                                    {{"Out", {"FinalOut"}}}, {}));
  net.CompleteAddOp();

  auto bwd = f::Backward(net, {});
  ASSERT_TRUE(bwd->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
  ASSERT_EQ(3UL, bwd_net->ops_.size());
  ASSERT_EQ("add", bwd_net->ops_[2]->type_);
}
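A standalone numeric aside (plain C++, not framework code) on why net_shared_weight expects a trailing "add" op: when the same weight w feeds both muls, its total gradient is the sum of the contribution from each use, and that summation is exactly what the extra op has to perform.

#include <iostream>

int main() {
  double x = 3.0, w = 2.0;
  double out = x * w;          // first mul
  double final_out = out * w;  // second mul reuses the same weight w

  // Chain rule, with d(final_out)/d(final_out) = 1:
  double dw_from_second = out;    // treating `out` as fixed
  double dw_from_first = w * x;   // through out = x * w
  double dw_total = dw_from_first + dw_from_second;  // the "add" op's job

  std::cout << "final_out = " << final_out
            << ", dW = " << dw_total << "\n";  // dW = 2*x*w = 12
  return 0;
}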
TEST(Backward, op_register_grad_not_for_network) {
  auto fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {"b"}}},
                              {{"mul_result", {"mul_out"}},
                               {"add_result", {"add_out"}},
                               {"Out", {"out1"}}},
                              {{"temporary_index", std::vector<int>{0, 1}}});

  ASSERT_THROW(f::OpRegistry::CreateGradOp(*fwd), EnforceNotMet);
}

TEST(Backward, op_all_input_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"x", "b"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_TRUE(net->ops_.empty());
}

TEST(Backward, op_all_output_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"out"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_TRUE(net->ops_.empty());
}
TEST(Backward, op_part_of_output_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp("many_output_op", {{"x", {"X"}}},
                                     {{"y", {"Y"}}, {"z", {"Z"}}}, {});
  auto backward = f::Backward(*fwd, {"Z"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_EQ(net->ops_.size(), 2UL);

  auto &fill_zero = *net->ops_[0];
  ASSERT_EQ("fill_zeros_like", fill_zero.type_);
  ASSERT_EQ(1UL, fill_zero.Inputs("Src").size());
  ASSERT_EQ("Z", fill_zero.Input("Src"));
  ASSERT_EQ(1UL, fill_zero.Outputs("Dst").size());
  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix, fill_zero.Output("Dst"));

  auto &d_many_out = *net->ops_[1];
  ASSERT_EQ("many_output_op_grad", d_many_out.type_);
  ASSERT_EQ(1UL + 2UL + 2UL, d_many_out.inputs_.size());  // I/O/OG
  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix,
            d_many_out.Input(f::GradVarName("z")));
  ASSERT_EQ(f::GradVarName("Y"), d_many_out.Input(f::GradVarName("y")));
  ASSERT_EQ(f::GradVarName("X"), d_many_out.Output(f::GradVarName("x")));
}

TEST(Backward, op_part_of_input_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp("mul", {{"X", {"a"}}, {"Y", {"b"}}},
                                     {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"a"});
  auto &grad_mul = *backward;
  ASSERT_EQ(grad_mul.type_, "mul_grad");
  ASSERT_EQ(grad_mul.inputs_.size(), 2UL + 1UL + 1UL);
  ASSERT_EQ(grad_mul.outputs_.size(), 2UL);
  ASSERT_EQ(grad_mul.Output(f::GradVarName("X")), f::kEmptyVarName);
  ASSERT_EQ(grad_mul.Output(f::GradVarName("Y")), f::GradVarName("b"));
  ASSERT_EQ(grad_mul.Input(f::GradVarName("Out")), f::GradVarName("out"));
  ASSERT_EQ(grad_mul.Input("X"), "a");
  ASSERT_EQ(grad_mul.Input("Y"), "b");
  ASSERT_EQ(grad_mul.Input("Out"), "out");
}
TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"x1"}}, {"W", {"w1"}}, {"b", {"b1"}}},
      {{"mul_result", {"mul_out1"}},
       {"add_result", {"add_out1"}},
       {"Out", {"out1"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"out1"}}, {"W", {"w2"}}, {"b", {"b2"}}},
      {{"mul_result", {"mul_out2"}},
       {"add_result", {"tmp_out2"}},
       {"Out", {"out2"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"out2"}}, {"W", {"w3"}}, {"b", {"b3"}}},
      {{"mul_result", {"mul_out3"}},
       {"add_result", {"tmp_out3"}},
       {"Out", {"out3"}}},
      {}));
  net.CompleteAddOp();
  auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
  ASSERT_TRUE(backward->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_EQ(bwd_net->ops_.size(), 3UL);
  auto &grad_fc = *bwd_net->ops_[0];
  EXPECT_EQ(grad_fc.inputs_["all"].size(),
            2UL       /* external input number */
                + 1UL /* external output number*/
                + 1UL /* number of gradient of external output*/
                + 2U /* internal variable number*/);
  EXPECT_EQ(grad_fc.outputs_["all"].size(),
            2UL       /* input number of mul*/
                + 2UL /* input number of rowwise_add */
                + 1UL /* input number of sigmod */);
  EXPECT_EQ(bwd_net->ops_[1]->inputs_["all"].size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[1]->outputs_["all"].size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[2]->inputs_["all"].size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[2]->outputs_["all"].size(), 0UL);
}
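The tests above lean on f::GradVarName and f::kZeroVarSuffix to name gradient and zero-filled variables. As an illustrative aside only (the real definitions live in the framework; the "@GRAD" suffix below is an assumption), the naming convention amounts to appending a fixed suffix to the forward variable name:

#include <iostream>
#include <string>

// Hypothetical stand-in: map a forward variable name to its gradient name.
std::string GradVarName(const std::string& name) { return name + "@GRAD"; }

int main() {
  std::cout << GradVarName("x") << "\n";    // x@GRAD
  std::cout << GradVarName("out") << "\n";  // out@GRAD
  return 0;
}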
paddle/framework/operator.cc
@@ -43,7 +43,7 @@ std::unordered_map<std::string, OpProto>& OpProtos() {
 const std::string& OperatorBase::Input(const std::string& name) const {
   auto it = inputs_.find(name);
-  PADDLE_ENFORCE(it != inputs_.end(), "Op %s does not have output %s", type_,
+  PADDLE_ENFORCE(it != inputs_.end(), "Op %s does not have input %s", type_,
                  name);
   PADDLE_ENFORCE_EQ(it->second.size(), 1UL,
                     "Op %s input %s should contain only one variable", type_,
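This one-line change only corrects the diagnostic: the lookup is over inputs_, so a failed find should report a missing input, not a missing output. A simplified sketch of the accessor's contract (plain exceptions instead of PADDLE_ENFORCE, not the framework code):

#include <stdexcept>
#include <string>
#include <unordered_map>
#include <vector>

// Simplified stand-in for OperatorBase::Input: return the single variable
// bound to an input slot, and complain about a missing *input* otherwise.
struct FakeOp {
  const std::string& Input(const std::string& name) const {
    auto it = inputs_.find(name);
    if (it == inputs_.end()) {
      throw std::runtime_error("Op " + type_ + " does not have input " + name);
    }
    if (it->second.size() != 1) {
      throw std::runtime_error("Op " + type_ + " input " + name +
                               " should contain only one variable");
    }
    return it->second[0];
  }

  std::string type_{"mul"};
  std::unordered_map<std::string, std::vector<std::string>> inputs_{
      {"X", {"a"}}, {"Y", {"b"}}};
};

int main() {
  FakeOp op;
  return op.Input("X") == "a" ? 0 : 1;  // asking for "Out" would throw
}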