From dfb4ea764b57e3b644b308a1691ef1e3da55723c Mon Sep 17 00:00:00 2001
From: qingqing01
Date: Fri, 11 Aug 2017 23:51:57 +0800
Subject: [PATCH] make unit test of backward_test pass.

---
 paddle/framework/backward.cc      |  12 +-
 paddle/framework/backward_test.cc | 451 ++++++++++++++++--------------
 paddle/framework/operator.cc      |   2 +-
 3 files changed, 249 insertions(+), 216 deletions(-)

diff --git a/paddle/framework/backward.cc b/paddle/framework/backward.cc
index 3e16949c9b0..36cc616358e 100644
--- a/paddle/framework/backward.cc
+++ b/paddle/framework/backward.cc
@@ -25,7 +25,7 @@ template <typename Map, typename T>
 static void ForEachVarName(Map& names, T callback) {
   for (auto& name : names) {
     for (auto& n : name.second) {
-      if (callback(n)) break;
+      if (callback(n)) return;
     }
   }
 }
@@ -33,12 +33,12 @@ static void ForEachVarName(Map& names, T callback) {
 static bool AllInSet(
     const std::unordered_map<std::string, std::vector<std::string>>& names,
     const std::string& suffix, const std::unordered_set<std::string>& set) {
-  bool ret_val = true;
-  ForEachVarName(names, [&ret_val, &set, &suffix](const std::string& n) {
-    ret_val = set.find(n + suffix) == set.end();
-    return !ret_val;
+  bool all_in_set = true;
+  ForEachVarName(names, [&all_in_set, &set, &suffix](const std::string& n) {
+    all_in_set = set.find(n + suffix) != set.end();
+    return !all_in_set;
   });
-  return ret_val;
+  return all_in_set;
 }
 
 static std::shared_ptr<OperatorBase> NOP() {
diff --git a/paddle/framework/backward_test.cc b/paddle/framework/backward_test.cc
index 9a38d54acf8..c6e91e243ec 100644
--- a/paddle/framework/backward_test.cc
+++ b/paddle/framework/backward_test.cc
@@ -82,11 +82,11 @@ class FcOp : public operators::NetOp {
     AddOp(OpRegistry::CreateOp("mul",
                                {{"X", {Input("X")}}, {"Y", {Input("W")}}},
                                {{"Out", {Output("mul_result")}}}, {}));
-    auto b_name = Input("b");
+    auto input_b = Inputs("b");
     std::string before_act = "mul_result";
-    if (b_name != kEmptyVarName) {
+    if (input_b.size() != 0) {
       AddOp(OpRegistry::CreateOp(
-          "rowwise_add", {{"X", {Output("mul_result")}}, {"b", {b_name}}},
+          "rowwise_add", {{"X", {Output("mul_result")}}, {"b", {input_b[0]}}},
           {{"Out", {Output("add_result")}}}, {}));
       before_act = "add_result";
     } else {
@@ -166,209 +166,242 @@ REGISTER_OP(fc, f::FcOp, f::FcOpMaker);
 REGISTER_OP(many_output_op, f::EmptyOp, f::ManyOutputOpMaker);
 REGISTER_GRADIENT_OP(many_output_op, many_output_op_grad, f::EmptyOp);
 
-// TEST(Backward, simple_op_grad) {
-//  auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
-//  ASSERT_NE(fwd, nullptr);
-//  auto gop = f::OpRegistry::CreateGradOp(*fwd);
-//  ASSERT_EQ(4UL, gop->inputs_.size());
-//  ASSERT_EQ(f::kEmptyVarName, gop->inputs_[0]);
-//  ASSERT_EQ("rowwise_add_grad", gop->type_);
-//  ASSERT_EQ(f::GradVarName("X"), gop->outputs_[0]);
-//  ASSERT_EQ(f::GradVarName("b"), gop->outputs_[1]);
-//
-//  ASSERT_EQ(f::GradVarName("X"), gop->Output(f::GradVarName("X")));
-//}
-//
-// TEST(Backward, simple_op_not_need_grad) {
-//  auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {});
-//  ASSERT_NE(fwd, nullptr);
-//  auto gop = f::Backward(*fwd, {"X"});
-//  ASSERT_EQ(std::find(gop->outputs_.begin(), gop->outputs_.end(),
-//                      f::GradVarName("X")),
-//            gop->outputs_.end());
-//
-//  auto no_input_gop = f::Backward(*fwd, {"X", "b"});
-//  ASSERT_NE(no_input_gop, nullptr);
-//  ASSERT_TRUE(no_input_gop->IsNetOp());
-//  ASSERT_EQ(0UL,
-//            std::static_pointer_cast<ops::NetOp>(no_input_gop)->ops_.size());
-//}
-//
-// TEST(Backward, net_fc_backward_normal) {
-//  std::shared_ptr<f::OperatorBase> fwd = f::OpRegistry::CreateOp(
-//      "fc", {"X", "w", "b"}, {"mul_result", "add_result", "out"}, {});
"add_result", "out"}, {}); -// ASSERT_NE(fwd, nullptr); -// std::shared_ptr gop = f::Backward(*fwd, {}); -// ASSERT_TRUE(gop->IsNetOp()); -// auto net = static_cast(gop.get()); -// -// ASSERT_NO_THROW(net->DebugString()); -// -// ASSERT_EQ(3UL, net->ops_.size()); -// -// f::OperatorBase &d_sigmoid = *net->ops_[0]; -// ASSERT_EQ("sigmoid_grad", d_sigmoid.type_); -// -// f::OperatorBase &d_add = *net->ops_[1]; -// ASSERT_EQ("rowwise_add_grad", d_add.type_); -// -// f::OperatorBase &d_mul = *net->ops_[2]; -// ASSERT_EQ("mul_grad", d_mul.type_); -//} -// -// TEST(Backward, net_fc_backward_not_have_b) { -// std::shared_ptr fwd = -// f::OpRegistry::CreateOp("fc", {"X", "w", f::kEmptyVarName}, -// {"mul_result", "add_result", "tmp"}, {}); -// ASSERT_NE(fwd, nullptr); -// std::shared_ptr gop = f::Backward(*fwd, {}); -// ASSERT_TRUE(gop->IsNetOp()); -// auto net = static_cast(gop.get()); -// -// ASSERT_NO_THROW(net->DebugString()); -// -// ASSERT_EQ(2UL, net->ops_.size()); -// -// f::OperatorBase &d_sigmoid = *net->ops_[0]; -// ASSERT_EQ("sigmoid_grad", d_sigmoid.type_); -// -// f::OperatorBase &d_mul = *net->ops_[1]; -// ASSERT_EQ("mul_grad", d_mul.type_); -//} -// -// TEST(Backward, net_input_of_network_not_need_grad) { -// ops::NetOp net; -// net.AddOp(f::OpRegistry::CreateOp("fc", {"X", "W1", "b1"}, -// {"mul_tmp_0", "add_tmp_0", "hidden0"}, -// {})); -// net.AddOp(f::OpRegistry::CreateOp("fc", {"hidden0", "W2", "b2"}, -// {"mul_tmp_1", "add_tmp_1", "hidden1"}, -// {})); -// net.CompleteAddOp(); -// auto bwd = Backward(net, {"X"}); // X@GRAD is not need. -// ASSERT_TRUE(bwd->IsNetOp()); -// auto bwd_net = static_cast(bwd.get()); -// -// std::unordered_set all_output = -// std::unordered_set( -// bwd_net->outputs_.begin(), bwd_net->outputs_.end()); -// all_output.erase(f::kEmptyVarName); -// -// for (auto &out : {"W1", "b1", "hidden0", "W2", "b2"}) { -// ASSERT_NE(all_output.find(f::GradVarName(out)), all_output.end()); -// } -// -// // Not Generated X -// ASSERT_EQ(all_output.find(f::GradVarName("X")), all_output.end()); -// -// ASSERT_EQ(2UL, bwd_net->ops_.size()); -// ASSERT_TRUE(bwd_net->ops_[1]->IsNetOp()); -// auto first_fc_grad = static_cast(bwd_net->ops_[1].get()); -// ASSERT_EQ(3UL, first_fc_grad->ops_.size()); -// ASSERT_EQ(f::kEmptyVarName, -// first_fc_grad->ops_[2]->Output(f::GradVarName("A"))); -//} -// -// TEST(Backward, net_shared_weight) { -// ops::NetOp net; -// net.AddOp(f::OpRegistry::CreateOp("mul", {"X", "W"}, {"Out"}, {})); -// net.AddOp(f::OpRegistry::CreateOp("mul", {"Out", "W"}, {"FinalOut"}, {})); -// net.CompleteAddOp(); -// -// auto bwd = f::Backward(net, {}); -// ASSERT_TRUE(bwd->IsNetOp()); -// auto bwd_net = static_cast(bwd.get()); -// ASSERT_EQ(3UL, bwd_net->ops_.size()); -// ASSERT_EQ("add", bwd_net->ops_[2]->type_); -//} -// -// TEST(Backward, op_register_grad_not_for_network) { -// auto fwd = f::OpRegistry::CreateOp( -// "fc", {"X", "W", "b"}, {"mul_out", "add_out", "out1"}, -// {{"temporary_index", std::vector{0, 1}}}); -// -// ASSERT_THROW(f::OpRegistry::CreateGradOp(*fwd), EnforceNotMet); -//} -// -// TEST(Backward, op_all_input_are_not_need) { -// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {}); -// auto backward = f::Backward(*fwd, {"X", "b"}); -// ASSERT_TRUE(backward->IsNetOp()); -// auto net = static_cast(backward.get()); -// ASSERT_TRUE(net->ops_.empty()); -//} -// -// TEST(Backward, op_all_output_are_not_need) { -// auto fwd = f::OpRegistry::CreateOp("rowwise_add", {"X", "b"}, {"Out"}, {}); -// auto backward = 
-//  ASSERT_TRUE(backward->IsNetOp());
-//  auto net = static_cast<ops::NetOp *>(backward.get());
-//  ASSERT_TRUE(net->ops_.empty());
-//}
-//
-// TEST(Backward, op_part_of_output_are_not_need) {
-//  auto fwd = f::OpRegistry::CreateOp("many_output_op", {"X"}, {"Y", "Z"}, {});
-//  auto backward = f::Backward(*fwd, {"Z"});
-//  ASSERT_TRUE(backward->IsNetOp());
-//  auto net = static_cast<ops::NetOp *>(backward.get());
-//  ASSERT_EQ(net->ops_.size(), 2UL);
-//
-//  auto &fill_zero = *net->ops_[0];
-//  ASSERT_EQ("fill_zeros_like", fill_zero.type_);
-//  ASSERT_EQ(1UL, fill_zero.inputs_.size());
-//  ASSERT_EQ("Z", fill_zero.inputs_[0]);
-//  ASSERT_EQ(1UL, fill_zero.outputs_.size());
-//  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix, fill_zero.outputs_[0]);
-//
-//  auto &d_many_out = *net->ops_[1];
-//  ASSERT_EQ("many_output_op_grad", d_many_out.type_);
-//  ASSERT_EQ(1UL + 2UL + 2UL, d_many_out.inputs_.size());  // I/O/OG
-//  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix,
-//            d_many_out.Input(f::GradVarName("z")));
-//  ASSERT_EQ(f::GradVarName("Y"), d_many_out.Input(f::GradVarName("y")));
-//  ASSERT_EQ(f::GradVarName("X"), d_many_out.Output(f::GradVarName("x")));
-//}
-//
-// TEST(Backward, op_part_of_input_are_not_need) {
-//  auto fwd = f::OpRegistry::CreateOp("mul", {"a", "b"}, {"out"}, {});
-//  auto backward = f::Backward(*fwd, {"a"});
-//  auto &grad_mul = *backward;
-//  ASSERT_EQ(grad_mul.type_, "mul_grad");
-//  ASSERT_EQ(grad_mul.inputs_.size(), 2UL + 1UL + 1UL);
-//  ASSERT_EQ(grad_mul.outputs_.size(), 2UL);
-//  ASSERT_EQ(grad_mul.Output(f::GradVarName("A")), f::kEmptyVarName);
-//  ASSERT_EQ(grad_mul.Output(f::GradVarName("B")), f::GradVarName("b"));
-//  ASSERT_EQ(grad_mul.Input(f::GradVarName("Out")), f::GradVarName("out"));
-//  ASSERT_EQ(grad_mul.Input("A"), "a");
-//  ASSERT_EQ(grad_mul.Input("B"), "b");
-//  ASSERT_EQ(grad_mul.Input("Out"), "out");
-//}
-//
-// TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
-//  ops::NetOp net;
-//  net.AddOp(f::OpRegistry::CreateOp("fc", {"x1", "w1", "b1"},
-//                                    {"mul_out1", "add_out1", "out1"}, {}));
-//  net.AddOp(f::OpRegistry::CreateOp("fc", {"out1", "w2", "b2"},
-//                                    {"mul_out2", "tmp_out2", "out2"}, {}));
-//  net.AddOp(f::OpRegistry::CreateOp("fc", {"out2", "w3", "b3"},
-//                                    {"mul_out3", "tmp_out3", "out3"}, {}));
-//  net.CompleteAddOp();
-//  auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
-//  ASSERT_TRUE(backward->IsNetOp());
-//  auto bwd_net = static_cast<ops::NetOp *>(backward.get());
-//  ASSERT_EQ(bwd_net->ops_.size(), 3UL);
-//  auto &grad_fc = *bwd_net->ops_[0];
-//  EXPECT_EQ(grad_fc.inputs_.size(),
-//            3UL /* external input number */
-//                + 1UL /* external output number*/
-//                + 1UL /* number of gradient of external output*/
-//                + 2U /* internal variable number*/);
-//  EXPECT_EQ(grad_fc.outputs_.size(), 2UL /* input number of mul*/
-//                                         + 2UL /* input number of rowwise_add
-//                                         */
-//                                         + 1UL /* input number of sigmod */);
-//  EXPECT_EQ(bwd_net->ops_[1]->inputs_.size(), 0UL);
-//  EXPECT_EQ(bwd_net->ops_[1]->outputs_.size(), 0UL);
-//  EXPECT_EQ(bwd_net->ops_[2]->inputs_.size(), 0UL);
-//  EXPECT_EQ(bwd_net->ops_[2]->outputs_.size(), 0UL);
-//}
+TEST(Backward, simple_op_grad) {
+  auto fwd = f::OpRegistry::CreateOp(
+      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
+  ASSERT_NE(fwd, nullptr);
+  auto gop = f::OpRegistry::CreateGradOp(*fwd);
+  ASSERT_EQ(1UL, gop->inputs_.size());
+  ASSERT_EQ("rowwise_add_grad", gop->type_);
+  ASSERT_EQ(f::GradVarName("x"), gop->Output(f::GradVarName("X")));
ASSERT_EQ(f::GradVarName("b"), gop->Output(f::GradVarName("b"))); +} + +TEST(Backward, simple_op_not_need_grad) { + auto fwd = f::OpRegistry::CreateOp( + "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {}); + ASSERT_NE(fwd, nullptr); + auto gop = f::Backward(*fwd, {"x"}); + ASSERT_EQ(gop->Output(f::GradVarName("X")), f::kEmptyVarName); + + auto no_input_gop = f::Backward(*fwd, {"x", "b"}); + ASSERT_NE(no_input_gop, nullptr); + ASSERT_TRUE(no_input_gop->IsNetOp()); + ASSERT_EQ(0UL, + std::static_pointer_cast(no_input_gop)->ops_.size()); +} + +TEST(Backward, net_fc_backward_normal) { + std::shared_ptr fwd = + f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {"b"}}}, + {{"mul_result", {"mul_res"}}, + {"add_result", {"add_re"}}, + {"Out", {"out"}}}, + {}); + ASSERT_NE(fwd, nullptr); + std::shared_ptr gop = f::Backward(*fwd, {}); + ASSERT_TRUE(gop->IsNetOp()); + auto net = static_cast(gop.get()); + + ASSERT_NO_THROW(net->DebugString()); + + ASSERT_EQ(3UL, net->ops_.size()); + + f::OperatorBase &d_sigmoid = *net->ops_[0]; + ASSERT_EQ("sigmoid_grad", d_sigmoid.type_); + + f::OperatorBase &d_add = *net->ops_[1]; + ASSERT_EQ("rowwise_add_grad", d_add.type_); + + f::OperatorBase &d_mul = *net->ops_[2]; + ASSERT_EQ("mul_grad", d_mul.type_); +} + +TEST(Backward, net_fc_backward_not_have_b) { + std::shared_ptr fwd = + f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {}}}, + {{"mul_result", {"mul_res"}}, + {"add_result", {"add_res"}}, + {"Out", {"tmp"}}}, + {}); + ASSERT_NE(fwd, nullptr); + std::shared_ptr gop = f::Backward(*fwd, {}); + ASSERT_TRUE(gop->IsNetOp()); + auto net = static_cast(gop.get()); + + ASSERT_NO_THROW(net->DebugString()); + + ASSERT_EQ(2UL, net->ops_.size()); + + f::OperatorBase &d_sigmoid = *net->ops_[0]; + ASSERT_EQ("sigmoid_grad", d_sigmoid.type_); + + f::OperatorBase &d_mul = *net->ops_[1]; + ASSERT_EQ("mul_grad", d_mul.type_); +} + +TEST(Backward, net_input_of_network_not_need_grad) { + ops::NetOp net; + net.AddOp(f::OpRegistry::CreateOp( + "fc", {{"X", {"x"}}, {"W", {"W1"}}, {"b", {"b1"}}}, + {{"mul_result", {"mul_tmp_0"}}, + {"add_result", {"add_tmp_0"}}, + {"Out", {"hidden0"}}}, + {})); + net.AddOp(f::OpRegistry::CreateOp( + "fc", {{"X", {"hidden0"}}, {"W", {"W2"}}, {"b", {"b2"}}}, + {{"mul_result", {"mul_tmp_1"}}, + {"add_result", {"add_tmp_1"}}, + {"Out", {"hidden1"}}}, + {})); + net.CompleteAddOp(); + auto bwd = Backward(net, {"x"}); // x@GRAD is not need. 
+  ASSERT_TRUE(bwd->IsNetOp());
+  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
+
+  auto output_vars = bwd_net->OutputVars(true);
+  std::unordered_set<std::string> all_outputs =
+      std::unordered_set<std::string>(output_vars.begin(), output_vars.end());
+  all_outputs.erase(f::kEmptyVarName);
+
+  for (auto &out : {"W1", "b1", "hidden0", "W2", "b2"}) {
+    ASSERT_NE(all_outputs.find(f::GradVarName(out)), all_outputs.end());
+  }
+
+  // Not Generated X
+  ASSERT_EQ(all_outputs.find(f::GradVarName("X")), all_outputs.end());
+
+  ASSERT_EQ(2UL, bwd_net->ops_.size());
+  ASSERT_TRUE(bwd_net->ops_[1]->IsNetOp());
+  auto first_fc_grad = static_cast<ops::NetOp *>(bwd_net->ops_[1].get());
+  ASSERT_EQ(3UL, first_fc_grad->ops_.size());
+  ASSERT_EQ(f::kEmptyVarName,
+            first_fc_grad->ops_[2]->Output(f::GradVarName("X")));
+}
+
+TEST(Backward, net_shared_weight) {
+  ops::NetOp net;
+  net.AddOp(f::OpRegistry::CreateOp("mul", {{"X", {"x"}}, {"Y", {"w"}}},
+                                    {{"Out", {"out"}}}, {}));
+  net.AddOp(f::OpRegistry::CreateOp("mul", {{"X", {"out"}}, {"Y", {"w"}}},
+                                    {{"Out", {"FinalOut"}}}, {}));
+  net.CompleteAddOp();
+
+  auto bwd = f::Backward(net, {});
+  ASSERT_TRUE(bwd->IsNetOp());
+  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
+  ASSERT_EQ(3UL, bwd_net->ops_.size());
+  ASSERT_EQ("add", bwd_net->ops_[2]->type_);
+}
+
+TEST(Backward, op_register_grad_not_for_network) {
+  auto fwd =
+      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {"b"}}},
+                              {{"mul_result", {"mul_out"}},
+                               {"add_result", {"add_out"}},
+                               {"Out", {"out1"}}},
+                              {{"temporary_index", std::vector<int>{0, 1}}});
+
+  ASSERT_THROW(f::OpRegistry::CreateGradOp(*fwd), EnforceNotMet);
+}
+
+TEST(Backward, op_all_input_are_not_need) {
+  auto fwd = f::OpRegistry::CreateOp(
+      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
+  auto backward = f::Backward(*fwd, {"x", "b"});
+  ASSERT_TRUE(backward->IsNetOp());
+  auto net = static_cast<ops::NetOp *>(backward.get());
+  ASSERT_TRUE(net->ops_.empty());
+}
+
+TEST(Backward, op_all_output_are_not_need) {
+  auto fwd = f::OpRegistry::CreateOp(
+      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
+  auto backward = f::Backward(*fwd, {"out"});
+  ASSERT_TRUE(backward->IsNetOp());
+  auto net = static_cast<ops::NetOp *>(backward.get());
+  ASSERT_TRUE(net->ops_.empty());
+}
+
+TEST(Backward, op_part_of_output_are_not_need) {
+  auto fwd = f::OpRegistry::CreateOp("many_output_op", {{"x", {"X"}}},
+                                     {{"y", {"Y"}}, {"z", {"Z"}}}, {});
+  auto backward = f::Backward(*fwd, {"Z"});
+  ASSERT_TRUE(backward->IsNetOp());
+  auto net = static_cast<ops::NetOp *>(backward.get());
+  ASSERT_EQ(net->ops_.size(), 2UL);
+
+  auto &fill_zero = *net->ops_[0];
+  ASSERT_EQ("fill_zeros_like", fill_zero.type_);
+  ASSERT_EQ(1UL, fill_zero.Inputs("Src").size());
+  ASSERT_EQ("Z", fill_zero.Input("Src"));
+  ASSERT_EQ(1UL, fill_zero.Outputs("Dst").size());
+  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix, fill_zero.Output("Dst"));
+
+  auto &d_many_out = *net->ops_[1];
+  ASSERT_EQ("many_output_op_grad", d_many_out.type_);
+  ASSERT_EQ(1UL + 2UL + 2UL, d_many_out.inputs_.size());  // I/O/OG
+  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix,
+            d_many_out.Input(f::GradVarName("z")));
+  ASSERT_EQ(f::GradVarName("Y"), d_many_out.Input(f::GradVarName("y")));
+  ASSERT_EQ(f::GradVarName("X"), d_many_out.Output(f::GradVarName("x")));
+}
+
+TEST(Backward, op_part_of_input_are_not_need) {
+  auto fwd = f::OpRegistry::CreateOp("mul", {{"X", {"a"}}, {"Y", {"b"}}},
+                                     {{"Out", {"out"}}}, {});
+  auto backward = f::Backward(*fwd, {"a"});
+  auto &grad_mul = *backward;
+  ASSERT_EQ(grad_mul.type_, "mul_grad");
+  ASSERT_EQ(grad_mul.inputs_.size(), 2UL + 1UL + 1UL);
+  ASSERT_EQ(grad_mul.outputs_.size(), 2UL);
+  ASSERT_EQ(grad_mul.Output(f::GradVarName("X")), f::kEmptyVarName);
+  ASSERT_EQ(grad_mul.Output(f::GradVarName("Y")), f::GradVarName("b"));
+  ASSERT_EQ(grad_mul.Input(f::GradVarName("Out")), f::GradVarName("out"));
+  ASSERT_EQ(grad_mul.Input("X"), "a");
+  ASSERT_EQ(grad_mul.Input("Y"), "b");
+  ASSERT_EQ(grad_mul.Input("Out"), "out");
+}
+
+TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
+  ops::NetOp net;
+  net.AddOp(f::OpRegistry::CreateOp(
+      "fc", {{"X", {"x1"}}, {"W", {"w1"}}, {"b", {"b1"}}},
+      {{"mul_result", {"mul_out1"}},
+       {"add_result", {"add_out1"}},
+       {"Out", {"out1"}}},
+      {}));
+  net.AddOp(f::OpRegistry::CreateOp(
+      "fc", {{"X", {"out1"}}, {"W", {"w2"}}, {"b", {"b2"}}},
+      {{"mul_result", {"mul_out2"}},
+       {"add_result", {"tmp_out2"}},
+       {"Out", {"out2"}}},
+      {}));
+  net.AddOp(f::OpRegistry::CreateOp(
+      "fc", {{"X", {"out2"}}, {"W", {"w3"}}, {"b", {"b3"}}},
+      {{"mul_result", {"mul_out3"}},
+       {"add_result", {"tmp_out3"}},
+       {"Out", {"out3"}}},
+      {}));
+  net.CompleteAddOp();
+
+  auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
+  ASSERT_TRUE(backward->IsNetOp());
+  auto bwd_net = static_cast<ops::NetOp *>(backward.get());
+  ASSERT_EQ(bwd_net->ops_.size(), 3UL);
+  auto &grad_fc = *bwd_net->ops_[0];
+  EXPECT_EQ(grad_fc.inputs_["all"].size(),
+            2UL /* external input number */
+                + 1UL /* external output number*/
+                + 1UL /* number of gradient of external output*/
+                + 2U /* internal variable number*/);
+  EXPECT_EQ(grad_fc.outputs_["all"].size(),
+            2UL /* input number of mul*/
+                + 2UL /* input number of rowwise_add
+                */
+                + 1UL /* input number of sigmod */);
+  EXPECT_EQ(bwd_net->ops_[1]->inputs_["all"].size(), 0UL);
+  EXPECT_EQ(bwd_net->ops_[1]->outputs_["all"].size(), 0UL);
+  EXPECT_EQ(bwd_net->ops_[2]->inputs_["all"].size(), 0UL);
+  EXPECT_EQ(bwd_net->ops_[2]->outputs_["all"].size(), 0UL);
+}
diff --git a/paddle/framework/operator.cc b/paddle/framework/operator.cc
index 1210ee1ec4c..0dcbdffc9a7 100644
--- a/paddle/framework/operator.cc
+++ b/paddle/framework/operator.cc
@@ -43,7 +43,7 @@ std::unordered_map<std::string, OpProto>& OpProtos() {
 
 const std::string& OperatorBase::Input(const std::string& name) const {
   auto it = inputs_.find(name);
-  PADDLE_ENFORCE(it != inputs_.end(), "Op %s does not have output %s", type_,
+  PADDLE_ENFORCE(it != inputs_.end(), "Op %s does not have input %s", type_,
                  name);
   PADDLE_ENFORCE_EQ(it->second.size(), 1UL,
                     "Op %s input %s should contain only one variable", type_,
-- 
GitLab
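
Not part of the patch: below is a minimal, self-contained sketch of the corrected AllInSet / ForEachVarName behaviour that backward.cc adopts above (early `return` instead of `break`, and the flipped `find()` comparison). The simplified VarMap alias, the main() driver, and the "@GRAD" suffix are illustrative assumptions, not the actual PaddlePaddle headers.

#include <cassert>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

// Simplified stand-in for the operator's name map; the real helper is
// templated over the framework's input/output maps.
using VarMap = std::unordered_map<std::string, std::vector<std::string>>;

template <typename Map, typename T>
static void ForEachVarName(const Map& names, T callback) {
  for (const auto& name : names) {
    for (const auto& n : name.second) {
      // 'return' (not 'break') stops the traversal over *all* map entries as
      // soon as the callback asks to stop, mirroring the fix above.
      if (callback(n)) return;
    }
  }
}

static bool AllInSet(const VarMap& names, const std::string& suffix,
                     const std::unordered_set<std::string>& set) {
  bool all_in_set = true;
  ForEachVarName(names, [&all_in_set, &set, &suffix](const std::string& n) {
    all_in_set = set.find(n + suffix) != set.end();
    return !all_in_set;  // stop at the first name + suffix that is missing
  });
  return all_in_set;
}

int main() {
  VarMap inputs{{"X", {"x"}}, {"b", {"b"}}};
  std::unordered_set<std::string> no_grad{"x@GRAD", "b@GRAD"};
  assert(AllInSet(inputs, "@GRAD", no_grad));   // every name + suffix present
  no_grad.erase("b@GRAD");
  assert(!AllInSet(inputs, "@GRAD", no_grad));  // one missing -> false
  return 0;
}

With the original lambda the helper effectively answered "none in set" and could bail out of only the inner loop, so the rewrite flips the comparison, renames the flag, and returns early so the result actually matches the function's name.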