From 647121aad3d9e3af753aaa858c43ff57c724571a Mon Sep 17 00:00:00 2001
From: fengjiayi
Date: Thu, 3 Aug 2017 14:08:42 -0700
Subject: [PATCH] Add unittest for GradOpBuilder

---
 paddle/framework/grad_op_builder.cc      |   6 +-
 paddle/framework/grad_op_builder_test.cc | 129 ++++++++++++++++++++++-
 2 files changed, 127 insertions(+), 8 deletions(-)

diff --git a/paddle/framework/grad_op_builder.cc b/paddle/framework/grad_op_builder.cc
index 34722fedf9..ea5e939c6e 100644
--- a/paddle/framework/grad_op_builder.cc
+++ b/paddle/framework/grad_op_builder.cc
@@ -26,7 +26,7 @@ using VarIndexMap = std::unordered_map<std::string, int>;
 enum class OpArgType { IN, OUT };
 
 static std::vector<int>* GetOpFormat(OperatorBase* op, const OpArgType& type) {
-  std::string key = type == OpArgType::IN ? "input_format" : "output_name";
+  std::string key = type == OpArgType::IN ? "input_format" : "output_format";
   return op->attrs_.count(key)
              ? &boost::get<std::vector<int>>(op->attrs_.at(key))
              : nullptr;
@@ -34,7 +34,7 @@ static std::vector<int>* GetOpFormat(OperatorBase* op, const OpArgType& type) {
 
 static const std::vector<int>* GetOpFormat(const OperatorBase* op,
                                            const OpArgType& type) {
-  std::string key = type == OpArgType::IN ? "input_format" : "output_name";
+  std::string key = type == OpArgType::IN ? "input_format" : "output_format";
   return op->attrs_.count(key)
              ? &boost::get<std::vector<int>>(op->attrs_.at(key))
              : nullptr;
@@ -84,7 +84,7 @@ OperatorBase* BuildGradOp(const OperatorBase* op) {
   grad_op->attrs_ = op->attrs_;
   grad_op->attrs_.erase("input_format");
   grad_op->attrs_.erase("output_format");
-  if (GetOpFormat(op, OpArgType::OUT) != nullptr) {
+  if (GetOpFormat(op, OpArgType::IN) != nullptr) {
     grad_op->attrs_["output_format"] = std::vector<int>({0});
   }
   if (GetOpFormat(op, OpArgType::IN) != nullptr ||
diff --git a/paddle/framework/grad_op_builder_test.cc b/paddle/framework/grad_op_builder_test.cc
index e9cf3b9798..3bc47e6f42 100644
--- a/paddle/framework/grad_op_builder_test.cc
+++ b/paddle/framework/grad_op_builder_test.cc
@@ -8,10 +8,49 @@ USE_OP(add_two);
 namespace paddle {
 namespace framework {
 
+class EmptyOp : public OperatorBase {
+ public:
+  void InferShape(const Scope &scope) const override {}
+  void Run(const Scope &scope,
+           const platform::DeviceContext &dev_ctx) const override {}
+};
+
+class MutiInOutOpMaker : public OpProtoAndCheckerMaker {
+ public:
+  MutiInOutOpMaker(OpProto *proto, OpAttrChecker *op_checker)
+      : OpProtoAndCheckerMaker(proto, op_checker) {
+    AddInput("In1", "a single input");
+    AddInput("In2_mult", "a multiple input").SetMultiple();
+    AddInput("In3", "another single input");
+    AddOutput("Out1", "a single output");
+    AddOutput("Out2_mult", "a multiple output").SetMultiple();
+    AddComment("test op with multiple inputs and outputs");
+  }
+};
+
+class IOIgnoredOpMaker : public OpProtoAndCheckerMaker {
+ public:
+  IOIgnoredOpMaker(OpProto *proto, OpAttrChecker *op_checker)
+      : OpProtoAndCheckerMaker(proto, op_checker) {
+    AddInput("In1", "a single input");
+    AddInput("In2_mult", "a multiple input").SetMultiple().IgnoreGradient();
+    AddInput("In3_mult", "another multiple input").SetMultiple();
+    AddOutput("Out1_mult", "a multiple output").SetMultiple();
+    AddOutput("Out2", "a single output").IgnoreGradient();
+    AddComment("op with inputs and outputs ignored in gradient calculating");
+  }
+};
+
+}  // namespace framework
+}  // namespace paddle
+
+namespace f = paddle::framework;
+
 TEST(GradOpBuilder, AddTwo) {
-  std::shared_ptr<OperatorBase> add_op(
-      OpRegistry::CreateOp("add_two", {"x", "y"}, {"out"}, {}));
-  std::shared_ptr<OperatorBase> grad_add_op = OpRegistry::CreateGradOp(*add_op);
+  std::shared_ptr<f::OperatorBase> add_op(
+      f::OpRegistry::CreateOp("add_two", {"x", "y"}, {"out"}, {}));
+  std::shared_ptr<f::OperatorBase> grad_add_op =
+      f::OpRegistry::CreateGradOp(*add_op);
   EXPECT_EQ(static_cast<int>(grad_add_op->inputs_.size()), 4);
   EXPECT_EQ(static_cast<int>(grad_add_op->outputs_.size()), 2);
   EXPECT_EQ(grad_add_op->Input("X"), "x");
@@ -22,5 +61,85 @@ TEST(GradOpBuilder, AddTwo) {
   EXPECT_EQ(grad_add_op->Output("Y@GRAD"), "y@GRAD");
 }
 
-}  // namespace framework
-}  // namespace paddle
\ No newline at end of file
+REGISTER_OP(mult_io, f::EmptyOp, f::MutiInOutOpMaker);
+REGISTER_GRADIENT_OP(mult_io, mult_io_grad, f::EmptyOp);
+REGISTER_OP(io_ignored, f::EmptyOp, f::IOIgnoredOpMaker);
+REGISTER_GRADIENT_OP(io_ignored, io_ignored_grad, f::EmptyOp);
+
+TEST(GradOpBuilder, MutiInOut) {
+  f::AttributeMap attrs{{"input_format", std::vector<int>{0, 1, 4, 5}},
+                        {"output_format", std::vector<int>{0, 1, 3}}};
+  std::shared_ptr<f::OperatorBase> test_op(f::OpRegistry::CreateOp(
+      "mult_io", {"in1", "in2_1", "in2_2", "in2_3", "in3"},
+      {"out1", "out2_1", "out2_2"}, attrs));
+  std::shared_ptr<f::OperatorBase> grad_test_op =
+      f::OpRegistry::CreateGradOp(*test_op);
+
+  ASSERT_EQ(grad_test_op->inputs_.size(), 5UL + 3UL + 3UL);
+  EXPECT_EQ(grad_test_op->Input("In1"), "in1");
+  EXPECT_EQ(grad_test_op->Inputs("In2_mult"),
+            std::vector<std::string>({"in2_1", "in2_2", "in2_3"}));
+  EXPECT_EQ(grad_test_op->Input("In3"), "in3");
+  EXPECT_EQ(grad_test_op->Input("Out1"), "out1");
+  EXPECT_EQ(grad_test_op->Inputs("Out2_mult"),
+            std::vector<std::string>({"out2_1", "out2_2"}));
+  EXPECT_EQ(grad_test_op->Input("Out1" + f::OperatorBase::GRAD_VAR_SUFFIX()),
+            "out1" + f::OperatorBase::GRAD_VAR_SUFFIX());
+  EXPECT_EQ(
+      grad_test_op->Inputs("Out2_mult" + f::OperatorBase::GRAD_VAR_SUFFIX()),
+      std::vector<std::string>(
+          {"out2_1" + f::OperatorBase::GRAD_VAR_SUFFIX(),
+           "out2_2" + f::OperatorBase::GRAD_VAR_SUFFIX()}));
+
+  ASSERT_EQ(grad_test_op->outputs_.size(), 5UL);
+  EXPECT_EQ(grad_test_op->Output("In1" + f::OperatorBase::GRAD_VAR_SUFFIX()),
+            "in1" + f::OperatorBase::GRAD_VAR_SUFFIX());
+  EXPECT_EQ(
+      grad_test_op->Outputs("In2_mult" + f::OperatorBase::GRAD_VAR_SUFFIX()),
+      std::vector<std::string>({"in2_1" + f::OperatorBase::GRAD_VAR_SUFFIX(),
+                                "in2_2" + f::OperatorBase::GRAD_VAR_SUFFIX(),
+                                "in2_3" + f::OperatorBase::GRAD_VAR_SUFFIX()}));
+  EXPECT_EQ(grad_test_op->Output("In3" + f::OperatorBase::GRAD_VAR_SUFFIX()),
+            "in3" + f::OperatorBase::GRAD_VAR_SUFFIX());
+}
+
+TEST(GradOpBuilder, IOIgnoredInGradient) {
+  f::AttributeMap attrs{{"input_format", std::vector<int>{0, 1, 3, 5}},
+                        {"output_format", std::vector<int>{0, 2, 3}}};
+  std::shared_ptr<f::OperatorBase> test_op(f::OpRegistry::CreateOp(
+      "io_ignored", {"in1", "in2_1", "in2_2", "in3_1", "in3_2"},
+      {"out1_1", "out1_2", "out2"}, attrs));
+  std::shared_ptr<f::OperatorBase> grad_test_op =
+      f::OpRegistry::CreateGradOp(*test_op);
+
+  // 'In2' and 'Out2' are ignored in gradient calculating
+  ASSERT_EQ(grad_test_op->inputs_.size(), 5UL + 3UL + 3UL);
+  EXPECT_EQ(grad_test_op->Input("In1"), "in1");
+  EXPECT_EQ(grad_test_op->Inputs("In2_mult"),
+            std::vector<std::string>({f::OperatorBase::EMPTY_VAR_NAME(),
+                                      f::OperatorBase::EMPTY_VAR_NAME()}));
+  EXPECT_EQ(grad_test_op->Inputs("In3_mult"),
+            std::vector<std::string>({"in3_1", "in3_2"}));
+  EXPECT_EQ(grad_test_op->Inputs("Out1_mult"),
+            std::vector<std::string>({"out1_1", "out1_2"}));
+  EXPECT_EQ(grad_test_op->Input("Out2"), f::OperatorBase::EMPTY_VAR_NAME());
+  EXPECT_EQ(
+      grad_test_op->Inputs("Out1_mult" + f::OperatorBase::GRAD_VAR_SUFFIX()),
+      std::vector<std::string>(
+          {"out1_1" + f::OperatorBase::GRAD_VAR_SUFFIX(),
+           "out1_2" + f::OperatorBase::GRAD_VAR_SUFFIX()}));
EXPECT_EQ(grad_test_op->Input("Out2" + f::OperatorBase::GRAD_VAR_SUFFIX()), + "out2" + f::OperatorBase::GRAD_VAR_SUFFIX()); + + ASSERT_EQ(grad_test_op->outputs_.size(), 5UL); + EXPECT_EQ(grad_test_op->Output("In1" + f::OperatorBase::GRAD_VAR_SUFFIX()), + "in1" + f::OperatorBase::GRAD_VAR_SUFFIX()); + EXPECT_EQ( + grad_test_op->Outputs("In2_mult" + f::OperatorBase::GRAD_VAR_SUFFIX()), + std::vector({"in2_1" + f::OperatorBase::GRAD_VAR_SUFFIX(), + "in2_2" + f::OperatorBase::GRAD_VAR_SUFFIX()})); + EXPECT_EQ( + grad_test_op->Outputs("In3_mult" + f::OperatorBase::GRAD_VAR_SUFFIX()), + std::vector({"in3_1" + f::OperatorBase::GRAD_VAR_SUFFIX(), + "in3_2" + f::OperatorBase::GRAD_VAR_SUFFIX()})); +} -- GitLab