// Unit tests for GradOpBuilder: verifies that OpRegistry::CreateGradOp wires
// the gradient op's inputs/outputs correctly for single, multiple, and
// gradient-ignored variables.
#include "paddle/framework/grad_op_builder.h"
#include <gtest/gtest.h>  // restored: angle-bracket include was stripped in transit
#include "paddle/framework/op_registry.h"
#include "paddle/framework/operator.h"

USE_OP(add_two);

namespace paddle {
namespace framework {

// A no-op operator used as the kernel for the test ops registered below;
// these tests only exercise I/O wiring, never execution.
class EmptyOp : public OperatorBase {
 public:
  void InferShape(const Scope &scope) const override {}
  void Run(const Scope &scope,
           const platform::DeviceContext &dev_ctx) const override {}
};

// Proto maker for an op with a mix of single and multiple inputs/outputs.
class MutiInOutOpMaker : public OpProtoAndCheckerMaker {
 public:
  MutiInOutOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("In1", "a single input");
    AddInput("In2_mult", "a multiple input").SetMultiple();
    AddInput("In3", "another single input");
    AddOutput("Out1", "a single output");
    AddOutput("Out2_mult", "a multiple output").SetMultiple();
    AddComment("test op with multiple inputs and outputs");
  }
};

// Proto maker for an op where some variables are excluded from gradient
// computation via IgnoreGradient().
class IOIgnoredOpMaker : public OpProtoAndCheckerMaker {
 public:
  IOIgnoredOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("In1", "a single input");
    AddInput("In2_mult", "a multiple input").SetMultiple().IgnoreGradient();
    AddInput("In3_mult", "another multiple input").SetMultiple();
    AddOutput("Out1_mult", "a multiple output").SetMultiple();
    AddOutput("Out2", "a single output").IgnoreGradient();
    AddComment("op with inputs and outputs ignored in gradient calculating");
  }
};

}  // namespace framework
}  // namespace paddle

namespace f = paddle::framework;

// The gradient of add_two takes X, Y, Out, Out@GRAD as inputs and produces
// X@GRAD, Y@GRAD; each gradient variable name is the forward name + suffix.
TEST(GradOpBuilder, AddTwo) {
  std::shared_ptr<f::OperatorBase> add_op(
      f::OpRegistry::CreateOp("add_two", {"x", "y"}, {"out"}, {}));
  std::shared_ptr<f::OperatorBase> grad_add_op =
      f::OpRegistry::CreateGradOp(*add_op);
  EXPECT_EQ(static_cast<int>(grad_add_op->inputs_.size()), 4);
  EXPECT_EQ(static_cast<int>(grad_add_op->outputs_.size()), 2);
  EXPECT_EQ(grad_add_op->Input("X"), "x");
  EXPECT_EQ(grad_add_op->Input("Y"), "y");
  EXPECT_EQ(grad_add_op->Input("Out"), "out");
  EXPECT_EQ(grad_add_op->Input("Out@GRAD"), "out@GRAD");
  EXPECT_EQ(grad_add_op->Output("X@GRAD"), "x@GRAD");
  EXPECT_EQ(grad_add_op->Output("Y@GRAD"), "y@GRAD");
}

REGISTER_OP(mult_io, f::EmptyOp, f::MutiInOutOpMaker);
REGISTER_GRADIENT_OP(mult_io, mult_io_grad, f::EmptyOp);
REGISTER_OP(io_ignored, f::EmptyOp, f::IOIgnoredOpMaker);
REGISTER_GRADIENT_OP(io_ignored, io_ignored_grad, f::EmptyOp);

// The gradient op's inputs are: all forward inputs (5), all forward outputs
// (3), and the gradients of the forward outputs (3); its outputs are the
// gradients of the forward inputs (5).
TEST(GradOpBuilder, MutiInOut) {
  // input_format/output_format mark the slice boundaries of the multiple
  // ("mult") variables inside the flat name lists.
  f::AttributeMap attrs{{"input_format", std::vector<int>{0, 1, 4, 5}},
                        {"output_format", std::vector<int>{0, 1, 3}}};
  std::shared_ptr<f::OperatorBase> test_op(f::OpRegistry::CreateOp(
      "mult_io", {"in1", "in2_1", "in2_2", "in2_3", "in3"},
      {"out1", "out2_1", "out2_2"}, attrs));
  std::shared_ptr<f::OperatorBase> grad_test_op =
      f::OpRegistry::CreateGradOp(*test_op);

  ASSERT_EQ(grad_test_op->inputs_.size(), 5UL + 3UL + 3UL);
  EXPECT_EQ(grad_test_op->Input("In1"), "in1");
  EXPECT_EQ(grad_test_op->Inputs("In2_mult"),
            std::vector<std::string>({"in2_1", "in2_2", "in2_3"}));
  EXPECT_EQ(grad_test_op->Input("In3"), "in3");
  EXPECT_EQ(grad_test_op->Input("Out1"), "out1");
  EXPECT_EQ(grad_test_op->Inputs("Out2_mult"),
            std::vector<std::string>({"out2_1", "out2_2"}));
  EXPECT_EQ(grad_test_op->Input("Out1" + f::OperatorBase::GRAD_VAR_SUFFIX()),
            "out1" + f::OperatorBase::GRAD_VAR_SUFFIX());
  EXPECT_EQ(
      grad_test_op->Inputs("Out2_mult" + f::OperatorBase::GRAD_VAR_SUFFIX()),
      std::vector<std::string>(
          {"out2_1" + f::OperatorBase::GRAD_VAR_SUFFIX(),
           "out2_2" + f::OperatorBase::GRAD_VAR_SUFFIX()}));

  ASSERT_EQ(grad_test_op->outputs_.size(), 5UL);
  EXPECT_EQ(grad_test_op->Output("In1" + f::OperatorBase::GRAD_VAR_SUFFIX()),
            "in1" + f::OperatorBase::GRAD_VAR_SUFFIX());
  EXPECT_EQ(
      grad_test_op->Outputs("In2_mult" + f::OperatorBase::GRAD_VAR_SUFFIX()),
      std::vector<std::string>({"in2_1" + f::OperatorBase::GRAD_VAR_SUFFIX(),
                                "in2_2" + f::OperatorBase::GRAD_VAR_SUFFIX(),
                                "in2_3" + f::OperatorBase::GRAD_VAR_SUFFIX()}));
  EXPECT_EQ(grad_test_op->Output("In3" + f::OperatorBase::GRAD_VAR_SUFFIX()),
            "in3" + f::OperatorBase::GRAD_VAR_SUFFIX());
}

// Variables marked IgnoreGradient() still occupy slots in the gradient op's
// input list, but are filled with EMPTY_VAR_NAME placeholders.
TEST(GradOpBuilder, IOIgnoredInGradient) {
  f::AttributeMap attrs{{"input_format", std::vector<int>{0, 1, 3, 5}},
                        {"output_format", std::vector<int>{0, 2, 3}}};
  std::shared_ptr<f::OperatorBase> test_op(f::OpRegistry::CreateOp(
      "io_ignored", {"in1", "in2_1", "in2_2", "in3_1", "in3_2"},
      {"out1_1", "out1_2", "out2"}, attrs));
  std::shared_ptr<f::OperatorBase> grad_test_op =
      f::OpRegistry::CreateGradOp(*test_op);

  // 'In2' and 'Out2' are ignored in gradient calculating
  ASSERT_EQ(grad_test_op->inputs_.size(), 5UL + 3UL + 3UL);
  EXPECT_EQ(grad_test_op->Input("In1"), "in1");
  EXPECT_EQ(grad_test_op->Inputs("In2_mult"),
            std::vector<std::string>({f::OperatorBase::EMPTY_VAR_NAME(),
                                      f::OperatorBase::EMPTY_VAR_NAME()}));
  EXPECT_EQ(grad_test_op->Inputs("In3_mult"),
            std::vector<std::string>({"in3_1", "in3_2"}));
  EXPECT_EQ(grad_test_op->Inputs("Out1_mult"),
            std::vector<std::string>({"out1_1", "out1_2"}));
  EXPECT_EQ(grad_test_op->Input("Out2"), f::OperatorBase::EMPTY_VAR_NAME());
  EXPECT_EQ(
      grad_test_op->Inputs("Out1_mult" + f::OperatorBase::GRAD_VAR_SUFFIX()),
      std::vector<std::string>(
          {"out1_1" + f::OperatorBase::GRAD_VAR_SUFFIX(),
           "out1_2" + f::OperatorBase::GRAD_VAR_SUFFIX()}));
  EXPECT_EQ(grad_test_op->Input("Out2" + f::OperatorBase::GRAD_VAR_SUFFIX()),
            "out2" + f::OperatorBase::GRAD_VAR_SUFFIX());

  ASSERT_EQ(grad_test_op->outputs_.size(), 5UL);
  EXPECT_EQ(grad_test_op->Output("In1" + f::OperatorBase::GRAD_VAR_SUFFIX()),
            "in1" + f::OperatorBase::GRAD_VAR_SUFFIX());
  EXPECT_EQ(
      grad_test_op->Outputs("In2_mult" + f::OperatorBase::GRAD_VAR_SUFFIX()),
      std::vector<std::string>({"in2_1" + f::OperatorBase::GRAD_VAR_SUFFIX(),
                                "in2_2" + f::OperatorBase::GRAD_VAR_SUFFIX()}));
  EXPECT_EQ(
      grad_test_op->Outputs("In3_mult" + f::OperatorBase::GRAD_VAR_SUFFIX()),
      std::vector<std::string>({"in3_1" + f::OperatorBase::GRAD_VAR_SUFFIX(),
                                "in3_2" + f::OperatorBase::GRAD_VAR_SUFFIX()}));
}