diff --git a/paddle/framework/backward_test.cc b/paddle/framework/backward_test.cc
index 1a2bee50a1138a3f5fdd9a3522770bb38c711808..eb36ca4cfb9c7d6b05f1fce781430c8303541bfe 100644
--- a/paddle/framework/backward_test.cc
+++ b/paddle/framework/backward_test.cc
@@ -28,13 +28,6 @@ using OpAttrChecker = framework::OpAttrChecker;
 using Scope = framework::Scope;
 using DeviceContext = platform::DeviceContext;
 
-class EmptyOp : public OperatorBase {
- public:
-  using OperatorBase::OperatorBase;
-  void InferShape(const Scope &scope) const override {}
-  void Run(const Scope &scope, const DeviceContext &dev_ctx) const override {}
-};
-
 class RowWiseAddOpMaker : public OpProtoAndCheckerMaker {
  public:
   RowWiseAddOpMaker(OpProto *proto, OpAttrChecker *op_checker)
@@ -155,16 +148,16 @@ class AddOpMaker : public OpProtoAndCheckerMaker {
 namespace f = paddle::framework;
 namespace ops = paddle::operators;
 using EnforceNotMet = paddle::platform::EnforceNotMet;
-REGISTER_OP(rowwise_add, f::EmptyOp, f::RowWiseAddOpMaker, rowwise_add_grad,
-            f::EmptyOp);
-REGISTER_OP(mul, f::EmptyOp, f::MulOpMaker, mul_grad, f::EmptyOp);
-REGISTER_OP(sigmoid, f::EmptyOp, f::SigmoidOpMaker, sigmoid_grad, f::EmptyOp);
-REGISTER_OP_WITHOUT_GRADIENT(nograd, f::EmptyOp, f::NoGradOpMaker);
-REGISTER_OP_WITHOUT_GRADIENT(fill_zeros_like, f::EmptyOp, f::FillZeroOpMaker);
-REGISTER_OP(add, f::EmptyOp, f::AddOpMaker, add_grad, f::EmptyOp);
+REGISTER_OP(rowwise_add, f::NOP, f::RowWiseAddOpMaker, rowwise_add_grad,
+            f::NOP);
+REGISTER_OP(mul, f::NOP, f::MulOpMaker, mul_grad, f::NOP);
+REGISTER_OP(sigmoid, f::NOP, f::SigmoidOpMaker, sigmoid_grad, f::NOP);
+REGISTER_OP_WITHOUT_GRADIENT(nograd, f::NOP, f::NoGradOpMaker);
+REGISTER_OP_WITHOUT_GRADIENT(fill_zeros_like, f::NOP, f::FillZeroOpMaker);
+REGISTER_OP(add, f::NOP, f::AddOpMaker, add_grad, f::NOP);
 REGISTER_OP_WITHOUT_GRADIENT(fc, f::FcOp, f::FcOpMaker);
-REGISTER_OP(many_output_op, f::EmptyOp, f::ManyOutputOpMaker,
-            many_output_op_grad, f::EmptyOp);
+REGISTER_OP(many_output_op, f::NOP, f::ManyOutputOpMaker, many_output_op_grad,
+            f::NOP);
 
 TEST(Backward, simple_op_grad) {
   auto fwd = f::OpRegistry::CreateOp(
diff --git a/paddle/operators/net_op_test.cc b/paddle/operators/net_op_test.cc
index f7aa56262ef71c24bf668950f6e9914e5f96ff70..ea6327f1b08eda3071884ec6e566c491f1912329 100644
--- a/paddle/operators/net_op_test.cc
+++ b/paddle/operators/net_op_test.cc
@@ -20,13 +20,6 @@ class TestOp : public framework::OperatorBase {
   }
 };
 
-class EmptyOp : public framework::OperatorBase {
- public:
-  using framework::OperatorBase::OperatorBase;
-  void InferShape(const Scope& scope) const override {}
-  void Run(const Scope& scope, const DeviceContext& dev_ctx) const override {}
-};
-
 template <typename T>
 void AssertSameVectorWithoutOrder(const std::vector<T>& expected,
                                   const std::vector<T>& actual) {
@@ -67,9 +60,9 @@ TEST(OpKernel, all) {
 
 TEST(NetOp, insert_op) {
   NetOp net;
-  auto op1 = std::shared_ptr<EmptyOp>(
-      new EmptyOp("empty", {{"X", {"x"}}, {"W", {"w1"}}, {"b", {"b1"}}},
-                  {{"Out", {"y"}}}, {}));
+  auto op1 = std::shared_ptr<NOP>(
+      new NOP("empty", {{"X", {"x"}}, {"W", {"w1"}}, {"b", {"b1"}}},
+              {{"Out", {"y"}}}, {}));
   net.AddOp(op1);
   net.InsertOp(0, op1);
   ASSERT_EQ(2UL, net.ops_.size());
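
Note: the two deleted `EmptyOp` classes were identical do-nothing stubs, duplicated in each test file; this change consolidates them behind a single `f::NOP` operator provided by the framework. For context, here is a minimal sketch of what that shared class presumably looks like, reconstructed from the removed code above. The framework-level location and the unqualified `NOP` visibility in `net_op_test.cc` are assumptions, not confirmed by this diff:

```cpp
// Sketch of the shared no-op operator the tests now reuse. Assumed to live
// framework-side next to OperatorBase (e.g. in paddle/framework/operator.h,
// where OperatorBase, Scope, and platform::DeviceContext are already visible);
// reconstructed from the deleted EmptyOp stubs, not verbatim upstream code.
namespace paddle {
namespace framework {

class NOP : public OperatorBase {
 public:
  // Inherit OperatorBase's (type, inputs, outputs, attrs) constructor.
  using OperatorBase::OperatorBase;
  // Both hooks deliberately do nothing: tests only need an operator that can
  // be registered and wired into a net, not one that computes anything.
  void InferShape(const Scope &scope) const override {}
  void Run(const Scope &scope,
           const platform::DeviceContext &dev_ctx) const override {}
};

}  // namespace framework
}  // namespace paddle
```

Deduplicating the stub means any future change to `OperatorBase`'s virtual interface only has to be reflected in one place instead of in every test file that declared its own `EmptyOp`.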