From 864b00cdf0a70bca09ad52c514a7a9875e22d3a5 Mon Sep 17 00:00:00 2001
From: fengjiayi
Date: Mon, 14 Aug 2017 15:26:56 -0700
Subject: [PATCH] Replace `EmptyOp` with `NOP`

---
 paddle/framework/backward_test.cc | 25 +++++++++----------------
 paddle/operators/net_op_test.cc   | 13 +++----------
 2 files changed, 12 insertions(+), 26 deletions(-)

diff --git a/paddle/framework/backward_test.cc b/paddle/framework/backward_test.cc
index 1a2bee50a11..eb36ca4cfb9 100644
--- a/paddle/framework/backward_test.cc
+++ b/paddle/framework/backward_test.cc
@@ -28,13 +28,6 @@ using OpAttrChecker = framework::OpAttrChecker;
 using Scope = framework::Scope;
 using DeviceContext = platform::DeviceContext;
 
-class EmptyOp : public OperatorBase {
- public:
-  using OperatorBase::OperatorBase;
-  void InferShape(const Scope &scope) const override {}
-  void Run(const Scope &scope, const DeviceContext &dev_ctx) const override {}
-};
-
 class RowWiseAddOpMaker : public OpProtoAndCheckerMaker {
  public:
   RowWiseAddOpMaker(OpProto *proto, OpAttrChecker *op_checker)
@@ -155,16 +148,16 @@ class AddOpMaker : public OpProtoAndCheckerMaker {
 namespace f = paddle::framework;
 namespace ops = paddle::operators;
 using EnforceNotMet = paddle::platform::EnforceNotMet;
-REGISTER_OP(rowwise_add, f::EmptyOp, f::RowWiseAddOpMaker, rowwise_add_grad,
-            f::EmptyOp);
-REGISTER_OP(mul, f::EmptyOp, f::MulOpMaker, mul_grad, f::EmptyOp);
-REGISTER_OP(sigmoid, f::EmptyOp, f::SigmoidOpMaker, sigmoid_grad, f::EmptyOp);
-REGISTER_OP_WITHOUT_GRADIENT(nograd, f::EmptyOp, f::NoGradOpMaker);
-REGISTER_OP_WITHOUT_GRADIENT(fill_zeros_like, f::EmptyOp, f::FillZeroOpMaker);
-REGISTER_OP(add, f::EmptyOp, f::AddOpMaker, add_grad, f::EmptyOp);
+REGISTER_OP(rowwise_add, f::NOP, f::RowWiseAddOpMaker, rowwise_add_grad,
+            f::NOP);
+REGISTER_OP(mul, f::NOP, f::MulOpMaker, mul_grad, f::NOP);
+REGISTER_OP(sigmoid, f::NOP, f::SigmoidOpMaker, sigmoid_grad, f::NOP);
+REGISTER_OP_WITHOUT_GRADIENT(nograd, f::NOP, f::NoGradOpMaker);
+REGISTER_OP_WITHOUT_GRADIENT(fill_zeros_like, f::NOP, f::FillZeroOpMaker);
+REGISTER_OP(add, f::NOP, f::AddOpMaker, add_grad, f::NOP);
 REGISTER_OP_WITHOUT_GRADIENT(fc, f::FcOp, f::FcOpMaker);
-REGISTER_OP(many_output_op, f::EmptyOp, f::ManyOutputOpMaker,
-            many_output_op_grad, f::EmptyOp);
+REGISTER_OP(many_output_op, f::NOP, f::ManyOutputOpMaker, many_output_op_grad,
+            f::NOP);
 
 TEST(Backward, simple_op_grad) {
   auto fwd = f::OpRegistry::CreateOp(
diff --git a/paddle/operators/net_op_test.cc b/paddle/operators/net_op_test.cc
index f7aa56262ef..ea6327f1b08 100644
--- a/paddle/operators/net_op_test.cc
+++ b/paddle/operators/net_op_test.cc
@@ -20,13 +20,6 @@ class TestOp : public framework::OperatorBase {
   }
 };
 
-class EmptyOp : public framework::OperatorBase {
- public:
-  using framework::OperatorBase::OperatorBase;
-  void InferShape(const Scope& scope) const override {}
-  void Run(const Scope& scope, const DeviceContext& dev_ctx) const override {}
-};
-
 template <typename T>
 void AssertSameVectorWithoutOrder(const std::vector<T>& expected,
                                   const std::vector<T>& actual) {
@@ -67,9 +60,9 @@ TEST(OpKernel, all) {
 
 TEST(NetOp, insert_op) {
   NetOp net;
-  auto op1 = std::shared_ptr<EmptyOp>(
-      new EmptyOp("empty", {{"X", {"x"}}, {"W", {"w1"}}, {"b", {"b1"}}},
-                  {{"Out", {"y"}}}, {}));
+  auto op1 = std::shared_ptr<NOP>(
+      new NOP("empty", {{"X", {"x"}}, {"W", {"w1"}}, {"b", {"b1"}}},
+              {{"Out", {"y"}}}, {}));
   net.AddOp(op1);
   net.InsertOp(0, op1);
   ASSERT_EQ(2UL, net.ops_.size());
-- 
GitLab
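
Note for reviewers: this patch assumes a shared no-op operator, NOP, already
exists in the framework namespace, so both test files can drop their local
EmptyOp copies. A minimal sketch of what such an operator would look like,
mirroring the two EmptyOp definitions deleted above (the actual definition
in paddle/framework is authoritative and may differ):

    // Hypothetical sketch of a framework-level no-op operator. It inherits
    // OperatorBase's constructors and overrides the two virtual hooks with
    // empty bodies, so it can stand in for any operator in tests that only
    // exercise graph plumbing (registration, backward construction, NetOp).
    class NOP : public OperatorBase {
     public:
      using OperatorBase::OperatorBase;
      void InferShape(const Scope &scope) const override {}
      void Run(const Scope &scope,
               const platform::DeviceContext &dev_ctx) const override {}
    };

With a single framework-level definition, tests register placeholder forward
and backward operators via REGISTER_OP(mul, f::NOP, f::MulOpMaker, mul_grad,
f::NOP) without redefining the same empty class in every test file.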