From 513b1e010f058c6b723b4eb130c9c9889dd3058f Mon Sep 17 00:00:00 2001
From: dzhwinter
Date: Sun, 26 Nov 2017 20:49:00 +0800
Subject: [PATCH] "add floor, ceil, round op" (#5898)

* "add floor, ceil, round op"

* "reuse zero gradient"

* "fix divide zero"

* "fix numpy floor error"
---
 paddle/operators/activation_op.cc             | 54 +++++++++++++++++++
 paddle/operators/activation_op.h              | 38 +++++++++++++
 .../v2/fluid/tests/test_activation_op.py      | 43 +++++++++++++++
 3 files changed, 135 insertions(+)

diff --git a/paddle/operators/activation_op.cc b/paddle/operators/activation_op.cc
index c66d575d24b..154c618e8e7 100644
--- a/paddle/operators/activation_op.cc
+++ b/paddle/operators/activation_op.cc
@@ -223,6 +223,51 @@ $y = |x|$
   }
 };
 
+class CeilOpMaker : public framework::OpProtoAndCheckerMaker {
+ public:
+  CeilOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
+      : OpProtoAndCheckerMaker(proto, op_checker) {
+    AddInput("X", "Input of Ceil operator");
+    AddOutput("Y", "Output of Ceil operator");
+    AddComment(R"DOC(
+Ceil Activation Operator.
+
+$y = ceil(x)$
+
+)DOC");
+  }
+};
+
+class FloorOpMaker : public framework::OpProtoAndCheckerMaker {
+ public:
+  FloorOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
+      : OpProtoAndCheckerMaker(proto, op_checker) {
+    AddInput("X", "Input of Floor operator");
+    AddOutput("Y", "Output of Floor operator");
+    AddComment(R"DOC(
+Floor Activation Operator.
+
+$y = floor(x)$
+
+)DOC");
+  }
+};
+
+class RoundOpMaker : public framework::OpProtoAndCheckerMaker {
+ public:
+  RoundOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
+      : OpProtoAndCheckerMaker(proto, op_checker) {
+    AddInput("X", "Input of Round operator");
+    AddOutput("Y", "Output of Round operator");
+    AddComment(R"DOC(
+Round Activation Operator.
+
+$y = [x]$
+
+)DOC");
+  }
+};
+
 class ReciprocalOpMaker : public framework::OpProtoAndCheckerMaker {
  public:
   ReciprocalOpMaker(framework::OpProto *proto,
@@ -493,6 +538,15 @@ REGISTER_OP(sqrt, ops::ActivationOp, ops::SqrtOpMaker, sqrt_grad,
 REGISTER_OP(abs, ops::ActivationOp, ops::AbsOpMaker, abs_grad,
             ops::ActivationOpGrad);
 
+REGISTER_OP(ceil, ops::ActivationOp, ops::CeilOpMaker, ceil_grad,
+            ops::ActivationOpGrad);
+
+REGISTER_OP(floor, ops::ActivationOp, ops::FloorOpMaker, floor_grad,
+            ops::ActivationOpGrad);
+
+REGISTER_OP(round, ops::ActivationOp, ops::RoundOpMaker, round_grad,
+            ops::ActivationOpGrad);
+
 REGISTER_OP(reciprocal, ops::ActivationOp, ops::ReciprocalOpMaker,
             reciprocal_grad, ops::ActivationOpGrad);
 
diff --git a/paddle/operators/activation_op.h b/paddle/operators/activation_op.h
index ceb4b4e40b6..8cd3bfbbd3f 100644
--- a/paddle/operators/activation_op.h
+++ b/paddle/operators/activation_op.h
@@ -283,6 +283,41 @@ struct SqrtGradFunctor : public BaseActivationFunctor<T> {
   }
 };
 
+// ceil(x) = ceiling(x)
+template <typename T>
+struct CeilFunctor : public BaseActivationFunctor<T> {
+  template <typename Device, typename X, typename Y>
+  void operator()(Device d, X x, Y y) const {
+    y.device(d) = x.ceil();
+  }
+};
+
+template <typename T>
+struct ZeroGradFunctor : public BaseActivationFunctor<T> {
+  template <typename Device, typename X, typename Y, typename dY, typename dX>
+  void operator()(Device d, X x, Y y, dY dy, dX dx) const {
+    dx.device(d) = static_cast<T>(0) / x;
+  }
+};
+
+// floor(x) = flooring(x)
+template <typename T>
+struct FloorFunctor : public BaseActivationFunctor<T> {
+  template <typename Device, typename X, typename Y>
+  void operator()(Device d, X x, Y y) const {
+    y.device(d) = x.floor();
+  }
+};
+
+// round(x) = [x]
+template <typename T>
+struct RoundFunctor : public BaseActivationFunctor<T> {
+  template <typename Device, typename X, typename Y>
+  void operator()(Device d, X x, Y y) const {
+    y.device(d) = x.round();
+  }
+};
+
 // abs(x) = |x|
 template <typename T>
 struct AbsFunctor : public BaseActivationFunctor<T> {
@@ -677,6 +712,9 @@ struct HardSigmoidGradFunctor : public BaseActivationFunctor<T> {
   __macro(softshrink, SoftShrinkFunctor, SoftShrinkGradFunctor);  \
   __macro(sqrt, SqrtFunctor, SqrtGradFunctor);                    \
   __macro(abs, AbsFunctor, AbsGradFunctor);                       \
+  __macro(ceil, CeilFunctor, ZeroGradFunctor);                    \
+  __macro(floor, FloorFunctor, ZeroGradFunctor);                  \
+  __macro(round, RoundFunctor, ZeroGradFunctor);                  \
   __macro(reciprocal, ReciprocalFunctor, ReciprocalGradFunctor);  \
   __macro(log, LogFunctor, LogGradFunctor);                       \
   __macro(square, SquareFunctor, SquareGradFunctor);              \
diff --git a/python/paddle/v2/fluid/tests/test_activation_op.py b/python/paddle/v2/fluid/tests/test_activation_op.py
index 7649e60a383..bd52bef2605 100644
--- a/python/paddle/v2/fluid/tests/test_activation_op.py
+++ b/python/paddle/v2/fluid/tests/test_activation_op.py
@@ -152,6 +152,49 @@ class TestAbs(OpTest):
         self.check_grad(['X'], 'Y', max_relative_error=0.007)
 
 
+class TestCeil(OpTest):
+    def setUp(self):
+        self.op_type = "ceil"
+        x = np.random.uniform(-1, 1, [4, 4]).astype("float32")
+        self.inputs = {'X': x}
+        self.outputs = {'Y': np.ceil(self.inputs['X'])}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Y', max_relative_error=0.007)
+
+
+class TestFloor(OpTest):
+    def setUp(self):
+        self.op_type = "floor"
+        x = np.random.uniform(-1, 1, [4, 4]).astype("float32")
+        self.inputs = {'X': x}
+        # floor rounds toward negative infinity
+        self.outputs = {'Y': np.floor(self.inputs['X'])}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Y', max_relative_error=0.007)
+
+
+class TestRound(OpTest):
+    def setUp(self):
+        self.op_type = "round"
+        x = np.random.uniform(-1, 1, [4, 4]).astype("float32")
+        self.inputs = {'X': x}
+        self.outputs = {'Y': np.round(self.inputs['X'])}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Y', max_relative_error=0.007)
+
+
 class TestRelu(OpTest):
     def setUp(self):
         self.op_type = "relu"
--
GitLab
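
A note on the shared backward pass: ceil, floor, and round are piecewise-constant functions, so their derivative is zero everywhere it is defined and undefined only at the jump points. That is why all three ops reuse the single ZeroGradFunctor, and presumably why the patch writes the gradient as static_cast<T>(0) / x rather than a scalar: dividing by x yields a zero expression with x's shape (though it evaluates to NaN at exactly x == 0). The following standalone NumPy sketch, not part of the patch and with a made-up helper name check_zero_grad, checks the zero-gradient property with a central finite difference:

    import numpy as np

    def check_zero_grad(fn, eps=1e-4):
        """Central finite difference of a piecewise-constant fn is zero
        away from its jump points."""
        np.random.seed(0)
        x = np.random.uniform(-1, 1, [4, 4]).astype("float32")
        # Nudge samples off multiples of 0.5, where ceil/floor/round jump,
        # so the difference quotient never straddles a discontinuity.
        near_jump = np.abs(2 * x - np.round(2 * x)) < 0.05
        x = np.where(near_jump, x + 0.1, x)
        grad = (fn(x + eps) - fn(x - eps)) / (2 * eps)
        assert np.allclose(grad, 0.0), fn.__name__

    for fn in (np.ceil, np.floor, np.round):
        check_zero_grad(fn)

One caveat: np.round rounds halves to even, while C++ round implementations typically round halves away from zero. Uniformly sampled float32 inputs essentially never land exactly on a .5 boundary, so the TestRound comparison in the patch is unaffected in practice.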