From 60af56c1b8e60240238d877c093bf9c99706fefe Mon Sep 17 00:00:00 2001
From: Kavya Srinet
Date: Wed, 4 Oct 2017 19:02:22 -0700
Subject: [PATCH] Added Leaky Relu activation

---
 paddle/operators/activation_op.cc             | 19 ++++++++++++
 paddle/operators/activation_op.h              | 30 ++++++++++++++++++-
 .../v2/framework/tests/test_activation_op.py  | 17 +++++++++++
 3 files changed, 65 insertions(+), 1 deletion(-)

diff --git a/paddle/operators/activation_op.cc b/paddle/operators/activation_op.cc
index 7ae4d2f6b6c..5f2ecc2673a 100644
--- a/paddle/operators/activation_op.cc
+++ b/paddle/operators/activation_op.cc
@@ -69,6 +69,22 @@ class ReluOpMaker : public framework::OpProtoAndCheckerMaker {
   }
 };
 
+template <typename AttrType>
+class LeakyReluOpMaker : public framework::OpProtoAndCheckerMaker {
+ public:
+  LeakyReluOpMaker(framework::OpProto *proto,
+                   framework::OpAttrChecker *op_checker)
+      : OpProtoAndCheckerMaker(proto, op_checker) {
+    AddInput("X", "Input of LeakyRelu operator");
+    AddOutput("Y", "Output of LeakyRelu operator");
+    AddComment(
+        "LeakyRelu activation operator, "
+        "leaky_relu = max(x, alpha * x)");
+    AddAttr<AttrType>("alpha", "The small negative slope")
+        .SetDefault(static_cast<AttrType>(0.02f));
+  }
+};
+
 class TanhOpMaker : public framework::OpProtoAndCheckerMaker {
  public:
   TanhOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
@@ -240,6 +256,9 @@ REGISTER_OP(softsign, ops::ActivationOp, ops::SoftsignOpMaker, softsign_grad,
             ops::ActivationOpGrad);
 
 REGISTER_OP(brelu, ops::ActivationOp, ops::BReluOpMaker<float>, brelu_grad,
             ops::ActivationOpGrad);
 
+REGISTER_OP(leaky_relu, ops::ActivationOp, ops::LeakyReluOpMaker<float>,
+            leaky_relu_grad, ops::ActivationOpGrad);
+
 REGISTER_OP(soft_relu, ops::ActivationOp, ops::SoftReluOpMaker<float>,
             soft_relu_grad, ops::ActivationOpGrad);
diff --git a/paddle/operators/activation_op.h b/paddle/operators/activation_op.h
index ff35c2d97e8..dae66cc77d9 100644
--- a/paddle/operators/activation_op.h
+++ b/paddle/operators/activation_op.h
@@ -309,6 +309,33 @@ struct SoftReluGradFunctor : public BaseActivationFunctor<T> {
   }
 };
 
+template <typename T>
+struct LeakyReluFunctor : public BaseActivationFunctor<T> {
+  float alpha;
+  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
+    return {{"alpha", &alpha}};
+  }
+
+  template <typename Device, typename X, typename Y>
+  void operator()(Device d, X x, Y y) const {
+    y.device(d) = x.cwiseMax(alpha * x);
+  }
+};
+
+template <typename T>
+struct LeakyReluGradFunctor : public BaseActivationFunctor<T> {
+  float alpha;
+  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
+    return {{"alpha", &alpha}};
+  }
+  template <typename Device, typename X, typename Y, typename dY, typename dX>
+  void operator()(Device d, X x, Y y, dY dy, dX dx) const {
+    auto temp1 = alpha * (x < static_cast<T>(0)).template cast<T>().eval();
+    auto temp2 = (x >= static_cast<T>(0)).template cast<T>().eval();
+    dx.device(d) = dy * (temp1 + temp2).template cast<T>();
+  }
+};
+
 template <typename T>
 struct PowFunctor : public BaseActivationFunctor<T> {
   float factor;
@@ -379,4 +406,5 @@ struct STanhGradFunctor : public BaseActivationFunctor<T> {
   __macro(soft_relu, SoftReluFunctor, SoftReluGradFunctor); \
   __macro(pow, PowFunctor, PowGradFunctor); \
   __macro(stanh, STanhFunctor, STanhGradFunctor); \
-  __macro(softsign, SoftsignFunctor, SoftsignGradFunctor)
+  __macro(softsign, SoftsignFunctor, SoftsignGradFunctor); \
+  __macro(leaky_relu, LeakyReluFunctor, LeakyReluGradFunctor)
diff --git a/python/paddle/v2/framework/tests/test_activation_op.py b/python/paddle/v2/framework/tests/test_activation_op.py
index c44eb849063..ce6dec77485 100644
--- a/python/paddle/v2/framework/tests/test_activation_op.py
+++ b/python/paddle/v2/framework/tests/test_activation_op.py
@@ -122,6 +122,23 @@ class TestBRelu(OpTest):
         self.check_grad(['X'], 'Y', max_relative_error=0.02)
 
 
+class TestLeakyRelu(OpTest):
+    def setUp(self):
+        self.op_type = "leaky_relu"
+        alpha = 0.02
+        self.attrs = {'alpha': alpha}
+        self.inputs = {'X': np.random.uniform(-3, 3, [4, 4]).astype("float32")}
+        self.outputs = {
+            'Y': np.maximum(self.inputs['X'], alpha * self.inputs['X'])
+        }
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Y', max_relative_error=0.008)
+
+
 class TestSoftRelu(OpTest):
     def setUp(self):
         self.op_type = "soft_relu"
--
GitLab
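
Reviewer note (not part of the patch): the math that LeakyReluFunctor, LeakyReluGradFunctor, and TestLeakyRelu implement can be sanity-checked standalone. Below is a minimal NumPy sketch; the helper names leaky_relu and leaky_relu_grad are illustrative, not Paddle APIs, and the finite-difference check only approximates what OpTest.check_grad does.

import numpy as np

def leaky_relu(x, alpha=0.02):
    # Forward pass: y = max(x, alpha * x), as in LeakyReluFunctor's cwiseMax.
    return np.maximum(x, alpha * x)

def leaky_relu_grad(x, dy, alpha=0.02):
    # Backward pass: dx = dy * (alpha * [x < 0] + [x >= 0]),
    # the temp1 + temp2 expression in LeakyReluGradFunctor.
    return dy * np.where(x >= 0, 1.0, alpha)

x = np.random.uniform(-3, 3, [4, 4]).astype("float32")
x[np.abs(x) < 1e-2] = 1.0  # keep samples away from the kink at 0
dx = leaky_relu_grad(x, np.ones_like(x))

# Finite-difference check of the analytic gradient.
eps = 1e-3
numeric = (leaky_relu(x + eps) - leaky_relu(x - eps)) / (2 * eps)
assert np.allclose(dx, numeric, atol=1e-2)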