From 1893cd6bb8619b5054fe5460d897b9406ee6820b Mon Sep 17 00:00:00 2001
From: Adam <38704900+grygielski@users.noreply.github.com>
Date: Mon, 10 Aug 2020 10:23:20 +0200
Subject: [PATCH] Add oneDNN relu6 op (#26037)

* Add oneDNN relu6 op

* Lint fixes
---
 paddle/fluid/operators/activation_op.cc            |  3 +++
 .../fluid/operators/mkldnn/activation_mkldnn_op.cc | 13 +++++++++++++
 python/paddle/fluid/layers/nn.py                   |  5 ++++-
 .../unittests/mkldnn/test_activation_mkldnn_op.py  | 11 ++++++++++-
 4 files changed, 30 insertions(+), 2 deletions(-)

diff --git a/paddle/fluid/operators/activation_op.cc b/paddle/fluid/operators/activation_op.cc
index 1ecb9dd26da..7ea78879e1e 100644
--- a/paddle/fluid/operators/activation_op.cc
+++ b/paddle/fluid/operators/activation_op.cc
@@ -504,6 +504,9 @@ class Relu6OpMaker : public framework::OpProtoAndCheckerMaker {
     AddAttr<float>("threshold",
                    "The threshold value of Relu6. Default is 6.0. ")
         .SetDefault(6.0f);
+    AddAttr<bool>("use_mkldnn",
+                  "(bool, default false) Only used in mkldnn kernel")
+        .SetDefault(false);
     AddComment(R"DOC(
 Relu6 Activation Operator.
 
diff --git a/paddle/fluid/operators/mkldnn/activation_mkldnn_op.cc b/paddle/fluid/operators/mkldnn/activation_mkldnn_op.cc
index aa9606b5f85..5ca9216d0c8 100644
--- a/paddle/fluid/operators/mkldnn/activation_mkldnn_op.cc
+++ b/paddle/fluid/operators/mkldnn/activation_mkldnn_op.cc
@@ -76,6 +76,8 @@ void eltwise_forward(const framework::ExecutionContext &ctx,
   // paddle uses beta but mkldnn uses alpha for swish
   if (algorithm == mkldnn::algorithm::eltwise_swish) {
     std::swap(alpha, beta);
+  } else if (algorithm == dnnl::algorithm::eltwise_bounded_relu) {
+    alpha = ctx.Attr<float>("threshold");
   }
 
   PADDLE_ENFORCE(
@@ -119,6 +121,8 @@ void eltwise_grad(const framework::ExecutionContext &ctx,
   // paddle uses beta but mkldnn uses alpha for swish
   if (algorithm == mkldnn::algorithm::eltwise_swish) {
     std::swap(alpha, beta);
+  } else if (algorithm == dnnl::algorithm::eltwise_bounded_relu) {
+    alpha = ctx.Attr<float>("threshold");
   }
 
   auto diff_dst_tz = framework::vectorize(diff_y->dims());
@@ -192,6 +196,10 @@ template <typename T>
 using ReluMKLDNNFunctor =
     MKLDNNActivationFunc<T, mkldnn::algorithm::eltwise_relu>;
 
+template <typename T>
+using Relu6MKLDNNFunctor =
+    MKLDNNActivationFunc<T, mkldnn::algorithm::eltwise_bounded_relu>;
+
 template <typename T>
 using SwishMKLDNNFunctor =
     MKLDNNActivationFunc<T, mkldnn::algorithm::eltwise_swish>;
@@ -216,6 +224,10 @@ template <typename T>
 using ReluMKLDNNGradFunctor =
     MKLDNNActivationGradFunc<T, mkldnn::algorithm::eltwise_relu>;
 
+template <typename T>
+using Relu6MKLDNNGradFunctor =
+    MKLDNNActivationGradFunc<T, mkldnn::algorithm::eltwise_bounded_relu>;
+
 template <typename T>
 using SwishMKLDNNGradFunctor =
     MKLDNNActivationGradFunc<T, mkldnn::algorithm::eltwise_swish>;
@@ -249,6 +261,7 @@ namespace ops = paddle::operators;
 
 #define FOR_EACH_MKLDNN_KERNEL_FUNCTOR(__macro)                  \
   __macro(relu, ReluMKLDNNFunctor, ReluMKLDNNGradFunctor);       \
+  __macro(relu6, Relu6MKLDNNFunctor, Relu6MKLDNNGradFunctor);    \
   __macro(leaky_relu, ReluMKLDNNFunctor, ReluMKLDNNGradFunctor); \
   __macro(gelu, GeluMKLDNNFunctor, GeluMKLDNNGradFunctor);       \
   __macro(swish, SwishMKLDNNFunctor, SwishMKLDNNGradFunctor);    \
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index ae42b3bbdf0..9a648ee80ac 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -9375,7 +9375,10 @@ def relu6(x, threshold=6.0, name=None):
         type='relu6',
         inputs={'X': x},
         outputs={'Out': out},
-        attrs={'threshold': threshold})
+        attrs={
+            'threshold': threshold,
+            'use_mkldnn': core.globals()["FLAGS_use_mkldnn"]
+        })
     return out
 
 
diff --git a/python/paddle/fluid/tests/unittests/mkldnn/test_activation_mkldnn_op.py b/python/paddle/fluid/tests/unittests/mkldnn/test_activation_mkldnn_op.py
index 1e9c4b56093..55c6bad9af6 100644
--- a/python/paddle/fluid/tests/unittests/mkldnn/test_activation_mkldnn_op.py
+++ b/python/paddle/fluid/tests/unittests/mkldnn/test_activation_mkldnn_op.py
@@ -19,7 +19,7 @@ import numpy as np
 from scipy.special import expit
 import paddle.fluid.core as core
 from paddle.fluid.tests.unittests.op_test import OpTest
-from paddle.fluid.tests.unittests.test_activation_op import TestActivation, TestRelu, TestTanh, TestSqrt, TestAbs, TestLeakyRelu, TestSwish, TestSigmoid
+from paddle.fluid.tests.unittests.test_activation_op import TestActivation, TestRelu, TestTanh, TestSqrt, TestAbs, TestLeakyRelu, TestSwish, TestRelu6, TestSigmoid
 from paddle.fluid.tests.unittests.test_gelu_op import gelu
 from mkldnn_op_test import check_if_mkldnn_primitives_exist_in_bwd
 
@@ -34,6 +34,15 @@ class TestMKLDNNReluDim2(TestRelu):
         self.dtype = np.float32
 
 
+class TestMKLDNNRelu6Dim2(TestRelu6):
+    def setUp(self):
+        super(TestMKLDNNRelu6Dim2, self).setUp()
+        self.attrs.update({"use_mkldnn": True})
+
+    def init_dtype(self):
+        self.dtype = np.float32
+
+
 class TestMKLDNNLeakyReluDim2(TestLeakyRelu):
     def setUp(self):
         super(TestMKLDNNLeakyReluDim2, self).setUp()
--
GitLab
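For reference, a minimal sketch (not part of the patch) of exercising the new oneDNN relu6 path end to end. It assumes a CPU build of Paddle with oneDNN support from around this commit and the 1.8-era `fluid` API. Setting the `FLAGS_use_mkldnn` environment variable before importing paddle is what makes `core.globals()["FLAGS_use_mkldnn"]` evaluate true inside the `relu6` layer above, which sets `use_mkldnn` on the op and routes it to the `eltwise_bounded_relu` primitive with `alpha` taken from `threshold`:

```python
import os

# FLAGS_* environment variables must be set before paddle is imported
# so the global flag registry picks them up; relu6 copies this flag
# into its op attrs (see the nn.py hunk above).
os.environ["FLAGS_use_mkldnn"] = "1"

import numpy as np
import paddle.fluid as fluid

x = fluid.data(name="x", shape=[2, 3], dtype="float32")
y = fluid.layers.relu6(x, threshold=6.0)  # out = min(max(x, 0), 6)

exe = fluid.Executor(fluid.CPUPlace())  # oneDNN kernels are CPU-only
out, = exe.run(fluid.default_main_program(),
               feed={"x": np.array([[-1.0, 2.0, 8.0],
                                    [0.5, 6.0, 7.0]], dtype="float32")},
               fetch_list=[y])
print(out)  # expected: [[0. 2. 6.], [0.5 6. 6.]]
```

With the flag unset, the same script falls back to the native CPU kernel, which gives a quick way to compare the outputs of the two paths.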