From 1aaa26f102b2c0b76fd54d262debb51b307ed966 Mon Sep 17 00:00:00 2001
From: Sylwester Fraczek
Date: Tue, 28 Jul 2020 06:21:12 +0200
Subject: [PATCH] add dnnl sigmoid (logistic) activation (#25745)

---
 .../operators/mkldnn/activation_mkldnn_op.cc  | 23 +++++++++++++------
 .../mkldnn/test_activation_mkldnn_op.py       | 19 ++++++++++++++-
 .../tests/unittests/test_imperative_basic.py  |  2 +-
 3 files changed, 35 insertions(+), 9 deletions(-)

diff --git a/paddle/fluid/operators/mkldnn/activation_mkldnn_op.cc b/paddle/fluid/operators/mkldnn/activation_mkldnn_op.cc
index 86fe40c4f6a..aa9606b5f85 100644
--- a/paddle/fluid/operators/mkldnn/activation_mkldnn_op.cc
+++ b/paddle/fluid/operators/mkldnn/activation_mkldnn_op.cc
@@ -196,6 +196,10 @@ template <typename T>
 using SwishMKLDNNFunctor =
     MKLDNNActivationFunc<T, mkldnn::algorithm::eltwise_swish>;
 
+template <typename T>
+using SigmoidMKLDNNFunctor =
+    MKLDNNActivationFunc<T, mkldnn::algorithm::eltwise_logistic>;
+
 template <typename T>
 using TanhMKLDNNFunctor =
     MKLDNNActivationFunc<T, mkldnn::algorithm::eltwise_tanh>;
@@ -216,6 +220,10 @@ template <typename T>
 using SwishMKLDNNGradFunctor =
     MKLDNNActivationGradFunc<T, mkldnn::algorithm::eltwise_swish>;
 
+template <typename T>
+using SigmoidMKLDNNGradFunctor =
+    MKLDNNActivationGradFunc<T, mkldnn::algorithm::eltwise_logistic>;
+
 template <typename T>
 using TanhMKLDNNGradFunctor =
     MKLDNNActivationGradFunc<T, mkldnn::algorithm::eltwise_tanh>;
@@ -239,13 +247,14 @@ namespace ops = paddle::operators;
       act_type##_grad, MKLDNN, ::paddle::platform::CPUPlace,        \
       ops::MKLDNNActivationGradKernel<ops::grad_functor<float>>);
 
-#define FOR_EACH_MKLDNN_KERNEL_FUNCTOR(__macro)                  \
-  __macro(relu, ReluMKLDNNFunctor, ReluMKLDNNGradFunctor);       \
-  __macro(leaky_relu, ReluMKLDNNFunctor, ReluMKLDNNGradFunctor); \
-  __macro(gelu, GeluMKLDNNFunctor, GeluMKLDNNGradFunctor);       \
-  __macro(swish, SwishMKLDNNFunctor, SwishMKLDNNGradFunctor);    \
-  __macro(tanh, TanhMKLDNNFunctor, TanhMKLDNNGradFunctor);       \
-  __macro(sqrt, SqrtMKLDNNFunctor, SqrtMKLDNNGradFunctor);       \
+#define FOR_EACH_MKLDNN_KERNEL_FUNCTOR(__macro)                     \
+  __macro(relu, ReluMKLDNNFunctor, ReluMKLDNNGradFunctor);          \
+  __macro(leaky_relu, ReluMKLDNNFunctor, ReluMKLDNNGradFunctor);    \
+  __macro(gelu, GeluMKLDNNFunctor, GeluMKLDNNGradFunctor);          \
+  __macro(swish, SwishMKLDNNFunctor, SwishMKLDNNGradFunctor);       \
+  __macro(sigmoid, SigmoidMKLDNNFunctor, SigmoidMKLDNNGradFunctor); \
+  __macro(tanh, TanhMKLDNNFunctor, TanhMKLDNNGradFunctor);          \
+  __macro(sqrt, SqrtMKLDNNFunctor, SqrtMKLDNNGradFunctor);          \
   __macro(abs, AbsMKLDNNFunctor, AbsMKLDNNGradFunctor);
 
 FOR_EACH_MKLDNN_KERNEL_FUNCTOR(REGISTER_ACTIVATION_MKLDNN_KERNEL);
diff --git a/python/paddle/fluid/tests/unittests/mkldnn/test_activation_mkldnn_op.py b/python/paddle/fluid/tests/unittests/mkldnn/test_activation_mkldnn_op.py
index 9f635c3f267..2404aeb72b2 100644
--- a/python/paddle/fluid/tests/unittests/mkldnn/test_activation_mkldnn_op.py
+++ b/python/paddle/fluid/tests/unittests/mkldnn/test_activation_mkldnn_op.py
@@ -19,7 +19,7 @@ import numpy as np
 from scipy.special import expit
 import paddle.fluid.core as core
 from paddle.fluid.tests.unittests.op_test import OpTest
-from paddle.fluid.tests.unittests.test_activation_op import TestActivation, TestRelu, TestTanh, TestSqrt, TestAbs, TestLeakyRelu, TestSwish
+from paddle.fluid.tests.unittests.test_activation_op import TestActivation, TestRelu, TestTanh, TestSqrt, TestAbs, TestLeakyRelu, TestSwish, TestSigmoid
 from paddle.fluid.tests.unittests.test_gelu_op import gelu
 from mkldnn_op_test import check_if_mkldnn_primitives_exist_in_bwd
 
@@ -162,6 +162,12 @@ class TestMKLDNNSwishDim2(TestSwish):
         self.check_grad(['X'], 'Out')
 
 
+class TestMKLDNNSigmoidDim2(TestSigmoid):
+    def setUp(self):
+        super(TestMKLDNNSigmoidDim2, self).setUp()
+        self.attrs = {"use_mkldnn": True}
+
+
 class TestMKLDNNReluDim4(TestRelu):
     def setUp(self):
         super(TestMKLDNNReluDim4, self).setUp()
@@ -328,6 +334,17 @@ class TestMKLDNNSwishDim4(TestSwish):
         self.check_grad(['X'], 'Out')
 
 
+class TestMKLDNNSigmoidDim4(TestSigmoid):
+    def setUp(self):
+        super(TestMKLDNNSigmoidDim4, self).setUp()
+
+        x = np.random.uniform(0.1, 1, [2, 4, 3, 5]).astype(self.dtype)
+        out = 1 / (1 + np.exp(-x))
+        self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(x)}
+        self.outputs = {'Out': out}
+        self.attrs = {"use_mkldnn": True}
+
+
 # Check if primitives already exist in backward
 class TestMKLDNNAbsPrimitivesAlreadyExist(unittest.TestCase):
     def setUp(self):
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_basic.py b/python/paddle/fluid/tests/unittests/test_imperative_basic.py
index 75661644c1b..9b6c307bbec 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_basic.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_basic.py
@@ -626,7 +626,7 @@ class TestDygraphUtils(unittest.TestCase):
         a = fluid.dygraph.to_variable(a_np)
         res1 = func(a, act="sigmoid", use_mkldnn=True, use_cudnn=True)
         res2 = fluid.layers.sigmoid(a)
-        self.assertTrue(np.array_equal(res1.numpy(), res2.numpy()))
+        self.assertTrue(np.allclose(res1.numpy(), res2.numpy()))
 
     def test_append_bias_in_dygraph_exception(self):
         with new_program_scope():
-- 
GitLab
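
Note: for readers following along, here is a minimal standalone sketch (separate
from the patch itself) of the logistic math that the eltwise_logistic kernel and
the tests above rely on. sigmoid_ref and sigmoid_grad_ref are illustrative helper
names, not Paddle or dnnl APIs; expit is scipy's reference sigmoid, which the test
file already imports.

    import numpy as np
    from scipy.special import expit

    def sigmoid_ref(x):
        # Forward pass: sigmoid(x) = 1 / (1 + exp(-x)), the function
        # dnnl's eltwise_logistic algorithm computes.
        return 1.0 / (1.0 + np.exp(-x))

    def sigmoid_grad_ref(out, dout):
        # Backward pass expressed via the forward output:
        # dL/dx = dL/dout * out * (1 - out).
        return dout * out * (1.0 - out)

    # Same shape and value range as the TestMKLDNNSigmoidDim4 test case.
    x = np.random.uniform(0.1, 1, [2, 4, 3, 5]).astype(np.float32)
    out = sigmoid_ref(x)
    dx = sigmoid_grad_ref(out, np.ones_like(out))

    # Cross-check the forward result against scipy, using the same
    # np.allclose-style tolerance the patch switches to in
    # test_imperative_basic.py.
    assert np.allclose(out, expit(x))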