From b1bd483a7ded7bb54294a4c6dbff76245f471640 Mon Sep 17 00:00:00 2001
From: Krzysztof Binias
Date: Mon, 27 May 2019 07:13:57 +0200
Subject: [PATCH] [NGraph] Enable gelu operator for the nGraph Bridge. (#17547)

test=develop
---
 .../operators/ngraph/ops/activation_op.h           | 48 +++++++++++++++++++
 .../ngraph/test_activation_ngraph_op.py            |  2 +-
 2 files changed, 49 insertions(+), 1 deletion(-)

diff --git a/paddle/fluid/operators/ngraph/ops/activation_op.h b/paddle/fluid/operators/ngraph/ops/activation_op.h
index ef6c11bce70..884ec659267 100644
--- a/paddle/fluid/operators/ngraph/ops/activation_op.h
+++ b/paddle/fluid/operators/ngraph/ops/activation_op.h
@@ -26,6 +26,52 @@ namespace paddle {
 namespace operators {
 namespace ngraphs {
 
+void BuildGeluNode(
+    const std::shared_ptr<framework::OperatorBase>& op,
+    std::shared_ptr<
+        std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
+        ngb_node_map) {
+  auto input = platform::GetInputNode(op, "X", ngb_node_map);
+  auto half = paddle::platform::CreateConstant(input->get_element_type(),
+                                               input->get_shape(), {0.5});
+  auto one = paddle::platform::CreateConstant(input->get_element_type(),
+                                              input->get_shape(), {1});
+  auto sqrt_two =
+      std::make_shared<ngraph::op::Sqrt>(paddle::platform::CreateConstant(
+          input->get_element_type(), input->get_shape(), {2}));
+  auto out = half * input *
+             (one + std::make_shared<ngraph::op::Erf>(input / sqrt_two));
+  platform::SetOutputNode(op, "Out", out, ngb_node_map);
+}
+
+void BuildGeluGradNode(
+    const std::shared_ptr<framework::OperatorBase>& op,
+    std::shared_ptr<
+        std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
+        ngb_node_map) {
+  auto input = platform::GetInputNode(op, "X", ngb_node_map);
+  auto dout = platform::GetInputNode(op, "Out@GRAD", ngb_node_map);
+  auto half = paddle::platform::CreateConstant(input->get_element_type(),
+                                               input->get_shape(), {0.5});
+  auto minus_half = paddle::platform::CreateConstant(
+      input->get_element_type(), input->get_shape(), {-0.5});
+  auto one = paddle::platform::CreateConstant(input->get_element_type(),
+                                              input->get_shape(), {1});
+  auto two = paddle::platform::CreateConstant(input->get_element_type(),
+                                              input->get_shape(), {2});
+  auto pi = paddle::platform::CreateConstant(
+      input->get_element_type(), input->get_shape(), {3.14159265359});
+  auto sqrt_two = std::make_shared<ngraph::op::Sqrt>(two);
+  auto sqrt_pi = std::make_shared<ngraph::op::Sqrt>(pi);
+
+  auto first =
+      half * (one + std::make_shared<ngraph::op::Erf>(input * one / sqrt_two));
+  auto second = half * (two / sqrt_pi) * (one / sqrt_two) * input *
+                std::make_shared<ngraph::op::Exp>(minus_half * input * input);
+  auto gelu_grad = dout * (first + second);
+  platform::SetOutputNode(op, "X@GRAD", gelu_grad, ngb_node_map);
+}
+
 void BuildReluGradNode(
     const std::shared_ptr<framework::OperatorBase>& op,
     std::shared_ptr<
@@ -64,6 +110,8 @@ void BuildTanhGradNode(
 }  // namespace operators
 }  // namespace paddle
 
+REGISTER_NG_OP(gelu, BuildGeluNode);
+REGISTER_NG_OP(gelu_grad, BuildGeluGradNode);
 REGISTER_NG_OP(relu_grad, BuildReluGradNode);
 REGISTER_NG_OP(square, BuildSquareNode);
 REGISTER_NG_OP(tanh_grad, BuildTanhGradNode);
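For reference, the two builders above compose the exact (erf-based) GELU and
its analytic derivative from nGraph primitives:

    gelu(x)  = 0.5 * x * (1 + erf(x / sqrt(2)))
    gelu'(x) = 0.5 * (1 + erf(x / sqrt(2))) + x * exp(-x^2 / 2) / sqrt(2 * pi)

These correspond to the `first` and `second` terms in BuildGeluGradNode, since
0.5 * (2 / sqrt(pi)) * (1 / sqrt(2)) = 1 / sqrt(2 * pi). The NumPy sketch below
is not part of the patch and the helper names are illustrative; it assumes
SciPy is available for erf and simply mirrors the same formulas, so it can be
used to sanity-check the graph output:

    import numpy as np
    from scipy.special import erf

    def gelu(x):
        # Forward, mirroring BuildGeluNode: 0.5 * x * (1 + erf(x / sqrt(2))).
        return 0.5 * x * (1.0 + erf(x / np.sqrt(2.0)))

    def gelu_grad(x, dout):
        # Backward, mirroring BuildGeluGradNode: dout * (first + second).
        first = 0.5 * (1.0 + erf(x / np.sqrt(2.0)))
        second = (0.5 * (2.0 / np.sqrt(np.pi)) * (1.0 / np.sqrt(2.0)) * x *
                  np.exp(-0.5 * x * x))
        return dout * (first + second)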
diff --git a/python/paddle/fluid/tests/unittests/ngraph/test_activation_ngraph_op.py b/python/paddle/fluid/tests/unittests/ngraph/test_activation_ngraph_op.py
index 3c1db3bf640..a7f167cbd41 100644
--- a/python/paddle/fluid/tests/unittests/ngraph/test_activation_ngraph_op.py
+++ b/python/paddle/fluid/tests/unittests/ngraph/test_activation_ngraph_op.py
@@ -18,7 +18,7 @@ import unittest
 import numpy as np
 import paddle.fluid.core as core
 from paddle.fluid.tests.unittests.op_test import OpTest
-from paddle.fluid.tests.unittests.test_activation_op import TestAbs, TestSigmoid, TestSquare, TestRelu, TestTanh
+from paddle.fluid.tests.unittests.test_activation_op import TestAbs, TestGelu, TestSigmoid, TestSquare, TestRelu, TestTanh
 
 
 class TestNGRAPHReluDim4(TestRelu):
--
GitLab
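A quick way to exercise the sketch above (again illustrative, not part of the
patch) is to check the analytic backward against central finite differences:

    x = np.linspace(-3.0, 3.0, 61)
    eps = 1e-6
    numeric = (gelu(x + eps) - gelu(x - eps)) / (2.0 * eps)
    analytic = gelu_grad(x, np.ones_like(x))
    assert np.allclose(numeric, analytic, atol=1e-6)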