diff --git a/paddle/fluid/operators/ngraph/ops/activation_op.h b/paddle/fluid/operators/ngraph/ops/activation_op.h
index a66ec65a336f807f554157628888633db22ebfec..ef6c11bce706a84c8e6a330f3acaf69f63617516 100644
--- a/paddle/fluid/operators/ngraph/ops/activation_op.h
+++ b/paddle/fluid/operators/ngraph/ops/activation_op.h
@@ -37,6 +37,16 @@ void BuildReluGradNode(
   platform::SetOutputNode(op, "X@GRAD", relu_grad, ngb_node_map);
 }
 
+void BuildSquareNode(
+    const std::shared_ptr<framework::OperatorBase>& op,
+    std::shared_ptr<
+        std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
+        ngb_node_map) {
+  auto input = platform::GetInputNode(op, "X", ngb_node_map);
+  auto out = input * input;
+  platform::SetOutputNode(op, "Out", out, ngb_node_map);
+}
+
 void BuildTanhGradNode(
     const std::shared_ptr<framework::OperatorBase>& op,
     std::shared_ptr<
@@ -55,4 +65,5 @@
 }  // namespace paddle
 
 REGISTER_NG_OP(relu_grad, BuildReluGradNode);
+REGISTER_NG_OP(square, BuildSquareNode);
 REGISTER_NG_OP(tanh_grad, BuildTanhGradNode);
diff --git a/python/paddle/fluid/tests/unittests/ngraph/test_activation_ngraph_op.py b/python/paddle/fluid/tests/unittests/ngraph/test_activation_ngraph_op.py
index c7d62bd8ae1c8c30056192ee4217b22cfc3901db..3c1db3bf6406cce7952b7e07dc5b39c1389d7cea 100644
--- a/python/paddle/fluid/tests/unittests/ngraph/test_activation_ngraph_op.py
+++ b/python/paddle/fluid/tests/unittests/ngraph/test_activation_ngraph_op.py
@@ -18,7 +18,7 @@ import unittest
 import numpy as np
 import paddle.fluid.core as core
 from paddle.fluid.tests.unittests.op_test import OpTest
-from paddle.fluid.tests.unittests.test_activation_op import TestAbs, TestSigmoid, TestRelu, TestTanh
+from paddle.fluid.tests.unittests.test_activation_op import TestAbs, TestSigmoid, TestSquare, TestRelu, TestTanh
 
 
 class TestNGRAPHReluDim4(TestRelu):
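
The Python hunk adds TestSquare to the import list, but the excerpt ends at `class TestNGRAPHReluDim4(TestRelu):`, before any class that uses the new import. Presumably the full diff also subclasses TestSquare the same way the file's other nGraph tests subclass TestRelu and friends. Below is a minimal sketch of that pattern; the class name TestNGRAPHSquare is an assumption, not taken from the diff, and the snippet relies on the imports already visible in the hunk above.

    # Hypothetical companion test (class name assumed, not from the diff):
    # reuses TestSquare's inputs, outputs, and checks so OpTest exercises
    # the new BuildSquareNode bridge when the nGraph engine is enabled.
    class TestNGRAPHSquare(TestSquare):
        def setUp(self):
            super(TestNGRAPHSquare, self).setUp()

On the C++ side, `auto out = input * input;` works because nGraph overloads operator* for `std::shared_ptr<ngraph::Node>`, building an elementwise multiply node; squaring by self-multiplication keeps the bridge to a single existing nGraph op instead of requiring a dedicated power operation.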