diff --git a/paddle/fluid/platform/device/ipu/popart_canonicalization/activation_ops.cc b/paddle/fluid/platform/device/ipu/popart_canonicalization/activation_ops.cc
index 1d5fe8c329f116e74dbda65a88e0c7891ac03b40..eeabd835ef3485408ae6f0939791ce5c0e8cdd4d 100644
--- a/paddle/fluid/platform/device/ipu/popart_canonicalization/activation_ops.cc
+++ b/paddle/fluid/platform/device/ipu/popart_canonicalization/activation_ops.cc
@@ -27,26 +27,98 @@ Node *activation_op_handler(Graph *graph, Node *node, const std::string &type) {
   return new_node;
 }
 
-Node *relu_handler(Graph *graph, Node *node) {
-  return activation_op_handler(graph, node, "popart_relu");
+Node *abs_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_abs");
 }
 
-Node *tanh_handler(Graph *graph, Node *node) {
-  return activation_op_handler(graph, node, "popart_tanh");
+Node *acos_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_acos");
+}
+
+Node *asin_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_asin");
+}
+
+Node *atan_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_atan");
+}
+
+Node *ceil_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_ceil");
+}
+
+Node *cos_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_cos");
+}
+
+Node *cosh_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_cosh");
+}
+
+Node *erf_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_erf");
+}
+
+Node *exp_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_exp");
+}
+
+Node *floor_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_floor");
 }
 
 Node *log_handler(Graph *graph, Node *node) {
   return activation_op_handler(graph, node, "popart_log");
 }
 
+Node *reciprocal_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_reciprocal");
+}
+
+Node *relu_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_relu");
+}
+
+Node *round_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_round");
+}
+
 Node *sigmoid_handler(Graph *graph, Node *node) {
   return activation_op_handler(graph, node, "popart_sigmoid");
 }
 
+Node *sign_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_sign");
+}
+
+Node *sin_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_sin");
+}
+
+Node *sinh_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_sinh");
+}
+
+Node *softplus_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_softplus");
+}
+
+Node *softsign_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_softsign");
+}
+
 Node *sqrt_handler(Graph *graph, Node *node) {
   return activation_op_handler(graph, node, "popart_sqrt");
 }
 
+Node *tan_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_tan");
+}
+
+Node *tanh_handler(Graph *graph, Node *node) {
+  return activation_op_handler(graph, node, "popart_tanh");
+}
+
 Node *gelu_handler(Graph *graph, Node *node) {
   auto *op = node->Op();
   auto approximate_ = BOOST_GET_CONST(bool, op->GetAttr("approximate"));
@@ -93,10 +165,28 @@ Node *log_softmax_handler(Graph *graph, Node *node) {
 }  // namespace platform
 }  // namespace paddle
 
-REGISTER_HANDLER(relu, relu_handler);
-REGISTER_HANDLER(tanh, tanh_handler);
+REGISTER_HANDLER(abs, abs_handler);
+REGISTER_HANDLER(acos, acos_handler);
+REGISTER_HANDLER(asin, asin_handler);
+REGISTER_HANDLER(atan, atan_handler);
+REGISTER_HANDLER(ceil, ceil_handler);
+REGISTER_HANDLER(cos, cos_handler);
+REGISTER_HANDLER(cosh, cosh_handler);
+REGISTER_HANDLER(erf, erf_handler);
+REGISTER_HANDLER(exp, exp_handler);
+REGISTER_HANDLER(floor, floor_handler);
 REGISTER_HANDLER(log, log_handler);
+REGISTER_HANDLER(reciprocal, reciprocal_handler);
+REGISTER_HANDLER(relu, relu_handler);
+REGISTER_HANDLER(round, round_handler);
 REGISTER_HANDLER(sigmoid, sigmoid_handler);
+REGISTER_HANDLER(sign, sign_handler);
+REGISTER_HANDLER(sin, sin_handler);
+REGISTER_HANDLER(sinh, sinh_handler);
+REGISTER_HANDLER(softplus, softplus_handler);
+REGISTER_HANDLER(softsign, softsign_handler);
 REGISTER_HANDLER(sqrt, sqrt_handler);
+REGISTER_HANDLER(tan, tan_handler);
+REGISTER_HANDLER(tanh, tanh_handler);
 REGISTER_HANDLER(gelu, gelu_handler);
 REGISTER_HANDLER(log_softmax, log_softmax_handler);
diff --git a/python/paddle/fluid/tests/unittests/ipu/test_activation_x_op_ipu.py b/python/paddle/fluid/tests/unittests/ipu/test_activation_x_op_ipu.py
deleted file mode 100644
index 19abf74a556838c8f516a842a6aff6efece18e41..0000000000000000000000000000000000000000
--- a/python/paddle/fluid/tests/unittests/ipu/test_activation_x_op_ipu.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import numpy as np
-import paddle
-import paddle.nn.functional as F
-import paddle.static
-from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
-
-
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
-class TestRelu(IPUOpTest):
-
-    def setUp(self):
-        self.set_atol()
-        self.set_test_op()
-        self.set_training()
-        self.set_data_feed()
-        self.set_feed_attr()
-
-    def set_test_op(self):
-        self.op = paddle.fluid.layers.relu
-        self.op_attrs = {}
-
-    def set_data_feed(self):
-        data = np.random.uniform(size=[1, 3, 10, 10])
-        self.feed_fp32 = {'in_0': data.astype(np.float32)}
-        self.feed_fp16 = {'in_0': data.astype(np.float16)}
-
-    def set_feed_attr(self):
-        self.feed_shape = [x.shape for x in self.feed_fp32.values()]
-        self.feed_list = list(self.feed_fp32.keys())
-
-    @IPUOpTest.static_graph
-    def build_model(self):
-        x = paddle.static.data(name=self.feed_list[0],
-                               shape=self.feed_shape[0],
-                               dtype='float32')
-        out = self.op(x, **self.op_attrs)
-        self.fetch_list = [out.name]
-
-    def run_model(self, exec_mode):
-        self.run_op_test(exec_mode)
-
-    def test(self):
-        for m in IPUOpTest.ExecutionMode:
-            if not self.skip_mode(m):
-                self.build_model()
-                self.run_model(m)
-                self.check()
-
-
-class TestTanh(TestRelu):
-
-    def set_test_op(self):
-        self.op = F.tanh
-        self.op_attrs = {}
-
-
-class TestLog(TestRelu):
-
-    def set_test_op(self):
-        self.op = paddle.fluid.layers.log
-        self.op_attrs = {}
-
-
-class TestSigmoid(TestRelu):
-
-    def set_test_op(self):
-        self.op = F.sigmoid
-        self.op_attrs = {}
-
-
-class TestSqrt(TestRelu):
-
-    def set_test_op(self):
-        self.op = paddle.fluid.layers.sqrt
-        self.op_attrs = {}
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/ipu/test_unary_ops_ipu.py b/python/paddle/fluid/tests/unittests/ipu/test_unary_ops_ipu.py
new file mode 100644
index 0000000000000000000000000000000000000000..eac32819f82329eb24e58f53064fe5cd5226e9a7
--- /dev/null
+++ b/python/paddle/fluid/tests/unittests/ipu/test_unary_ops_ipu.py
@@ -0,0 +1,233 @@
+# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+import numpy as np
+import paddle
+import paddle.static
+from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
+
+
+@unittest.skipIf(not paddle.is_compiled_with_ipu(),
+                 "core is not compiled with IPU")
+class TestBase(IPUOpTest):
+
+    def setUp(self):
+        self.set_atol()
+        self.set_test_op()
+        self.set_training()
+        self.set_data_feed()
+        self.set_feed_attr()
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.abs
+        self.op_attrs = {}
+
+    def set_data_feed(self):
+        data = np.random.uniform(size=[1, 3, 10, 10])
+        self.feed_fp32 = {'in_0': data.astype(np.float32)}
+        self.feed_fp16 = {'in_0': data.astype(np.float16)}
+
+    def set_feed_attr(self):
+        self.feed_shape = [x.shape for x in self.feed_fp32.values()]
+        self.feed_list = list(self.feed_fp32.keys())
+
+    @IPUOpTest.static_graph
+    def build_model(self):
+        x = paddle.static.data(name=self.feed_list[0],
+                               shape=self.feed_shape[0],
+                               dtype='float32')
+        out = self.op(x, **self.op_attrs)
+        self.fetch_list = [out.name]
+
+    def run_model(self, exec_mode):
+        self.run_op_test(exec_mode)
+
+    def test(self):
+        for m in IPUOpTest.ExecutionMode:
+            if not self.skip_mode(m):
+                self.build_model()
+                self.run_model(m)
+                self.check()
+
+
+class TestAcos(TestBase):
+
+    @property
+    def fp16_enabled(self):
+        return False
+
+    def set_atol(self):
+        super().set_atol()
+        self.atol = 1e-6
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.acos
+        self.op_attrs = {}
+
+
+class TestAsin(TestAcos):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.asin
+        self.op_attrs = {}
+
+
+class TestSinh(TestAcos):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.sinh
+        self.op_attrs = {}
+
+
+class TestAtan(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.atan
+        self.op_attrs = {}
+
+
+class TestCeil(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.ceil
+        self.op_attrs = {}
+
+
+class TestCos(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.cos
+        self.op_attrs = {}
+
+
+class TestCosh(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.cosh
+        self.op_attrs = {}
+
+
+class TestErf(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.erf
+        self.op_attrs = {}
+
+
+class TestExp(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.exp
+        self.op_attrs = {}
+
+
+class TestFloor(TestBase):
+
+    @property
+    def fp16_enabled(self):
+        return False
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.floor
+        self.op_attrs = {}
+
+
+class TestLog(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.log
+        self.op_attrs = {}
+
+
+class TestReciprocal(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.reciprocal
+        self.op_attrs = {}
+
+
+class TestRelu(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.relu
+        self.op_attrs = {}
+
+
+class TestRound(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.round
+        self.op_attrs = {}
+
+
+class TestSigmoid(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.sigmoid
+        self.op_attrs = {}
+
+
+class TestSign(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.sign
+        self.op_attrs = {}
+
+
+class TestSin(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.sin
+        self.op_attrs = {}
+
+
+class TestSoftplus(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.softplus
+        self.op_attrs = {}
+
+
+class TestSoftsign(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.softsign
+        self.op_attrs = {}
+
+
+class TestSqrt(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.sqrt
+        self.op_attrs = {}
+
+
+class TestTan(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.tan
+        self.op_attrs = {}
+
+
+class TestTanh(TestBase):
+
+    def set_test_op(self):
+        self.op = paddle.fluid.layers.tanh
+        self.op_attrs = {}
+
+
+if __name__ == "__main__":
+    unittest.main()