#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
import paddle.fluid.core as core
from op_test import OpTest
import paddle
import paddle.fluid as fluid
import paddle.nn.functional as F


def ref_selu(x,
             scale=1.0507009873554804934193349852946,
             alpha=1.6732632423543772848170429916717):
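    """NumPy reference: selu(x) = scale * x for x >= 0,
    scale * alpha * (exp(x) - 1) for x < 0.
    """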
    out = np.copy(x)
    out_flat = out.flatten()
    for i in range(out_flat.size):
        if out_flat[i] < 0:
            out_flat[i] = alpha * np.exp(out_flat[i]) - alpha
        out_flat[i] = scale * out_flat[i]
    out = out_flat.reshape(x.shape)
    return out


class SeluTest(OpTest):

    def setUp(self):
        self.op_type = "selu"
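        # Python API counterpart; OpTest uses it to cross-check the op
        # in dygraph mode.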
        self.python_api = paddle.nn.functional.selu
        self.x_shape = [3, 5, 5, 10]
        self.dtype = np.float64
        self.init_x_shape()
        self.init_dtype()

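        # Canonical SELU constants from "Self-Normalizing Neural Networks"
        # (Klambauer et al., 2017).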
        alpha = 1.6732632423543772848170429916717
        scale = 1.0507009873554804934193349852946

        x = np.random.normal(size=self.x_shape).astype(self.dtype)

        # selu is not differentiable at zero, so keep the random inputs
        # away from zero.
        x[np.abs(x) < 0.005] = 0.02

        out = ref_selu(x, scale, alpha)

        self.inputs = {'X': x}
        self.outputs = {'Out': out}

        self.attrs = {
            'alpha': alpha,
            'scale': scale,
        }

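    # Hooks for subclasses that want a different input shape or dtype.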
    def init_x_shape(self):
        pass

    def init_dtype(self):
        pass

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(['X'], 'Out')


class TestSeluAPI(unittest.TestCase):
    # Test paddle.nn.SELU and paddle.nn.functional.selu.
    def setUp(self):
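        # Non-default scale and alpha values.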
        self.scale = 1.5
        self.alpha = 2.0
        self.x_np = np.random.normal(size=[3, 5, 5, 10]).astype(np.float64)
        # selu is not differentiable at zero, so keep the random inputs
        # away from zero.
        self.x_np[np.abs(self.x_np) < 0.005] = 0.02
        self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
            else paddle.CPUPlace()

    def test_static_api(self):
        with paddle.static.program_guard(paddle.static.Program()):
            x = paddle.fluid.data('X', self.x_np.shape, self.x_np.dtype)
            out1 = F.selu(x, self.scale, self.alpha)
            selu = paddle.nn.SELU(self.scale, self.alpha)
            out2 = selu(x)
            exe = paddle.static.Executor(self.place)
            res = exe.run(feed={'X': self.x_np}, fetch_list=[out1, out2])
        out_ref = ref_selu(self.x_np, self.scale, self.alpha)
        for r in res:
            np.testing.assert_allclose(out_ref, r, rtol=1e-05)

    def test_dygraph_api(self):
        paddle.disable_static(self.place)
        x = paddle.to_tensor(self.x_np)
        out1 = F.selu(x, self.scale, self.alpha)
        selu = paddle.nn.SELU(self.scale, self.alpha)
        out2 = selu(x)
        out_ref = ref_selu(self.x_np, self.scale, self.alpha)
        for r in [out1, out2]:
            np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05)
        paddle.enable_static()

    def test_fluid_api(self):
        with fluid.program_guard(fluid.Program()):
            x = fluid.data('X', self.x_np.shape, self.x_np.dtype)
            out = fluid.layers.selu(x, self.scale, self.alpha)
            exe = fluid.Executor(self.place)
            res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
        out_ref = ref_selu(self.x_np, self.scale, self.alpha)
        np.testing.assert_allclose(out_ref, res[0], rtol=1e-05)

    def test_errors(self):
        with paddle.static.program_guard(paddle.static.Program()):
            # The input type must be Variable.
            self.assertRaises(TypeError, F.selu, 1)
            # The input dtype must be float16, float32 or float64.
            x_int32 = paddle.fluid.data(name='x_int32',
                                        shape=[12, 10],
                                        dtype='int32')
            self.assertRaises(TypeError, F.selu, x_int32)
            # The scale must be greater than 1.0.
            x_fp32 = paddle.fluid.data(name='x_fp32',
                                       shape=[12, 10],
                                       dtype='float32')
            self.assertRaises(ValueError, F.selu, x_fp32, -1.0)
            # The alpha must be no less than 0.
            self.assertRaises(ValueError, F.selu, x_fp32, 1.6, -1.0)
            # The input dtype float16 is supported.
            x_fp16 = paddle.fluid.data(name='x_fp16',
                                       shape=[12, 10],
                                       dtype='float16')
            F.selu(x_fp16)


if __name__ == "__main__":
    unittest.main()