#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
import paddle.fluid.core as core
from op_test import OpTest
import paddle
import paddle.fluid as fluid
import paddle.nn.functional as F


def ref_selu(
    x,
    scale=1.0507009873554804934193349852946,
    alpha=1.6732632423543772848170429916717,
):
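    # Closed-form SELU, which the element-wise loop below computes:
    #     out = scale * x                       for x >= 0
    #     out = scale * alpha * (exp(x) - 1)    for x <  0
    # A vectorized NumPy sketch of the same reference (not used here) would be
    #     scale * np.where(x >= 0, x, alpha * (np.exp(x) - 1.0))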
    out = np.copy(x)
    out_flat = out.flatten()
    for i in range(out_flat.size):
        if out_flat[i] < 0:
            out_flat[i] = alpha * np.exp(out_flat[i]) - alpha
        out_flat[i] = scale * out_flat[i]
    out = out_flat.reshape(x.shape)
    return out


class SeluTest(OpTest):
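    # OpTest-based check: the expected output is computed with ref_selu and
    # compared by check_output; check_grad verifies the gradient w.r.t. 'X'.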
    def setUp(self):
        self.op_type = "selu"
        self.python_api = paddle.nn.functional.selu
        self.x_shape = [3, 5, 5, 10]
        self.dtype = np.float64
        self.init_x_shape()
        self.init_dtype()

        alpha = 1.6732632423543772848170429916717
        scale = 1.0507009873554804934193349852946

        x = np.random.normal(size=self.x_shape).astype(self.dtype)

        # Since selu is not differentiable at zero, avoid random values
        # close to zero.
        x[np.abs(x) < 0.005] = 0.02

        out = ref_selu(x, scale, alpha)

        self.inputs = {'X': x}
        self.outputs = {'Out': out}

        self.attrs = {
            'alpha': alpha,
            'scale': scale,
        }

    def init_x_shape(self):
        pass

    def init_dtype(self):
        pass

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(['X'], 'Out')


class TestSeluAPI(unittest.TestCase):
    # test paddle.nn.SELU, paddle.nn.functional.selu
    def setUp(self):
        self.scale = 1.5
        self.alpha = 2.0
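        # Deliberately non-default scale/alpha so the parameters are actually
        # passed through, rather than relying on the SELU constants used as
        # defaults in ref_selu.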
        self.x_np = np.random.normal(size=[3, 5, 5, 10]).astype(np.float64)
        # Since selu is not differentiable at zero, avoid random values
        # close to zero.
        self.x_np[np.abs(self.x_np) < 0.005] = 0.02
        self.place = (
            paddle.CUDAPlace(0)
            if core.is_compiled_with_cuda()
            else paddle.CPUPlace()
        )

    def test_static_api(self):
        with paddle.static.program_guard(paddle.static.Program()):
            x = paddle.fluid.data('X', self.x_np.shape, self.x_np.dtype)
            out1 = F.selu(x, self.scale, self.alpha)
            selu = paddle.nn.SELU(self.scale, self.alpha)
            out2 = selu(x)
            exe = paddle.static.Executor(self.place)
            res = exe.run(feed={'X': self.x_np}, fetch_list=[out1, out2])
        out_ref = ref_selu(self.x_np, self.scale, self.alpha)
        for r in res:
            np.testing.assert_allclose(out_ref, r, rtol=1e-05)

    def test_dygraph_api(self):
        paddle.disable_static(self.place)
        x = paddle.to_tensor(self.x_np)
        out1 = F.selu(x, self.scale, self.alpha)
        selu = paddle.nn.SELU(self.scale, self.alpha)
        out2 = selu(x)
        out_ref = ref_selu(self.x_np, self.scale, self.alpha)
        for r in [out1, out2]:
            np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05)
        paddle.enable_static()

    def test_fluid_api(self):
        with fluid.program_guard(fluid.Program()):
            x = fluid.data('X', self.x_np.shape, self.x_np.dtype)
            out = F.selu(x, self.scale, self.alpha)
            exe = fluid.Executor(self.place)
            res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
        out_ref = ref_selu(self.x_np, self.scale, self.alpha)
        np.testing.assert_allclose(out_ref, res[0], rtol=1e-05)

    def test_errors(self):
        with paddle.static.program_guard(paddle.static.Program()):
            # The input type must be Variable.
            self.assertRaises(TypeError, F.selu, 1)
            # The input dtype must be float16, float32, float64.
            x_int32 = paddle.fluid.data(
                name='x_int32', shape=[12, 10], dtype='int32'
            )
            self.assertRaises(TypeError, F.selu, x_int32)
            # The scale must be greater than 1.0
            x_fp32 = paddle.fluid.data(
                name='x_fp32', shape=[12, 10], dtype='float32'
            )
            self.assertRaises(ValueError, F.selu, x_fp32, -1.0)
            # The alpha must be no less than 0
            self.assertRaises(ValueError, F.selu, x_fp32, 1.6, -1.0)
            # The input dtype float16 is supported.
            x_fp16 = paddle.fluid.data(
                name='x_fp16', shape=[12, 10], dtype='float16'
            )
            F.selu(x_fp16)


if __name__ == "__main__":
    unittest.main()