#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import paddle.nn.functional as F
from op_test import OpTest
from paddle.fluid.framework import _test_eager_guard

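# Run the operator tests in static graph mode and fix the random seed for
# reproducibility.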
paddle.enable_static()
np.random.seed(1)


def maxout_forward_naive(x, groups, channel_axis):
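    """Naive NumPy reference for maxout: split the channel axis into
    (channels // groups, groups) and take the maximum over each group."""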
    s0, s1, s2, s3 = x.shape
    if channel_axis == 1:
        return np.ndarray([s0, s1 // groups, groups, s2, s3],
                          buffer=x, dtype=x.dtype).max(axis=2)
    return np.ndarray([s0, s1, s2, s3 // groups, groups],
                      buffer=x, dtype=x.dtype).max(axis=4)


class TestMaxOutOp(OpTest):
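    """Checks the maxout operator's forward output and gradient against the
    naive NumPy reference."""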

    def setUp(self):
        self.op_type = "maxout"
        self.python_api = paddle.nn.functional.maxout
        self.dtype = 'float64'
        self.shape = [3, 6, 2, 4]
        self.groups = 2
        self.axis = 1
        self.set_attrs()

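        # Generate a random input and compute the expected output with the
        # naive reference implementation.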
        x = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
        out = maxout_forward_naive(x, self.groups, self.axis)

        self.inputs = {'X': x}
        self.attrs = {'groups': self.groups, 'axis': self.axis}
        self.outputs = {'Out': out}

    def set_attrs(self):
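        # Overridden by subclasses to vary dtype, shape, groups, or axis.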
        pass

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        self.check_grad(['X'], 'Out', check_eager=True)


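# Variants of the base case covering a different channel axis, dtype, and
# number of groups.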
class TestMaxOutOpAxis0(TestMaxOutOp):

    def set_attrs(self):
        self.axis = -1


class TestMaxOutOpAxis1(TestMaxOutOp):

    def set_attrs(self):
        self.axis = 3


class TestMaxOutOpFP32(TestMaxOutOp):

    def set_attrs(self):
        self.dtype = 'float32'


class TestMaxOutOpGroups(TestMaxOutOp):

    def set_attrs(self):
        self.groups = 3


class TestMaxoutAPI(unittest.TestCase):
    # Tests for paddle.nn.Maxout and paddle.nn.functional.maxout.
    def setUp(self):
        self.x_np = np.random.uniform(-1, 1, [2, 6, 5, 4]).astype(np.float64)
        self.groups = 2
        self.axis = 1
        self.place = paddle.CUDAPlace(0) if core.is_compiled_with_cuda() \
            else paddle.CPUPlace()

    def test_static_api(self):
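        # Build a static program that applies both the functional API and the
        # Maxout layer, then compare the results with the naive reference.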
        with paddle.static.program_guard(paddle.static.Program()):
            x = paddle.fluid.data('X', self.x_np.shape, self.x_np.dtype)
            out1 = F.maxout(x, self.groups, self.axis)
            m = paddle.nn.Maxout(self.groups, self.axis)
            out2 = m(x)
            exe = paddle.static.Executor(self.place)
            res = exe.run(feed={'X': self.x_np}, fetch_list=[out1, out2])
        out_ref = maxout_forward_naive(self.x_np, self.groups, self.axis)
        for r in res:
            np.testing.assert_allclose(out_ref, r, rtol=1e-05)

    def test_dygraph_api(self):
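        # Repeat the same checks in dynamic graph (imperative) mode.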
        paddle.disable_static(self.place)
        x = paddle.to_tensor(self.x_np)
        out1 = F.maxout(x, self.groups, self.axis)
        m = paddle.nn.Maxout(self.groups, self.axis)
        out2 = m(x)
        out_ref = maxout_forward_naive(self.x_np, self.groups, self.axis)
        for r in [out1, out2]:
            np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05)

        out3 = F.maxout(x, self.groups, -1)
        out3_ref = maxout_forward_naive(self.x_np, self.groups, -1)
        np.testing.assert_allclose(out3_ref, out3.numpy(), rtol=1e-05)
        paddle.enable_static()

    def test_fluid_api(self):
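        # Exercise the legacy fluid.layers.maxout API in both static and
        # dynamic graph modes.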
        with fluid.program_guard(fluid.Program()):
            x = fluid.data('X', self.x_np.shape, self.x_np.dtype)
            out = fluid.layers.maxout(x, groups=self.groups, axis=self.axis)
            exe = fluid.Executor(self.place)
            res = exe.run(feed={'X': self.x_np}, fetch_list=[out])
        out_ref = maxout_forward_naive(self.x_np, self.groups, self.axis)
        np.testing.assert_allclose(out_ref, res[0], rtol=1e-05)

        paddle.disable_static(self.place)
        x = paddle.to_tensor(self.x_np)
        out = paddle.fluid.layers.maxout(x, groups=self.groups, axis=self.axis)
        np.testing.assert_allclose(out_ref, out.numpy(), rtol=1e-05)
        paddle.enable_static()

    def test_errors(self):
        with paddle.static.program_guard(paddle.static.Program()):
            # The input type must be Variable.
            self.assertRaises(TypeError, F.maxout, 1)
            # The input dtype must be float16, float32, float64.
            x_int32 = paddle.fluid.data(name='x_int32',
                                        shape=[2, 4, 6, 8],
                                        dtype='int32')
            self.assertRaises(TypeError, F.maxout, x_int32)

            x_float32 = paddle.fluid.data(name='x_float32', shape=[2, 4, 6, 8])
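            # The axis must be 1 (NCHW) or -1/3 (NHWC); other values raise
            # ValueError.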
            self.assertRaises(ValueError, F.maxout, x_float32, 2, 2)

    def test_dygraph_final_state_api(self):
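        # Re-run the dygraph checks under the eager-mode guard.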
        with _test_eager_guard():
            self.test_dygraph_api()


if __name__ == '__main__':
    unittest.main()