test_maxout_op.py
#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
from op_test import OpTest


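# Reference implementation: maxout splits the C channels of an NCHW input
# into C // groups blocks of `groups` consecutive channels and keeps the
# element-wise maximum within each block, so [N, C, H, W] becomes
# [N, C // groups, H, W]. For example, with groups=2 a 6-channel input
# yields 3 output channels, each max(channel 2k, channel 2k+1).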
def maxout_forward_naive(input, groups):
    s0, s1, s2, s3 = input.shape
    # View the channel axis as (C // groups, groups) and reduce over groups.
    # Integer division (//) keeps the shape integral under Python 3.
    return input.reshape([s0, s1 // groups, groups, s2, s3]).max(axis=2)


class TestMaxOutOp(OpTest):
    def setUp(self):
        self.op_type = "maxout"
        self.init_test_case()
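        # Random NCHW input; the expected output comes from the NumPy
        # reference implementation above.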
        input = np.random.random(self.shape).astype("float32")
        output = self.MaxOut_forward_naive(input, self.groups).astype("float32")

        self.inputs = {'X': input}
        self.attrs = {'groups': self.groups}

        self.outputs = {'Out': output}

    def test_check_output(self):
        self.check_output()

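    # The op's analytic gradient of Out w.r.t. X is checked against a
    # numerically estimated gradient.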
    def test_check_grad(self):
        self.check_grad(['X'], 'Out')

    def init_test_case(self):
        self.MaxOut_forward_naive = maxout_forward_naive
        self.shape = [100, 6, 2, 2]
        self.groups = 2


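# A minimal sketch of how a second configuration could be exercised by
# subclassing and overriding init_test_case; the class name and the
# groups=3 value are illustrative additions, not part of the original test.
class TestMaxOutOpGroups3(TestMaxOutOp):
    def init_test_case(self):
        self.MaxOut_forward_naive = maxout_forward_naive
        self.shape = [100, 6, 2, 2]
        self.groups = 3

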
if __name__ == '__main__':
    unittest.main()