#  Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle
import unittest
import numpy as np
from op_test import OpTest


def ref_logsumexp(x, axis=None, keepdim=False, reduce_all=False):
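    """Numpy reference for logsumexp: log(sum(exp(x))) over the given axes (all axes if reduce_all)."""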
    if isinstance(axis, int):
        axis = (axis, )
    elif isinstance(axis, list):
        axis = tuple(axis)
    if reduce_all:
        axis = None
    out = np.log(np.exp(x).sum(axis=axis, keepdims=keepdim))
    return out


def logsumexp_wrapper(x, axis=None, keepdim=False, allreduce=False):
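    """Adapt paddle.logsumexp to the OpTest python_api interface; allreduce=True reduces over all axes."""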
    if allreduce:
        return paddle.logsumexp(x, None, keepdim)
    return paddle.logsumexp(x, axis, keepdim)


class TestLogsumexp(OpTest):
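    """Base OpTest case for the logsumexp op; subclasses override set_attrs()."""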
    def setUp(self):
        self.op_type = 'logsumexp'
        self.python_api = logsumexp_wrapper
        self.shape = [2, 3, 4, 5]
        self.dtype = 'float64'
        self.axis = [-1]
        self.keepdim = False
        self.reduce_all = False
        self.set_attrs()

        np.random.seed(10)
        x = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
        out = ref_logsumexp(x, self.axis, self.keepdim, self.reduce_all)

        self.inputs = {'X': x}
        self.outputs = {'Out': out}
        self.attrs = {
            'axis': self.axis,
            'keepdim': self.keepdim,
            'reduce_all': self.reduce_all
        }
        self.user_defined_grads = None
        self.user_defined_grad_outputs = None
        self.set_attrs_addition()

    def set_attrs(self):
        pass

    def set_attrs_addition(self):
        pass

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        self.check_grad(
            ['X'], ['Out'],
            user_defined_grads=self.user_defined_grads,
            user_defined_grad_outputs=self.user_defined_grad_outputs,
            check_eager=True)

    def calc_grad(self):
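        # Analytic gradient of logsumexp: d(out)/d(x_i) = exp(x_i - out),
        # scaled by the upstream gradient dy (here all ones).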
        dy = np.ones(1, dtype=self.dtype)
        x = self.inputs['X']
        y = self.outputs['Out']
        return dy * np.exp(x - y)


class TestLogsumexp_shape(TestLogsumexp):
    def set_attrs(self):
        self.shape = [4, 5, 6]


class TestLogsumexp_axis(TestLogsumexp):
    def set_attrs(self):
        self.axis = [0, -1]


class TestLogsumexp_axis_all(TestLogsumexp):
    def set_attrs(self):
        self.axis = [0, 1, 2, 3]

    def set_attrs_addition(self):
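        # On ROCm builds, supply the analytic gradient (and a unit upstream
        # gradient) as the reference for the gradient check.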
        if paddle.fluid.core.is_compiled_with_rocm():
            self.user_defined_grads = [self.calc_grad()]
            self.user_defined_grad_outputs = [np.ones(1, dtype=self.dtype)]


class TestLogsumexp_keepdim(TestLogsumexp):
    def set_attrs(self):
        self.keepdim = True


class TestLogsumexp_reduce_all(TestLogsumexp):
    def set_attrs(self):
        self.reduce_all = True

    def set_attrs_addition(self):
        if paddle.fluid.core.is_compiled_with_rocm():
            self.user_defined_grads = [self.calc_grad()]
            self.user_defined_grad_outputs = [np.ones(1, dtype=self.dtype)]


class TestLogsumexpError(unittest.TestCase):
    def test_errors(self):
        with paddle.static.program_guard(paddle.static.Program()):
            self.assertRaises(TypeError, paddle.logsumexp, 1)
            x1 = paddle.fluid.data(name='x1', shape=[120], dtype="int32")
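            # logsumexp does not accept integer inputs, so an int32 tensor should raise TypeError.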
            self.assertRaises(TypeError, paddle.logsumexp, x1)


class TestLogsumexpAPI(unittest.TestCase):
    def setUp(self):
        self.shape = [2, 3, 4, 5]
        self.x = np.random.uniform(-1, 1, self.shape).astype(np.float32)
        self.place = paddle.CUDAPlace(0) if paddle.fluid.core.is_compiled_with_cuda() \
            else paddle.CPUPlace()

    def api_case(self, axis=None, keepdim=False):
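        """Check one axis/keepdim combination in both static and dynamic graph modes against the numpy reference."""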
        out_ref = ref_logsumexp(self.x, axis, keepdim)
        with paddle.static.program_guard(paddle.static.Program()):
            x = paddle.fluid.data('X', self.shape)
            out = paddle.logsumexp(x, axis, keepdim)
            exe = paddle.static.Executor(self.place)
            res = exe.run(feed={'X': self.x}, fetch_list=[out])
        self.assertTrue(np.allclose(res[0], out_ref))

        paddle.disable_static(self.place)
        x = paddle.to_tensor(self.x)
        out = paddle.logsumexp(x, axis, keepdim)
        self.assertTrue(np.allclose(out.numpy(), out_ref))
        paddle.enable_static()

    def test_api(self):
        self.api_case()
        self.api_case(2)
        self.api_case([-1])
        self.api_case([2, -3])
        self.api_case((0, 1, -1))
        self.api_case(keepdim=True)

    def test_alias(self):
        paddle.disable_static(self.place)
        x = paddle.to_tensor(self.x)
        out1 = paddle.logsumexp(x)
        out2 = paddle.tensor.logsumexp(x)
        out3 = paddle.tensor.math.logsumexp(x)
        out_ref = ref_logsumexp(self.x)
        for out in [out1, out2, out3]:
            self.assertTrue(np.allclose(out.numpy(), out_ref))
        paddle.enable_static()


if __name__ == '__main__':
    unittest.main()