# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import config
import numpy as np
import parameterize
import scipy.stats

import paddle


@parameterize.place(config.DEVICES)
@parameterize.parameterize_cls(
    (parameterize.TEST_CASE_NAME, 'loc', 'scale'),
    [
        ('one-dim', parameterize.xrand((2,)), parameterize.xrand((2,))),
        ('multi-dim', parameterize.xrand((5, 5)), parameterize.xrand((5, 5))),
    ],
)
class TestLaplace(unittest.TestCase):
    """Validate Laplace distribution moments, entropy and sampling.

    Paddle results are compared against closed-form values and
    ``scipy.stats.laplace``. The ``loc``/``scale`` numpy arrays are
    injected onto the class by ``parameterize_cls``.
    """

    def setUp(self):
        self._dist = paddle.distribution.Laplace(
            loc=paddle.to_tensor(self.loc), scale=paddle.to_tensor(self.scale)
        )

    def test_mean(self):
        mean = self._dist.mean
        self.assertEqual(mean.numpy().dtype, self.scale.dtype)
        np.testing.assert_allclose(
            mean,
            self._np_mean(),
            rtol=config.RTOL.get(str(self.scale.dtype)),
            atol=config.ATOL.get(str(self.scale.dtype)),
        )

    def test_variance(self):
        var = self._dist.variance
        self.assertEqual(var.numpy().dtype, self.scale.dtype)
        np.testing.assert_allclose(
            var,
            self._np_variance(),
            rtol=config.RTOL.get(str(self.scale.dtype)),
            atol=config.ATOL.get(str(self.scale.dtype)),
        )

    def test_stddev(self):
        stddev = self._dist.stddev
        self.assertEqual(stddev.numpy().dtype, self.scale.dtype)
        np.testing.assert_allclose(
            stddev,
            self._np_stddev(),
            rtol=config.RTOL.get(str(self.scale.dtype)),
            atol=config.ATOL.get(str(self.scale.dtype)),
        )

    def test_entropy(self):
        entropy = self._dist.entropy()
        self.assertEqual(entropy.numpy().dtype, self.scale.dtype)
        # Fix: previously only the dtype was asserted; also validate the value
        # against scipy's closed-form Laplace entropy (``_np_entropy`` existed
        # but was never used), mirroring the sibling moment tests.
        np.testing.assert_allclose(
            entropy,
            self._np_entropy(),
            rtol=config.RTOL.get(str(self.scale.dtype)),
            atol=config.ATOL.get(str(self.scale.dtype)),
        )

    def test_sample(self):
        sample_shape = (50000,)
        samples = self._dist.sample(sample_shape)
        sample_values = samples.numpy()

        self.assertEqual(sample_values.dtype, self.scale.dtype)
        # The sample shape must match both the distribution's extended shape
        # and the explicit prepend of sample_shape to the parameter shape.
        self.assertEqual(
            tuple(samples.shape), tuple(self._dist._extend_shape(sample_shape))
        )
        self.assertEqual(samples.shape, list(sample_shape + self.loc.shape))
        self.assertEqual(sample_values.shape, sample_shape + self.loc.shape)

        # Loose tolerances: Monte-Carlo moments over 50k draws vs exact values.
        np.testing.assert_allclose(
            sample_values.mean(axis=0),
            scipy.stats.laplace.mean(self.loc, scale=self.scale),
            rtol=0.2,
            atol=0.0,
        )
        np.testing.assert_allclose(
            sample_values.var(axis=0),
            scipy.stats.laplace.var(self.loc, scale=self.scale),
            rtol=0.1,
            atol=0.0,
        )

    def _np_mean(self):
        # Mean of Laplace(loc, scale) is loc.
        return self.loc

    def _np_stddev(self):
        # Stddev of Laplace(loc, scale) is sqrt(2) * scale.
        return (2**0.5) * self.scale

    def _np_variance(self):
        stddev = (2**0.5) * self.scale
        return np.power(stddev, 2)

    def _np_entropy(self):
        return scipy.stats.laplace.entropy(loc=self.loc, scale=self.scale)


@parameterize.place(config.DEVICES)
@parameterize.parameterize_cls(
    (parameterize.TEST_CASE_NAME, 'loc', 'scale'),
    [
        ('float', 1.0, 2.0),
        ('int', 3, 4),
    ],
)
class TestLaplaceKS(unittest.TestCase):
    """Goodness-of-fit check for Laplace sampling with scalar parameters."""

    def setUp(self):
        self._dist = paddle.distribution.Laplace(loc=self.loc, scale=self.scale)

    def test_sample(self):
        shape = (20000,)
        draws = self._dist.sample(shape).numpy()
        self.assertTrue(self._kstest(self.loc, self.scale, draws))

    def _kstest(self, loc, scale, samples):
        # Uses the Kolmogorov-Smirnov test for goodness of fit.
        reference_cdf = scipy.stats.laplace(loc, scale=scale).cdf
        statistic, _ = scipy.stats.kstest(samples, reference_cdf)
        return statistic < 0.02


@parameterize.place(config.DEVICES)
@parameterize.parameterize_cls(
    (parameterize.TEST_CASE_NAME, 'loc', 'scale', 'value'),
    [
        (
            'value-float',
            np.array([0.2, 0.3]),
            np.array([2.0, 3.0]),
            np.array([2.0, 5.0]),
        ),
        (
            'value-int',
            np.array([0.2, 0.3]),
            np.array([2.0, 3.0]),
            np.array([2, 5]),
        ),
        (
            'value-multi-dim',
            np.array([0.2, 0.3]),
            np.array([2.0, 3.0]),
            np.array([[4.0, 6], [8, 2]]),
        ),
    ],
)
class TestLaplacePDF(unittest.TestCase):
    """Point-wise checks of prob/log_prob/cdf/icdf against scipy."""

    def setUp(self):
        self._dist = paddle.distribution.Laplace(
            loc=paddle.to_tensor(self.loc), scale=paddle.to_tensor(self.scale)
        )

    def _tolerances(self):
        # Tolerances are keyed by the dtype of the parameter arrays.
        dtype_key = str(self.loc.dtype)
        return config.RTOL.get(dtype_key), config.ATOL.get(dtype_key)

    def test_prob(self):
        rtol, atol = self._tolerances()
        np.testing.assert_allclose(
            self._dist.prob(paddle.to_tensor(self.value)),
            scipy.stats.laplace.pdf(self.value, self.loc, self.scale),
            rtol=rtol,
            atol=atol,
        )

    def test_log_prob(self):
        rtol, atol = self._tolerances()
        np.testing.assert_allclose(
            self._dist.log_prob(paddle.to_tensor(self.value)),
            scipy.stats.laplace.logpdf(self.value, self.loc, self.scale),
            rtol=rtol,
            atol=atol,
        )

    def test_cdf(self):
        rtol, atol = self._tolerances()
        np.testing.assert_allclose(
            self._dist.cdf(paddle.to_tensor(self.value)),
            scipy.stats.laplace.cdf(self.value, self.loc, self.scale),
            rtol=rtol,
            atol=atol,
        )

    def test_icdf(self):
        rtol, atol = self._tolerances()
        np.testing.assert_allclose(
            self._dist.icdf(paddle.to_tensor(self.value)),
            scipy.stats.laplace.ppf(self.value, self.loc, self.scale),
            rtol=rtol,
            atol=atol,
        )


@parameterize.place(config.DEVICES)
@parameterize.parameterize_cls(
    (parameterize.TEST_CASE_NAME, 'loc1', 'scale1', 'loc2', 'scale2'),
    [
        (
            'kl',
            np.array([0.0]),
            np.array([1.0]),
            np.array([1.0]),
            np.array([0.5]),
        )
    ],
)
class TestLaplaceAndLaplaceKL(unittest.TestCase):
    """Compare paddle's Laplace-to-Laplace KL divergence with a numeric estimate."""

    def setUp(self):
        self._dist_1 = paddle.distribution.Laplace(
            loc=paddle.to_tensor(self.loc1), scale=paddle.to_tensor(self.scale1)
        )
        self._dist_2 = paddle.distribution.Laplace(
            loc=paddle.to_tensor(self.loc2), scale=paddle.to_tensor(self.scale2)
        )

    def test_kl_divergence(self):
        # Very loose rtol: the reference below is a coarse discrete estimate.
        np.testing.assert_allclose(
            paddle.distribution.kl_divergence(self._dist_1, self._dist_2),
            self._np_kl(),
            atol=0,
            rtol=0.50,
        )

    def _np_kl(self):
        # Estimate KL(p || q): evaluate both densities on a grid spanning the
        # central 98% of the standard Laplace, then let scipy.stats.entropy
        # normalize the samples and compute the discrete relative entropy.
        grid = np.linspace(
            scipy.stats.laplace.ppf(0.01), scipy.stats.laplace.ppf(0.99), 1000
        )
        p = scipy.stats.laplace.pdf(grid, loc=0.0, scale=1.0)
        q = scipy.stats.laplace.pdf(grid, loc=1.0, scale=0.5)
        return scipy.stats.entropy(p, q)


# Allow running this test module directly (e.g. `python test_distribution_laplace.py`).
if __name__ == '__main__':
    unittest.main()