# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle
import paddle.fluid as fluid
import numpy as np
import unittest
from paddle.fluid.framework import _test_eager_guard


def call_sfl_functional(logit,
                        label,
                        normalizer,
                        alpha=0.25,
                        gamma=2.0,
                        reduction='sum'):
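    """Thin wrapper so the static, dygraph, and eager paths below share one call site."""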
    res = paddle.nn.functional.sigmoid_focal_loss(logit,
                                                  label,
                                                  normalizer,
                                                  alpha=alpha,
                                                  gamma=gamma,
                                                  reduction=reduction)
    return res


def test_static(place,
                logit_np,
                label_np,
                normalizer_np,
                alpha=0.25,
                gamma=2.0,
                reduction='sum'):
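    """Build and run sigmoid_focal_loss in a static-graph program on `place`."""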
    paddle.enable_static()
    prog = paddle.static.Program()
    startup_prog = paddle.static.Program()
    with paddle.static.program_guard(prog, startup_prog):
        logit = paddle.fluid.data(name='logit',
                                  shape=logit_np.shape,
                                  dtype='float64')
        label = paddle.fluid.data(name='label',
                                  shape=label_np.shape,
                                  dtype='float64')
        feed_dict = {"logit": logit_np, "label": label_np}

        normalizer = None
        if normalizer_np is not None:
            normalizer = paddle.fluid.data(name='normalizer',
                                           shape=normalizer_np.shape,
                                           dtype='float64')
            feed_dict["normalizer"] = normalizer_np

        res = call_sfl_functional(logit, label, normalizer, alpha, gamma,
                                  reduction)
        exe = paddle.static.Executor(place)
        static_result = exe.run(prog, feed=feed_dict, fetch_list=[res])
    return static_result


def test_dygraph(place,
                 logit_np,
                 label_np,
                 normalizer_np,
                 alpha=0.25,
                 gamma=2.0,
                 reduction='sum'):
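    """Run sigmoid_focal_loss eagerly (dygraph mode) and return the result as numpy."""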
    paddle.disable_static()
    logit = paddle.to_tensor(logit_np)
    label = paddle.to_tensor(label_np)
    normalizer = None
    if normalizer_np is not None:
        normalizer = paddle.to_tensor(normalizer_np)
    dy_res = call_sfl_functional(logit, label, normalizer, alpha, gamma,
                                 reduction)
    dy_result = dy_res.numpy()
    paddle.enable_static()
    return dy_result


def calc_sigmoid_focal_loss(logit_np,
                            label_np,
                            normalizer_np,
                            alpha=0.25,
                            gamma=2.0,
                            reduction='sum'):
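    """NumPy reference for sigmoid focal loss (Lin et al., 2017):

        FL(p_t) = -alpha_t * (1 - p_t)**gamma * log(p_t)

    optionally divided by `normalizer` before reduction. For example, a logit
    of 0 with label 1 and the default alpha/gamma gives
    0.25 * log(2) * 0.5**2 ~= 0.0433 per element before reduction.
    """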

    # Numerically stable binary cross-entropy with logits:
    # max(x, 0) - x * z + log(1 + exp(-|x|)).
    loss = np.maximum(
        logit_np,
        0) - logit_np * label_np + np.log(1 + np.exp(-np.abs(logit_np)))

    pred = 1 / (1 + np.exp(-logit_np))
    # p_t: predicted probability of the ground-truth class.
    p_t = pred * label_np + (1 - pred) * (1 - label_np)

    if alpha is not None:
        # alpha_t balances the contribution of positive vs. negative examples.
        alpha_t = alpha * label_np + (1 - alpha) * (1 - label_np)
        loss = alpha_t * loss

    if gamma is not None:
        # (1 - p_t)**gamma down-weights well-classified (easy) examples.
        loss = loss * ((1 - p_t)**gamma)

    if normalizer_np is not None:
        loss = loss / normalizer_np

    if reduction == 'mean':
        loss = np.mean(loss)
    elif reduction == 'sum':
        loss = np.sum(loss)

    return loss


class TestSigmoidFocalLoss(unittest.TestCase):
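    """Compare static, dygraph, and eager results against the NumPy reference."""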

    def test_SigmoidFocalLoss(self):
        logit_np = np.random.uniform(0.1, 0.8,
                                     size=(2, 3, 4, 10)).astype(np.float64)
        label_np = np.random.randint(0, 2,
                                     size=(2, 3, 4, 10)).astype(np.float64)
        normalizer_nps = [
            np.asarray([np.sum(label_np > 0)], dtype=label_np.dtype), None
        ]
        places = [fluid.CPUPlace()]
        if fluid.core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        reductions = ['sum', 'mean', 'none']
        alphas = [0.25, 0.5]
        gammas = [3, 0.]
        for place in places:
            for reduction in reductions:
                for alpha in alphas:
                    for gamma in gammas:
                        for normalizer_np in normalizer_nps:
                            static_result, = test_static(
                                place, logit_np, label_np, normalizer_np, alpha,
                                gamma, reduction)
                            dy_result = test_dygraph(place, logit_np, label_np,
                                                     normalizer_np, alpha,
                                                     gamma, reduction)
                            with _test_eager_guard():
                                eager_result = test_dygraph(
                                    place, logit_np, label_np, normalizer_np,
                                    alpha, gamma, reduction)
                            expected = calc_sigmoid_focal_loss(
                                logit_np, label_np, normalizer_np, alpha, gamma,
                                reduction)
                            np.testing.assert_allclose(static_result,
                                                       expected,
                                                       rtol=1e-05)
                            np.testing.assert_allclose(static_result,
                                                       dy_result,
                                                       rtol=1e-05)
                            np.testing.assert_allclose(dy_result,
                                                       expected,
                                                       rtol=1e-05)
                            np.testing.assert_allclose(eager_result,
                                                       expected,
                                                       rtol=1e-05)

    def test_SigmoidFocalLoss_error(self):
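        """An unsupported `reduction` value should raise ValueError."""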
        paddle.disable_static()
        logit = paddle.to_tensor([[0.97], [0.91], [0.03]], dtype='float32')
        label = paddle.to_tensor([[1.0], [1.0], [0.0]], dtype='float32')
        self.assertRaises(ValueError,
                          paddle.nn.functional.sigmoid_focal_loss,
                          logit=logit,
                          label=label,
                          normalizer=None,
                          reduction="unsupport reduction")
        paddle.enable_static()


if __name__ == "__main__":
    unittest.main()