#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import division

import paddle
import unittest
import numpy as np
from op_test import OpTest


def kldiv_loss(x, target, reduction):
    """NumPy reference implementation of the KL-divergence loss.

    Computes ``target * (log(target) - x)`` elementwise, zeroes the
    contribution of non-positive targets, and applies the requested
    reduction.

    Args:
        x: input array of log-probabilities.
        target: target probability array; entries <= 0 contribute 0 loss.
        reduction: one of 'none', 'batchmean', 'mean', 'sum'.

    Returns:
        A scalar for the reducing modes, or the elementwise loss array
        for 'none'.
    """
    output = target * (np.log(target) - x)
    # Strict '> 0' masking: for target == 0 the raw term is
    # 0 * log(0) == NaN, and np.where(target >= 0, ...) would select
    # that NaN instead of zeroing it out.
    loss = np.where(target > 0, output, np.zeros_like(x))

    if reduction == "batchmean":
        # Sum divided by batch size; a 0-d input has no batch axis,
        # so fall back to the plain sum.
        if len(x.shape) > 0:
            return loss.sum() / x.shape[0]
        else:
            return loss.sum()
    if reduction == "mean":
        return loss.mean()
    if reduction == "sum":
        return loss.sum()

    return loss


class TestKLDivLossOp(OpTest):
    """Checks the kldiv_loss op's output and gradient against the NumPy reference."""

    def setUp(self):
        self.initTestCase()
        self.op_type = 'kldiv_loss'
        # Random inputs in [-10, 10); dtype matches the op's float64 kernel.
        x = np.random.uniform(-10, 10, self.x_shape).astype('float64')
        target = np.random.uniform(-10, 10, self.x_shape).astype('float64')

        self.attrs = {"reduction": self.reduction}
        self.inputs = {'X': x, 'Target': target}
        self.outputs = {
            'Loss': kldiv_loss(x, target, self.reduction).astype('float64')
        }

    def test_check_output(self):
        """Forward result must match the NumPy ground truth."""
        self.check_output()

    def test_check_grad(self):
        """Gradient flows only to X; Target takes no gradient."""
        self.check_grad(['X'], 'Loss', no_grad_set=set(["Target"]))

    def initTestCase(self):
        # Overridden by subclasses to vary the input shape and reduction.
        self.x_shape = (4, 5, 5)
        self.reduction = 'batchmean'


class TestKLDivLossOp2(TestKLDivLossOp):
    """'none' reduction over a 4-D input."""

    def initTestCase(self):
        self.reduction = 'none'
        self.x_shape = (3, 2, 7, 7)


class TestKLDivLossOp3(TestKLDivLossOp):
    """'mean' reduction over a 5-D input."""

    def initTestCase(self):
        self.reduction = 'mean'
        self.x_shape = (2, 3, 5, 7, 9)


class TestKLDivLossOp4(TestKLDivLossOp):
    """'sum' reduction over a 2-D input."""

    def initTestCase(self):
        self.reduction = 'sum'
        self.x_shape = (5, 20)

class TestKLDivLossDygraph(unittest.TestCase):
    """Compares paddle.nn.KLDivLoss in dygraph mode with the NumPy reference."""

    def run_kl_loss(self, reduction, shape=(5, 20)):
        """Run KLDivLoss under `reduction` on random data and check it matches NumPy."""
        x_np = np.random.uniform(-10, 10, shape).astype('float64')
        target_np = np.random.uniform(-10, 10, shape).astype('float64')
        expected = kldiv_loss(x_np, target_np, reduction)

        with paddle.fluid.dygraph.guard():
            criterion = paddle.nn.KLDivLoss(reduction)
            actual = criterion(
                paddle.to_variable(x_np), paddle.to_variable(target_np))
            self.assertTrue(np.allclose(actual.numpy(), expected))

    def test_kl_loss_batchmean(self):
        self.run_kl_loss('batchmean')

    def test_kl_loss_batchmean_shape(self):
        # A 0-d input exercises the scalar fallback of 'batchmean'.
        self.run_kl_loss('batchmean', ())

    def test_kl_loss_mean(self):
        self.run_kl_loss('mean')

    def test_kl_loss_sum(self):
        self.run_kl_loss('sum')

    def test_kl_loss_none(self):
        self.run_kl_loss('none')

    def test_kl_loss_static_api(self):
        # Only checks that the functional API builds a static graph
        # without raising; the result is not executed here.
        input = paddle.fluid.data(name='input', shape=[5, 20])
        label = paddle.fluid.data(name='label', shape=[5, 20])

        pred_loss = paddle.nn.functional.kl_div(input, label)


# Allow running this test file directly as a script.
if __name__ == "__main__":
    unittest.main()