test_cross_entropy_op.py
import unittest
import numpy as np
from op_test import OpTest, randomize_probability
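# These tests exercise the cross_entropy operator in its two label modes:
# hard labels given as integer class indices (soft_label=False) and dense
# soft/one-hot label rows (soft_label=True). The op_test helper
# randomize_probability is assumed to return row-normalized probability
# predictions used as the op input X.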


class TestCrossEntropyOp1(OpTest):
    """Test cross-entropy with discrete one-hot labels.
    """

    def setUp(self):
        self.op_type = "cross_entropy"
        batch_size = 30
        class_num = 10

        X = randomize_probability(batch_size, class_num, dtype="float64")

        label = np.random.randint(0, class_num, (batch_size, 1), dtype="int64")
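        # Reference loss for hard labels: loss_i = -log(X[i, label_i]).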
        cross_entropy = np.asmatrix(
            [[-np.log(X[i][label[i][0]])] for i in range(X.shape[0])],
            dtype="float64")

        self.inputs = {"X": X, "Label": label}
        self.outputs = {"Y": cross_entropy}
        self.attrs = {"soft_label": False}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(["X"], "Y", numeric_grad_delta=0.001)


class TestCrossEntropyOp2(OpTest):
    """Test cross-entropy with vectorized soft labels.
    """

    def setUp(self):
        self.op_type = "cross_entropy"
        batch_size = 5
        class_num = 37

        X = randomize_probability(batch_size, class_num)
        label = np.random.uniform(0.1, 1.0,
                                  [batch_size, class_num]).astype("float32")
        label /= label.sum(axis=1, keepdims=True)
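        # Reference loss for soft labels:
        # loss_i = -sum_j label[i, j] * log(X[i, j]).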
        cross_entropy = (-label * np.log(X)).sum(
            axis=1, keepdims=True).astype("float32")

        self.inputs = {"X": X, "Label": label}
        self.outputs = {"Y": cross_entropy}
        self.attrs = {"soft_label": True}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(
            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)


class TestCrossEntropyOp3(OpTest):
    """Test cross-entropy with vectorized one-hot representation of labels.
    """

    def setUp(self):
        self.op_type = "cross_entropy"
        batch_size = 5
        class_num = 17

        X = randomize_probability(batch_size, class_num)
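        # Build a dense one-hot label so the op runs in soft_label mode.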
        label_index = np.random.randint(
            0, class_num, (batch_size, ), dtype="int32")
        label = np.zeros(X.shape)
        label[np.arange(batch_size), label_index] = 1

        cross_entropy = np.asmatrix(
            [[-np.log(X[i][label_index[i]])] for i in range(X.shape[0])],
            dtype="float32")
        cross_entropy2 = (-label * np.log(X)).sum(
            axis=1, keepdims=True).astype("float32")
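        # The indexed and dense soft-label formulations above compute the
        # same quantity; assert their agreement as a sanity check.
        np.testing.assert_allclose(
            np.asarray(cross_entropy), cross_entropy2, rtol=1e-5)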

        self.inputs = {"X": X, "Label": label.astype(np.float32)}
        self.outputs = {"Y": cross_entropy}
        self.attrs = {"soft_label": True}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(
            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)


if __name__ == "__main__":
    unittest.main()