import unittest
import numpy as np
from op_test import OpTest
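
# Reference outputs below are computed with NumPy:
#   index (hard) labels:  Y[i] = -log(X[i, Label[i]])
#   soft labels:          Y[i] = -sum_j Label[i, j] * log(X[i, j])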


class TestCrossEntropyOp1(OpTest):
    """Test standard cross-entropy, with index representation of labels.
    """

    def setUp(self):
        self.op_type = "cross_entropy"
        batch_size = 30
        class_num = 10
        X = np.random.uniform(0.1, 1.0,
                              [batch_size, class_num]).astype("float32")
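        # Hard labels: one class index per example, shape (batch_size, 1).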
        label = np.random.randint(0, class_num, (batch_size, 1), dtype="int32")
        cross_entropy = np.asmatrix(
            [[-np.log(X[i][label[i][0]])] for i in range(X.shape[0])],
            dtype="float32")
        self.inputs = {"X": X, "Label": label}
        self.outputs = {"Y": cross_entropy}
        self.attrs = {"soft_label": False}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(["X"], "Y")


class TestCrossEntropyOp2(OpTest):
    """Test soft-label cross-entropy, with vecterized soft labels.
    """

    def setUp(self):
        self.op_type = "cross_entropy"
        batch_size = 5
        # This setting tests threads in more than one warp.
        class_num = 37
        X = np.random.uniform(0.1, 1.0,
                              [batch_size, class_num]).astype("float32")
        label = np.random.uniform(0.1, 1.0,
                                  [batch_size, class_num]).astype("float32")
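        # Normalize each row so soft labels form a probability distribution.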
        label /= label.sum(axis=1, keepdims=True)
        cross_entropy = (-label * np.log(X)).sum(
            axis=1, keepdims=True).astype("float32")
        self.inputs = {"X": X, "Label": label}
        self.outputs = {"Y": cross_entropy}
        self.attrs = {"soft_label": True}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(["X"], "Y", max_relative_error=0.05)


class TestCrossEntropyOp3(OpTest):
    """Test one-hot cross-entropy, with vecterized one-hot representation of
    labels.
    """

    def setUp(self):
        self.op_type = "cross_entropy"
        batch_size = 5
        # This setting tests all threads in one warp.
        class_num = 17
        X = np.random.uniform(0.1, 1.0,
                              [batch_size, class_num]).astype("float32")
        label_index = np.random.randint(
            0, class_num, (batch_size), dtype="int32")
        label = np.zeros(X.shape)
        label[np.arange(batch_size), label_index] = 1
        cross_entropy = np.asmatrix(
            [[-np.log(X[i][label_index[i]])] for i in range(X.shape[0])],
            dtype="float32")
        # Sanity check: the soft-label formula applied to the one-hot labels
        # must agree with the index-based formula above.
        cross_entropy2 = (-label * np.log(X)).sum(
            axis=1, keepdims=True).astype("float32")
        self.assertTrue(np.allclose(cross_entropy, cross_entropy2))
        self.inputs = {"X": X, "Label": label}
        self.outputs = {"Y": cross_entropy}
        self.attrs = {"soft_label": True}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(["X"], "Y", max_relative_error=0.05)


class TestCrossEntropyOp4(OpTest):
    """Test soft-label cross-entropy.
    This unittest tests the GPU kernel when the layer size exceeds 512.
    """

    def setUp(self):
        self.op_type = "cross_entropy"
        batch_size = 2
        class_num = 517
        X = np.random.uniform(0.1, 1.0,
                              [batch_size, class_num]).astype("float32")
        label = np.random.uniform(0.1, 1.0,
                                  [batch_size, class_num]).astype("float32")
        label /= label.sum(axis=1, keepdims=True)
        cross_entropy = (-label * np.log(X)).sum(
            axis=1, keepdims=True).astype("float32")
        self.inputs = {"X": X, "Label": label}
        self.outputs = {"Y": cross_entropy}
        self.attrs = {"soft_label": True}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(["X"], "Y", max_relative_error=0.05)


if __name__ == "__main__":
    unittest.main()