#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np

import paddle.fluid.core as core
from op_test import OpTest, randomize_probability
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard


class TestCrossEntropyOp(OpTest):
    """Test cross-entropy with discrete one-hot labels.

    Subclasses customize the scenario by overriding the ``init_*`` hooks
    and ``get_cross_entropy`` (the NumPy reference implementation).
    """

    def setUp(self):
        self.op_type = "cross_entropy"
        # Defaults; the init_* hooks below let subclasses override them.
        self.soft_label = False
        self.ignore_index = -100
        self.dtype = np.float64
        self.batch_size = 30
        self.class_num = 10

        # Customization hooks first, then data construction (x before
        # label before the reference loss, since each depends on the last).
        self.init_dtype_type()
        self.init_attr_type()
        self.init_bs_class_num()
        self.init_x()
        self.init_label()
        self.get_cross_entropy()

        self.inputs = {"X": self.x, "Label": self.label}
        self.outputs = {"Y": self.cross_entropy}
        self.attrs = {
            "soft_label": self.soft_label,
            "ignore_index": self.ignore_index
        }

    def init_x(self):
        # (batch_size, class_num) matrix of per-row probabilities.
        self.x = randomize_probability(
            self.batch_size, self.class_num, dtype=self.dtype)

    def init_label(self):
        # One integer class index per row, shape (batch_size, 1).
        self.label = np.random.randint(
            0, self.class_num, (self.batch_size, 1), dtype="int64")

    def get_cross_entropy(self):
        # Reference loss: -log(p[label]) for every row.
        rows = []
        for i in range(self.x.shape[0]):
            rows.append([-np.log(self.x[i][self.label[i][0]])])
        self.cross_entropy = np.asmatrix(rows, dtype="float64")

    def init_attr_type(self):
        # Hook: overridden by soft-label subclasses.
        pass

    def init_dtype_type(self):
        # Hook: overridden by the FP16 test factory.
        pass

    def init_bs_class_num(self):
        # Hook: overridden to change batch size / class count.
        pass

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(["X"], "Y", numeric_grad_delta=0.001)


class TestCrossEntropyOpRemoveLastDim(TestCrossEntropyOp):
    """Test cross-entropy with discrete one-hot labels with shape [batch_size]
    """

    def init_label(self):
        # Labels carry no trailing singleton dimension in this variant.
        self.label = np.random.randint(
            0, self.class_num, (self.batch_size), dtype="int64")

    def get_cross_entropy(self):
        losses = [
            -np.log(self.x[i][self.label[i]]) for i in range(self.x.shape[0])
        ]
        self.cross_entropy = np.asmatrix(losses, dtype="float64")


class TestCrossEntropyOp2(TestCrossEntropyOp):
    """Test cross-entropy with vectorized soft labels.
    """

    def init_label(self):
        # Random positive per-class weights, normalized into a distribution.
        raw = np.random.uniform(
            0.1, 1.0, [self.batch_size, self.class_num]).astype(self.dtype)
        self.label = raw / raw.sum(axis=1, keepdims=True)

    def get_cross_entropy(self):
        # Soft-label loss: sum_k -t_k * log(p_k), kept 2-D with keepdims.
        per_class = -self.label * np.log(self.x)
        self.cross_entropy = per_class.sum(
            axis=1, keepdims=True).astype(self.dtype)

    def init_attr_type(self):
        self.soft_label = True

    def init_dtype_type(self):
        self.dtype = np.float64

    def init_bs_class_num(self):
        self.batch_size = 5
        self.class_num = 37

    def test_check_grad(self):
        self.check_grad(
            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)


class TestCrossEntropyOp3(TestCrossEntropyOp):
    """Test cross-entropy with vectorized one-hot representation of labels.
    """

    def init_label(self):
        # Draw one class index per row, then expand to a one-hot matrix.
        self.label_index = np.random.randint(0, self.class_num,
                                             (self.batch_size))
        one_hot = np.zeros(self.x.shape).astype(self.dtype)
        one_hot[np.arange(self.batch_size), self.label_index] = 1
        self.label = one_hot

    def get_cross_entropy(self):
        rows = [[-np.log(self.x[i][self.label_index[i]])]
                for i in range(self.x.shape[0])]
        self.cross_entropy = np.asmatrix(rows).astype(self.dtype)

    def init_attr_type(self):
        self.soft_label = True

    def init_dtype_type(self):
        self.dtype = np.float64

    def init_bs_class_num(self):
        self.batch_size = 5
        self.class_num = 27

    def test_check_grad(self):
        self.check_grad(
            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)


class TestCrossEntropyOp4(TestCrossEntropyOp):
    """Test high rank tensor cross-entropy with discrete one-hot labels.
    """

    def init_x(self):
        # Build flat (ins_num, class_num) probabilities, then fold them
        # into a higher-rank tensor of shape self.shape + [class_num].
        self.shape = [10, 2, 4]
        self.ins_num = np.prod(np.array(self.shape))
        self.X_2d = randomize_probability(self.ins_num,
                                          self.class_num).astype(self.dtype)
        self.x = self.X_2d.reshape(self.shape + [self.class_num])

    def init_label(self):
        self.label_2d = np.random.randint(
            0, self.class_num, (self.ins_num, 1), dtype="int64")
        self.label = self.label_2d.reshape(self.shape + [1])

    def get_cross_entropy(self):
        # Compute the loss on the flat 2-D view, then restore the rank.
        rows = [[-np.log(self.X_2d[i][self.label_2d[i][0]])]
                for i in range(self.X_2d.shape[0])]
        cross_entropy_2d = np.asmatrix(rows).astype(self.dtype)
        self.cross_entropy = np.array(cross_entropy_2d).reshape(
            self.shape + [1])

    def init_attr_type(self):
        self.soft_label = False

    def init_dtype_type(self):
        self.dtype = np.float64

    def init_bs_class_num(self):
        self.class_num = 10


class TestCrossEntropyOp4RemoveLastDim(TestCrossEntropyOp4):
    """Test high rank tensor cross-entropy with discrete one-hot labels with shape [batch_size]
    """

    def init_label(self):
        self.label_2d = np.random.randint(
            0, self.class_num, (self.ins_num, 1), dtype="int64")
        # Drop the trailing singleton dimension from the label tensor.
        self.label = self.label_2d.reshape(self.shape)

    def get_cross_entropy(self):
        rows = [[-np.log(self.X_2d[i][self.label_2d[i][0]])]
                for i in range(self.X_2d.shape[0])]
        cross_entropy_2d = np.asmatrix(rows).astype(self.dtype)
        self.cross_entropy = np.array(cross_entropy_2d).reshape(self.shape)


class TestCrossEntropyOp5(TestCrossEntropyOp):
    """Test high rank tensor cross-entropy with vectorized soft labels.
    """

    def init_x(self):
        self.shape = [4, 3]
        self.ins_num = np.prod(np.array(self.shape))
        self.X_2d = randomize_probability(self.ins_num,
                                          self.class_num).astype(self.dtype)
        self.x = self.X_2d.reshape(self.shape + [self.class_num])

    def init_label(self):
        # Normalized soft labels on the flat view, reshaped to high rank.
        raw = np.random.uniform(
            0.1, 1.0, [self.ins_num, self.class_num]).astype(self.dtype)
        self.label_2d = raw / raw.sum(axis=1, keepdims=True)
        self.label = self.label_2d.reshape(self.shape + [self.class_num])

    def get_cross_entropy(self):
        per_class = -self.label_2d * np.log(self.X_2d)
        cross_entropy_2d = per_class.sum(
            axis=1, keepdims=True).astype(self.dtype)
        self.cross_entropy = np.array(cross_entropy_2d).reshape(
            self.shape + [1])

    def init_attr_type(self):
        self.soft_label = True

    def init_dtype_type(self):
        self.dtype = np.float64

    def init_bs_class_num(self):
        self.class_num = 37

    def test_check_grad(self):
        self.check_grad(
            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)


class TestCrossEntropyOp6(TestCrossEntropyOp):
    """Test high rank tensor cross-entropy with vectorized one-hot representation of labels.
    """

    def init_x(self):
        self.shape = [4, 3, 2]
        self.ins_num = np.prod(np.array(self.shape))
        self.X_2d = randomize_probability(self.ins_num,
                                          self.class_num).astype(self.dtype)
        self.x = self.X_2d.reshape(self.shape + [self.class_num])

    def init_label(self):
        # One-hot labels built on the flat view, then reshaped to high rank.
        self.label_index_2d = np.random.randint(
            0, self.class_num, (self.ins_num), dtype="int64")
        one_hot = np.zeros(self.X_2d.shape)
        one_hot[np.arange(self.ins_num), self.label_index_2d] = 1
        self.label = one_hot.reshape(self.shape + [self.class_num]).astype(
            self.dtype)

    def get_cross_entropy(self):
        rows = [[-np.log(self.X_2d[i][self.label_index_2d[i]])]
                for i in range(self.X_2d.shape[0])]
        cross_entropy_2d = np.asmatrix(rows)
        self.cross_entropy = np.array(cross_entropy_2d).reshape(
            self.shape + [1]).astype(self.dtype)

    def init_attr_type(self):
        self.soft_label = True

    def init_dtype_type(self):
        self.dtype = np.float64

    def init_bs_class_num(self):
        self.class_num = 17

    def test_check_grad(self):
        self.check_grad(
            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)


class TestCrossEntropyOp7(TestCrossEntropyOp):
    """Test cross-entropy with ignore index.
    """

    def init_label(self):
        self.label = np.random.randint(
            0, self.class_num, (self.batch_size, 1), dtype="int64")

    def get_cross_entropy(self):
        # Rows whose label equals ignore_index contribute zero loss.
        rows = []
        for i in range(self.x.shape[0]):
            if self.label[i][0] == self.ignore_index:
                rows.append([0])
            else:
                rows.append([-np.log(self.x[i][self.label[i][0]])])
        self.cross_entropy = np.asmatrix(rows).astype(self.dtype)

    def init_attr_type(self):
        self.soft_label = False
        self.ignore_index = 3

    def init_dtype_type(self):
        self.dtype = np.float64

    def init_bs_class_num(self):
        self.batch_size = 30
        self.class_num = 10


class TestCrossEntropyOp7RemoveLastDim(TestCrossEntropyOp7):
    """Test cross-entropy with ignore index with shape [batch_size].
    """

    def init_label(self):
        self.label = np.random.randint(
            0, self.class_num, (self.batch_size), dtype="int64")

    def get_cross_entropy(self):
        rows = []
        for i in range(self.x.shape[0]):
            if self.label[i] == self.ignore_index:
                rows.append([0])
            else:
                rows.append([-np.log(self.x[i][self.label[i]])])
        self.cross_entropy = np.asmatrix(rows).astype(self.dtype)
        # Flatten to shape [batch_size] to match the label layout.
        self.cross_entropy = np.array(self.cross_entropy).reshape(
            [self.batch_size]).astype(self.dtype)


# Add Fp16 test
def create_test_class(parent, cls_name):
    """Register an FP16, CUDA-only variant of *parent* under *cls_name*.

    The generated class forces float16 via the init_dtype_type hook and
    runs the output/gradient checks on a CUDA place, with the loose
    tolerances FP16 requires. It is skipped entirely on non-CUDA builds.
    """

    @unittest.skipIf(not core.is_compiled_with_cuda(),
                     "core is not compiled with CUDA")
    class TestCrossEntropyFP16Op(parent):

        def init_dtype_type(self):
            # BUGFIX: setUp() calls this hook for its side effect and
            # ignores the return value, so the previous `return np.float16`
            # left self.dtype at float64 — the "FP16" tests never actually
            # exercised float16. The hook must assign, like every override
            # of init_dtype_type in this file.
            self.dtype = np.float16

        def test_check_output(self):
            place = core.CUDAPlace(0)
            if core.is_float16_supported(place):
                # Loose atol: FP16 forward results are low precision.
                self.check_output_with_place(place, atol=2e-1)

        def test_check_grad(self):
            place = core.CUDAPlace(0)
            if core.is_float16_supported(place):
                self.check_grad_with_place(place, ['X'],
                                           'Y',
                                           max_relative_error=0.9)

    cls_name = "{0}".format(cls_name)
    TestCrossEntropyFP16Op.__name__ = cls_name
    globals()[cls_name] = TestCrossEntropyFP16Op


create_test_class(TestCrossEntropyOp, "TestCrossEntropyF16Op")
#create_test_class(TestCrossEntropyOp2, "TestCrossEntropyF16Op2")
create_test_class(TestCrossEntropyOp3, "TestCrossEntropyF16Op3")
create_test_class(TestCrossEntropyOp4, "TestCrossEntropyF16Op4")
create_test_class(TestCrossEntropyOp4RemoveLastDim,
                  "TestCrossEntropyF16Op4RemoveLastDim")
#create_test_class(TestCrossEntropyOp5, "TestCrossEntropyF16Op5")
create_test_class(TestCrossEntropyOp6, "TestCrossEntropyF16Op6")
create_test_class(TestCrossEntropyOp7, "TestCrossEntropyF16Op7")
create_test_class(TestCrossEntropyOp7RemoveLastDim,
                  "TestCrossEntropyF16Op7RemoveLastDim")


class TestCrossEntropyOpError(unittest.TestCase):
    """Check that the cross_entropy layer rejects invalid inputs."""

    def test_errors(self):
        with program_guard(Program(), Program()):

            def test_Variable():
                # Raw LoDTensors are not Variables, so the layer must
                # reject them with TypeError.
                x1 = fluid.create_lod_tensor(np.array([-1, 3, 5, 5]),
                                             [[1, 1, 1, 1]], fluid.CPUPlace())
                lab1 = fluid.create_lod_tensor(np.array([-1, 3, 5, 5]),
                                               [[1, 1, 1, 1]], fluid.CPUPlace())
                fluid.layers.cross_entropy(x1, lab1)

            self.assertRaises(TypeError, test_Variable)

            def test_dtype():
                # The input dtype must be float16/float32/float64
                # (float16 only on GPU places); int32 must be rejected.
                x2 = fluid.layers.data(name='x2',
                                       shape=[3, 4, 5, 6],
                                       dtype="int32")
                lab2 = fluid.layers.data(name='lab2',
                                         shape=[3, 4, 5, 6],
                                         dtype="int32")
                fluid.layers.cross_entropy(x2, lab2)

            self.assertRaises(TypeError, test_dtype)


if __name__ == "__main__":
    unittest.main()