#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest
import numpy as np
from op_test import OpTest
import paddle


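# Reference computation for the label_smooth op: given a one-hot label X and a
# smoothing factor epsilon, the expected output is
#     Out = (1 - epsilon) * X + epsilon / num_classes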
class TestLabelSmoothOp(OpTest):
    def config(self):
        self.op_type = "label_smooth"
        self.python_api = paddle.nn.functional.label_smooth
        self.epsilon = 0.1
        batch_size, self.label_dim = 10, 12
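        # Build a random one-hot label of shape [batch_size, label_dim].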
        self.label = np.zeros((batch_size, self.label_dim)).astype("float64")
        nonzero_index = np.random.randint(self.label_dim, size=(batch_size))
        self.label[np.arange(batch_size), nonzero_index] = 1

    def setUp(self):
        self.config()
        smoothed_label = ((1 - self.epsilon) * self.label +
                          self.epsilon / self.label_dim)
        self.inputs = {'X': self.label}
        self.attrs = {'epsilon': self.epsilon}
        self.outputs = {'Out': smoothed_label}

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        self.check_grad(["X"], "Out", check_eager=True)


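# When a PriorDist input is provided, the op smooths towards that distribution
# instead of the uniform one:
#     Out = (1 - epsilon) * X + epsilon * PriorDist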
class TestLabelSmoothOpWithPriorDist(TestLabelSmoothOp):
    def setUp(self):
        self.config()
        dist = np.random.random((1, self.label_dim))
        smoothed_label = (1 - self.epsilon) * self.label + self.epsilon * dist
        self.inputs = {'X': self.label, 'PriorDist': dist}
        self.attrs = {'epsilon': self.epsilon}
        self.outputs = {'Out': smoothed_label}


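# Reshape the labels to rank 3 to check that the op handles inputs with more
# than two dimensions.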
class TestLabelSmoothOp3D(TestLabelSmoothOp):
    def setUp(self):
        super(TestLabelSmoothOp3D, self).setUp()
        self.inputs['X'] = self.inputs['X'].reshape(
            [2, -1, self.inputs['X'].shape[-1]])
        self.outputs['Out'] = self.outputs['Out'].reshape(
            self.inputs['X'].shape)


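# Same rank-3 reshape check as above, but with a PriorDist input.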
class TestLabelSmoothOpWithPriorDist3D(TestLabelSmoothOpWithPriorDist):
    def setUp(self):
        super(TestLabelSmoothOpWithPriorDist3D, self).setUp()
        self.inputs['X'] = self.inputs['X'].reshape(
            [2, -1, self.inputs['X'].shape[-1]])
        self.outputs['Out'] = self.outputs['Out'].reshape(
            self.inputs['X'].shape)


if __name__ == '__main__':
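    # Enable static graph mode before running the op tests.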
    paddle.enable_static()
    unittest.main()