import unittest
import numpy as np
from op_test import OpTest


def smooth_l1_loss_forward(val, sigma2):
    """Scalar smooth-L1 (Huber-style) loss reference.

    Quadratic (0.5 * sigma2 * val^2) when |val| < 1/sigma2, linear
    (|val| - 0.5/sigma2) otherwise. Used via np.vectorize to build the
    expected output of the smooth_l1_loss operator.
    """
    magnitude = abs(val)
    if magnitude < 1.0 / sigma2:
        # Keep the original multiplication order for bit-exact float results.
        return 0.5 * val * val * sigma2
    return magnitude - 0.5 / sigma2


class TestSmoothL1LossOp1(OpTest):
    """Output and gradient checks for smooth_l1_loss with plain X/Y inputs
    (no InsideWeight/OutsideWeight)."""

    def setUp(self):
        self.op_type = "smooth_l1_loss"
        shape = (5, 10)
        x = np.random.random(shape).astype("float32")
        y = np.random.random(shape).astype("float32")
        self.inputs = {'X': x, 'Y': y}
        sigma = 3.0
        self.attrs = {'sigma': sigma}
        sigma2 = sigma * sigma
        diff = self.inputs['X'] - self.inputs['Y']
        # Per-sample loss: elementwise smooth-L1 summed over the feature axis,
        # reshaped to a column vector of shape (batch, 1).
        row_loss = np.vectorize(smooth_l1_loss_forward)(diff, sigma2).sum(1)
        row_loss = row_loss.reshape((shape[0], 1))
        self.outputs = {
            'Diff': diff.astype('float32'),
            'Out': row_loss.astype('float32')
        }

    def test_check_output(self):
        self.check_output()

    def test_check_grad_normal(self):
        self.check_grad(['X', 'Y'], 'Out', max_relative_error=0.02)

    # NOTE(review): "ingore" is a typo for "ignore", kept as-is because the
    # method name is the discovered test ID.
    def test_check_grad_ingore_x(self):
        self.check_grad(
            ['Y'], 'Out', max_relative_error=0.03, no_grad_set=set("X"))

    def test_check_grad_ingore_y(self):
        self.check_grad(
            ['X'], 'Out', max_relative_error=0.03, no_grad_set=set('Y'))


class TestSmoothL1LossOp2(OpTest):
    """Output and gradient checks for smooth_l1_loss with InsideWeight scaling
    the residual and OutsideWeight scaling the elementwise loss."""

    def setUp(self):
        self.op_type = "smooth_l1_loss"
        shape = (5, 10)
        self.inputs = {
            'X': np.random.random(shape).astype("float32"),
            'Y': np.random.random(shape).astype("float32"),
            'InsideWeight': np.random.random(shape).astype("float32"),
            'OutsideWeight': np.random.random(shape).astype("float32")
        }
        sigma = 3.0
        self.attrs = {'sigma': sigma}
        sigma2 = sigma * sigma
        # InsideWeight is applied to the residual before the smooth-L1
        # transform; OutsideWeight is applied to the transformed loss after.
        diff = self.inputs['X'] - self.inputs['Y']
        diff = diff * self.inputs['InsideWeight']
        elem_loss = np.vectorize(smooth_l1_loss_forward)(diff, sigma2)
        elem_loss = elem_loss * self.inputs['OutsideWeight']
        loss = elem_loss.sum(1).reshape((shape[0], 1))
        self.outputs = {
            'Diff': diff.astype('float32'),
            'Out': loss.astype('float32')
        }

    def test_check_output(self):
        self.check_output()

    def test_check_grad_normal(self):
        self.check_grad(['X', 'Y'], 'Out', max_relative_error=0.03)

    # NOTE(review): "ingore" is a typo for "ignore", kept as-is because the
    # method name is the discovered test ID. Weight inputs carry no gradient,
    # so they are excluded alongside the frozen variable.
    def test_check_grad_ingore_x(self):
        self.check_grad(
            ['Y'],
            'Out',
            max_relative_error=0.03,
            no_grad_set=set(['X', 'InsideWeight', 'OutsideWeight']))

    def test_check_grad_ingore_y(self):
        self.check_grad(
            ['X'],
            'Out',
            max_relative_error=0.03,
            no_grad_set=set(['Y', 'InsideWeight', 'OutsideWeight']))


# Run the operator tests when this file is executed as a script.
if __name__ == '__main__':
    unittest.main()