import unittest
import numpy as np
from op_test import OpTest


def huber_loss_forward(val, delta):
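    # Scalar reference for the Huber loss: 0.5 * r^2 when |r| <= delta,
    # delta * (|r| - 0.5 * delta) otherwise.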
    abs_val = abs(val)
    if abs_val <= delta:
        return 0.5 * val * val
    else:
        return delta * (abs_val - 0.5 * delta)


class TestHuberLossOp(OpTest):
    def setUp(self):
        self.op_type = 'huber_loss'
        samples_num = 64
        delta = 1.0
        self.inputs = {
            'X': np.random.uniform(0, 1., (samples_num, 1)).astype('float32'),
            'Y': np.random.uniform(0, 1., (samples_num, 1)).astype('float32'),
        }
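        # Elementwise residual between label Y and prediction X; it is also
        # returned as the 'Residual' output below.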
        residual = self.inputs['Y'] - self.inputs['X']
        loss = np.vectorize(huber_loss_forward)(residual,
                                                delta).astype('float32')
        self.attrs = {'delta': delta}
        self.outputs = {
            'Residual': residual,
            'Out': loss.reshape((samples_num, 1))
        }

    def test_check_output(self):
        self.check_output()

    def test_check_grad_normal(self):
        self.check_grad(['X', 'Y'], 'Out', max_relative_error=0.008)

    def test_check_grad_ignore_x(self):
        # Check the gradient w.r.t. Y only; X is excluded via no_grad_set.
        self.check_grad(
            ['Y'], 'Out', max_relative_error=0.008, no_grad_set=set(['X']))

    def test_check_grad_ignore_y(self):
        # Check the gradient w.r.t. X only; Y is excluded via no_grad_set.
        self.check_grad(
            ['X'], 'Out', max_relative_error=0.008, no_grad_set=set(['Y']))


if __name__ == '__main__':
    unittest.main()