def smooth_l1_loss(pred: Tensor, label: Tensor, beta: float = 1.0) -> Tensor:
    r"""
    Calculate the smooth l1 loss proposed in `Fast R-CNN paper by Ross Girshick`.

    The smooth l1 loss can be described as:

    .. math::
        \text{loss}(x, y) = \frac{1}{n} \sum_{i} l_{i}

    where :math:`l_{i}` is given by:

    .. math::
        l_{i} =
        \begin{cases}
        0.5 (x_i - y_i)^2 / \text{beta}, & \text{if } |x_i - y_i| < \text{beta} \\
        |x_i - y_i| - 0.5 \text{beta}, & \text{otherwise }
        \end{cases}

    :param pred: The predicted result from model.
    :param label: The ground truth to compare.
    :param beta: The transition point between the quadratic and the linear
        branch. Must be positive. Default: 1.0, which reproduces the original
        Fast R-CNN formulation.

    Examples:

    .. testcode::

        from megengine import tensor
        import megengine.functional as F

        pred = tensor([[0.5, -0.5, 0.1], [-0.6, 0.7, 0.8]])
        label = tensor([[0.4, 1.5, 1.2], [0., 0.1, 2.2]])

        loss = F.smooth_l1_loss(pred, label)

        print(loss.numpy())

    Outputs:

    .. testoutput::

        [0.5608334]
    """
    if beta <= 0:
        raise ValueError("beta must be positive, got {}".format(beta))
    # Element-wise absolute error between prediction and ground truth.
    diff = abs(pred - label)
    # Quadratic branch: used for small errors (< beta) so gradients stay
    # bounded and smooth near zero.
    l2_loss = 0.5 * (diff ** 2) / beta
    # Linear branch: used for large errors, limiting gradient magnitude
    # and making the loss robust to outliers.
    l1_loss = diff - 0.5 * beta
    mask = diff < beta
    loss = where(mask, l2_loss, l1_loss)
    return loss.mean()
def test_smooth_l1_loss():
    # Fixed seed so the randomly drawn cases are reproducible.
    np.random.seed(123)

    def _reference(x, y):
        # Numpy reference implementation of smooth l1: quadratic below an
        # absolute error of 1, linear above, averaged over all elements.
        err = np.abs(x - y)
        return np.where(err < 1, 0.5 * err ** 2, err - 0.5).mean()

    cases = []
    for shape in ((2, 2), (2, 3)):
        # Draw prediction first, then label, to keep the RNG call order
        # identical for every case.
        prediction = np.random.uniform(size=shape).astype(np.float32)
        ground_truth = np.random.uniform(size=shape).astype(np.float32)
        cases.append(
            {
                "input": [prediction, ground_truth],
                "output": tensor(_reference(prediction, ground_truth)),
            }
        )

    opr_test(cases, F.smooth_l1_loss)