from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import paddle


class NpairsLoss(paddle.nn.Layer):
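    """N-pairs metric-learning loss (Sohn, 2016): softmax cross entropy over the
    anchor-positive similarity matrix plus L2 regularization of the embeddings.
    """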
    def __init__(self, reg_lambda=0.01):
        super(NpairsLoss, self).__init__()
        self.reg_lambda = reg_lambda

    def forward(self, input, target=None):
        """
        Compute the N-pairs loss. input["features"] holds anchor and positive
        embeddings interleaved along the batch dimension (anchor_0, positive_0,
        anchor_1, positive_1, ...); the pairing itself defines the labels, so
        `target` is unused.
        """
        features = input["features"]
        reg_lambda = self.reg_lambda
        batch_size = features.shape[0]
        fea_dim = features.shape[1]
        num_class = batch_size // 2  # one class per (anchor, positive) pair

        # reshape to [num_class, 2, fea_dim]: each row pair is (anchor, positive)
        out_feas = paddle.reshape(features, shape=[-1, 2, fea_dim])
        # separate anchors and positives; each becomes [num_class, fea_dim]
        anc_feas, pos_feas = paddle.split(out_feas, num_or_sections=2, axis=1)
        anc_feas = paddle.squeeze(anc_feas, axis=1)
        pos_feas = paddle.squeeze(pos_feas, axis=1)

        # similarity matrix: [num_class, num_class]; entry (i, j) = anchor_i · positive_j
        similarity_matrix = paddle.matmul(
            anc_feas, pos_feas, transpose_y=True)
        # the matching positive for anchor_i sits on the diagonal, so label_i = i
        sparse_labels = paddle.arange(0, num_class, dtype='int64')
        # softmax cross entropy over the similarity rows (mean reduction by default)
        xentloss = paddle.nn.CrossEntropyLoss()(
            similarity_matrix, sparse_labels)

        # L2 regularization on the embedding norms
        reg = paddle.mean(paddle.sum(paddle.square(features), axis=1))
        l2loss = 0.5 * reg_lambda * reg
        return {"npairsloss": xentloss + l2loss}
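

# A minimal usage sketch (an assumption for illustration, not part of the original
# file): the dict key "features" and the interleaved anchor/positive batch layout
# follow the forward() implementation above.
if __name__ == "__main__":
    loss_fn = NpairsLoss(reg_lambda=0.01)
    feats = paddle.randn([8, 128])  # 4 (anchor, positive) pairs of 128-dim embeddings
    out = loss_fn({"features": feats})
    print(out["npairsloss"])  # scalar loss tensor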