Unverified commit 90418d79, authored by Bin Lu, committed by GitHub

Update __init__.py

Parent bb4c2c6d
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import copy
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
class CircleMargin(nn.Layer):
    def __init__(self, embedding_size, class_num, margin, scale):
        super(CircleMargin, self).__init__()
        self.scale = scale
        self.margin = margin
        self.embedding_size = embedding_size
        self.class_num = class_num

        weight_attr = paddle.ParamAttr(
            initializer=paddle.nn.initializer.XavierNormal())
        self.fc0 = paddle.nn.Linear(
            self.embedding_size, self.class_num, weight_attr=weight_attr)

    def forward(self, input, label):
        # L2-normalize the input features along the embedding axis.
        feat_norm = paddle.sqrt(
            paddle.sum(paddle.square(input), axis=1, keepdim=True))
        input = paddle.divide(input, feat_norm)

        # L2-normalize the classifier weights column-wise, so the matmul
        # below produces cosine similarities.
        weight = self.fc0.weight
        weight_norm = paddle.sqrt(
            paddle.sum(paddle.square(weight), axis=0, keepdim=True))
        weight = paddle.divide(weight, weight_norm)

        logits = paddle.matmul(input, weight)

        # Circle-loss style adaptive re-weighting: alpha_p/alpha_n scale the
        # positive/negative logits; delta_p/delta_n are the decision margins.
        alpha_p = paddle.clip(-logits.detach() + 1 + self.margin, min=0.)
        alpha_n = paddle.clip(logits.detach() + self.margin, min=0.)
        delta_p = 1 - self.margin
        delta_n = self.margin

        # Note: `index` is computed in the original code but never used below.
        index = paddle.fluid.layers.where(label != -1).reshape([-1])
        m_hot = F.one_hot(label.reshape([-1]), num_classes=logits.shape[1])

        logits_p = alpha_p * (logits - delta_p)
        logits_n = alpha_n * (logits - delta_n)
        pre_logits = logits_p * m_hot + logits_n * (1 - m_hot)
        pre_logits = self.scale * pre_logits

        return pre_logits
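A minimal smoke-test sketch for this head follows; the batch size, embedding size, class count, and the margin/scale values are illustrative assumptions, not values taken from this commit.

# Usage sketch (illustrative only, not part of the commit):
head = CircleMargin(embedding_size=128, class_num=10, margin=0.35, scale=64)
feats = paddle.randn([8, 128])            # batch of 8 embeddings
labels = paddle.randint(0, 10, [8, 1])    # one class id per sample
out = head(feats, labels)
print(out.shape)                          # expected: [8, 10]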
from .celoss import CELoss
from .triplet import TripletLoss, TripletLossV2
from .msmloss import MSMLoss
from .emlloss import EmlLoss
from .npairsloss import NpairsLoss
from .trihardloss import TriHardLoss
from .centerloss import CenterLoss
from ppcls.utils import logger  # needed by build_loss below
class CombinedLoss(nn.Layer):
    def __init__(self, config_list):
        super().__init__()
        self.loss_func = []
        self.loss_weight = []
        assert isinstance(config_list, list), (
            'operator config should be a list')
        for config in config_list:
            assert isinstance(
                config, dict) and len(config) == 1, "yaml format error"
            name = list(config)[0]
            param = config[name]
            assert "weight" in param, \
                "weight must be in param, but param just contains {}".format(
                    param.keys())
            self.loss_weight.append(param.pop("weight"))
            # Instantiate the loss class named in the config with its params.
            self.loss_func.append(eval(name)(**param))
    def __call__(self, input, batch):
        loss_dict = {}
        for idx, loss_func in enumerate(self.loss_func):
            loss = loss_func(input, batch)
            weight = self.loss_weight[idx]
            loss = {key: loss[key] * weight for key in loss}
            loss_dict.update(loss)
        # The total loss is the sum of all weighted components.
        loss_dict["loss"] = paddle.add_n(list(loss_dict.values()))
        return loss_dict
def build_loss(config):
    module_class = CombinedLoss(config)
    logger.info("build loss {} success.".format(module_class))
    return module_class
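A hedged usage sketch for the combined loss: the config below mirrors the structure the constructor asserts on (a list of single-key dicts, each value carrying a "weight" field); the loss names and parameter values are illustrative assumptions, not from this commit.

# Usage sketch (illustrative only): a weighted sum of two losses.
loss_config = [
    {"CELoss": {"weight": 1.0}},
    {"TripletLossV2": {"weight": 1.0, "margin": 0.5}},
]
loss_fn = build_loss(loss_config)
# loss_fn(input, batch) returns a dict holding each weighted term plus
# their sum under the "loss" key.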