From bb4c2c6df0e5513a4be8036b49e26568bdbfbdab Mon Sep 17 00:00:00 2001
From: Bin Lu
Date: Mon, 31 May 2021 14:17:31 +0800
Subject: [PATCH] Create __init__.py

---
 ppcls/losses/__init__.py | 55 ++++++++++++++++++++++++++++++++++++++++
 1 file changed, 55 insertions(+)
 create mode 100644 ppcls/losses/__init__.py

diff --git a/ppcls/losses/__init__.py b/ppcls/losses/__init__.py
new file mode 100644
index 00000000..c6c7c83f
--- /dev/null
+++ b/ppcls/losses/__init__.py
@@ -0,0 +1,55 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import math
+import paddle
+import paddle.nn as nn
+import paddle.nn.functional as F
+
+class CircleMargin(nn.Layer):
+    def __init__(self, embedding_size,
+                 class_num,
+                 margin,
+                 scale):
+        super(CircleMargin, self).__init__()
+        self.scale = scale
+        self.margin = margin
+        self.embedding_size = embedding_size
+        self.class_num = class_num
+
+        weight_attr = paddle.ParamAttr(initializer=paddle.nn.initializer.XavierNormal())
+        self.fc0 = paddle.nn.Linear(self.embedding_size, self.class_num, weight_attr=weight_attr)
+
+    def forward(self, input, label):
+        feat_norm = paddle.sqrt(paddle.sum(paddle.square(input), axis=1, keepdim=True))
+        input = paddle.divide(input, feat_norm)
+
+        weight = self.fc0.weight
+        weight_norm = paddle.sqrt(paddle.sum(paddle.square(weight), axis=0, keepdim=True))
+        weight = paddle.divide(weight, weight_norm)
+
+        logits = paddle.matmul(input, weight)
+
+        alpha_p = paddle.clip(-logits.detach() + 1 + self.margin, min=0.)
+        alpha_n = paddle.clip(logits.detach() + self.margin, min=0.)
+        delta_p = 1 - self.margin
+        delta_n = self.margin
+        index = paddle.fluid.layers.where(label != -1).reshape([-1])
+        m_hot = F.one_hot(label.reshape([-1]), num_classes=logits.shape[1])
+        logits_p = alpha_p * (logits - delta_p)
+        logits_n = alpha_n * (logits - delta_n)
+        pre_logits = logits_p * m_hot + logits_n * (1 - m_hot)
+        pre_logits = self.scale * pre_logits
+
+        return pre_logits
--
GitLab
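
A minimal usage sketch of the layer added above, for review purposes only (not part of the patch); the embedding size, class count, margin, scale, and batch shapes below are illustrative assumptions, not values taken from this change:

import paddle
import paddle.nn.functional as F
from ppcls.losses import CircleMargin  # module created by this patch

# Illustrative hyperparameters (assumed, not from the patch).
head = CircleMargin(embedding_size=128, class_num=1000, margin=0.35, scale=64)

feats = paddle.randn([8, 128])                  # batch of feature embeddings
labels = paddle.randint(0, 1000, shape=[8, 1])  # integer class ids

logits = head(feats, labels)                          # margin-adjusted, scaled logits
loss = F.cross_entropy(logits, labels.reshape([-1]))  # standard cross-entropy on top

The layer itself only returns re-scaled logits; pairing it with a softmax cross-entropy loss, as sketched here, is one common way such margin heads are trained.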