Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
PaddlePaddle
PaddleClas
提交
90418d79
P
PaddleClas
项目概览
PaddlePaddle
/
PaddleClas
1 年多 前同步成功
通知
115
Star
4999
Fork
1114
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
19
列表
看板
标记
里程碑
合并请求
6
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
PaddleClas
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
19
Issue
19
列表
看板
标记
里程碑
合并请求
6
合并请求
6
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
未验证
提交
90418d79
编写于
5月 31, 2021
作者:
B
Bin Lu
提交者:
GitHub
5月 31, 2021
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
Update __init__.py
上级
bb4c2c6d
变更
1
隐藏空白更改
内联
并排
Showing
1 changed file
with
40 additions
and
50 deletions
+40
-50
ppcls/losses/__init__.py
ppcls/losses/__init__.py
+40
-50
未找到文件。
ppcls/losses/__init__.py
浏览文件 @
90418d79
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import
math
import
copy
import
paddle
import
paddle.nn
as
nn
import
paddle.nn.functional
as
F
class CircleMargin(nn.Layer):
    """Circle-margin classification head.

    L2-normalizes the input features (and, in a later fragment of this diff,
    the classifier weights) so the logits are cosine similarities, then applies
    Circle-loss style margin rescaling.

    Args:
        embedding_size (int): dimensionality of the input feature vectors.
        class_num (int): number of output classes.
        margin (float): circle-loss margin ``m``.
        scale (float): scale factor applied to the final logits.
    """

    def __init__(self, embedding_size, class_num, margin, scale):
        # FIX: the original called ``super(CircleSoftmax, self).__init__()``;
        # ``CircleSoftmax`` is undefined here and would raise NameError at
        # construction time. The enclosing class is ``CircleMargin``.
        super(CircleMargin, self).__init__()
        self.scale = scale
        self.margin = margin
        self.embedding_size = embedding_size
        self.class_num = class_num

        weight_attr = paddle.ParamAttr(
            initializer=paddle.nn.initializer.XavierNormal())
        self.fc0 = paddle.nn.Linear(
            self.embedding_size, self.class_num, weight_attr=weight_attr)

    def forward(self, input, label):
        # L2-normalize the features along the embedding axis so the matmul
        # against the (normalized) classifier weights yields cosine logits.
        feat_norm = paddle.sqrt(
            paddle.sum(paddle.square(input), axis=1, keepdim=True))
        input = paddle.divide(input, feat_norm)
        # NOTE(review): this diff hunk is truncated here -- the remainder of
        # forward() (weight normalization, logits, margin rescaling) appears
        # in later hunks of the same diff.
from
.celoss
import
CELoss
from
.triplet
import
TripletLoss
,
TripletLossV2
from
.msmloss
import
MSMLoss
from
.emlloss
import
EmlLoss
from
.npairsloss
import
NpairsLoss
from
.trihardloss
import
TriHardLoss
from
.centerloss
import
CenterLoss
class CombinedLoss(nn.Layer):
    """Combine several loss functions, each scaled by a configured weight.

    Args:
        config_list (list[dict]): each element is a single-key dict mapping a
            loss class name (resolved via ``eval``) to its constructor
            parameters; the parameters must contain a ``"weight"`` entry
            giving the scale applied to that loss's outputs.
    """

    def __init__(self, config_list):
        super().__init__()
        self.loss_func = []
        self.loss_weight = []
        assert isinstance(config_list, list), (
            'operator config should be a list')
        for config in config_list:
            # FIX: removed a stray debug ``print(config)`` left in the loop.
            assert isinstance(config, dict) and len(config) == 1, \
                "yaml format error"
            name = list(config)[0]
            param = config[name]
            assert "weight" in param, \
                "weight must be in param, but param just contains {}".format(
                    param.keys())
            # NOTE(review): ``pop`` mutates the caller's config dict, and
            # ``eval(name)`` executes arbitrary names from the config -- this
            # is only acceptable for trusted, in-repo YAML configs; do not
            # feed it untrusted input.
            self.loss_weight.append(param.pop("weight"))
            self.loss_func.append(eval(name)(**param))
        # NOTE(review): diff fragment -- continues CircleMargin.forward().
        # Normalize each classifier weight column to unit L2 norm so that the
        # matmul below against the already-normalized features produces
        # cosine-similarity logits.
        weight = self.fc0.weight
        weight_norm = paddle.sqrt(
            paddle.sum(paddle.square(weight), axis=0, keepdim=True))
        weight = paddle.divide(weight, weight_norm)
        # Cosine logits: (batch, embedding_size) x (embedding_size, class_num).
        logits = paddle.matmul(input, weight)
def
__call__
(
self
,
input
,
batch
):
loss_dict
=
{}
for
idx
,
loss_func
in
enumerate
(
self
.
loss_func
):
loss
=
loss_func
(
input
,
batch
)
weight
=
self
.
loss_weight
[
idx
]
loss
=
{
key
:
loss
[
key
]
*
weight
for
key
in
loss
}
loss_dict
.
update
(
loss
)
loss_dict
[
"loss"
]
=
paddle
.
add_n
(
list
(
loss_dict
.
values
()))
return
loss_dict
        # Circle-loss style adaptive weights: alpha_p/alpha_n re-weight the
        # positive/negative cosine logits by how far each is from its optimum
        # (detach() keeps the weighting factors out of the gradient).
        alpha_p = paddle.clip(-logits.detach() + 1 + self.margin, min=0.)
        alpha_n = paddle.clip(logits.detach() + self.margin, min=0.)
        # Decision margins: positives should exceed 1 - margin, negatives
        # should stay below margin.
        delta_p = 1 - self.margin
        delta_n = self.margin
        # NOTE(review): ``index`` is computed but never used below -- likely a
        # leftover from a variant that filtered out label == -1 samples;
        # confirm before removing.
        index = paddle.fluid.layers.where(label != -1).reshape([-1])
        # One-hot mask selecting each sample's target-class column.
        m_hot = F.one_hot(label.reshape([-1]), num_classes=logits.shape[1])
        logits_p = alpha_p * (logits - delta_p)
        logits_n = alpha_n * (logits - delta_n)
        # Positive-branch logits at target positions, negative-branch
        # elsewhere, then globally scaled.
        pre_logits = logits_p * m_hot + logits_n * (1 - m_hot)
        pre_logits = self.scale * pre_logits
        return pre_logits
def build_loss(config):
    """Build the composite training loss described by *config*.

    Args:
        config: loss configuration list, forwarded to ``CombinedLoss``.

    Returns:
        The constructed ``CombinedLoss`` instance.
    """
    # NOTE(review): ``logger`` is not defined in this chunk -- presumably
    # imported elsewhere in the file; verify.
    loss = CombinedLoss(config)
    logger.info("build loss {} success.".format(loss))
    return loss
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录