Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
PaddlePaddle
PaddleClas
提交
586af751
P
PaddleClas
项目概览
PaddlePaddle
/
PaddleClas
1 年多 前同步成功
通知
116
Star
4999
Fork
1114
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
19
列表
看板
标记
里程碑
合并请求
6
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
PaddleClas
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
19
Issue
19
列表
看板
标记
里程碑
合并请求
6
合并请求
6
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
586af751
编写于
6月 03, 2021
作者:
W
weishengyu
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
rename head -> gears
上级
55943da6
变更
6
隐藏空白更改
内联
并排
Showing
6 changed files
with
48 additions
and
37 deletions
+48
-37
ppcls/arch/__init__.py
ppcls/arch/__init__.py
+2
-2
ppcls/arch/gears/__init__.py
ppcls/arch/gears/__init__.py
+3
-2
ppcls/arch/gears/arcmargin.py
ppcls/arch/gears/arcmargin.py
+0
-0
ppcls/arch/gears/circlemargin.py
ppcls/arch/gears/circlemargin.py
+16
-14
ppcls/arch/gears/cosmargin.py
ppcls/arch/gears/cosmargin.py
+20
-14
ppcls/arch/gears/fc.py
ppcls/arch/gears/fc.py
+7
-5
未找到文件。
ppcls/arch/__init__.py
浏览文件 @
586af751
...
...
@@ -18,10 +18,10 @@ import importlib
import
paddle.nn
as
nn
from
.
import
backbone
from
.
import
head
from
.
import
gears
from
.backbone
import
*
from
.
head
import
*
from
.
gears
import
*
from
.utils
import
*
__all__
=
[
"build_model"
,
"RecModel"
]
...
...
ppcls/arch/
head
/__init__.py
→
ppcls/arch/
gears
/__init__.py
浏览文件 @
586af751
...
...
@@ -19,10 +19,11 @@ from .fc import FC
__all__
=
[
'build_head'
]
def build_head(config):
    """Build a recognition head ("gear") layer from a config dict.

    Args:
        config (dict): layer config; must contain a 'name' key naming one of
            the supported classes, the remaining keys are passed to that
            class's constructor as keyword arguments. 'name' is popped, so
            the dict is mutated.

    Returns:
        An instance of the named head class.

    Raises:
        AssertionError: if 'name' is not one of the supported class names.
        KeyError: if config has no 'name' key.
    """
    support_dict = ['ArcMargin', 'CosMargin', 'CircleMargin', 'FC']

    module_name = config.pop('name')
    # Fix: the assert message must be a plain string; the original passed an
    # Exception *instance* as the message, which prints as a wrapped object.
    assert module_name in support_dict, \
        'head only support {}'.format(support_dict)
    # NOTE(review): eval() on a config-supplied value — the whitelist assert
    # above constrains it, but a dict mapping name -> class would be safer.
    module_class = eval(module_name)(**config)
    return module_class
ppcls/arch/
head
/arcmargin.py
→
ppcls/arch/
gears
/arcmargin.py
浏览文件 @
586af751
文件已移动
ppcls/arch/
head
/circlemargin.py
→
ppcls/arch/
gears
/circlemargin.py
浏览文件 @
586af751
...
...
@@ -16,30 +16,32 @@ import math
import
paddle
import
paddle.nn
as
nn
import
paddle.nn.functional
as
F
class CircleMargin(nn.Layer):
    """Circle-margin classification head (CircleLoss-style logits).

    Holds a bias-free-equivalent linear projection `fc0` whose weight is
    normalized in forward() (defined later in this file).

    Args:
        embedding_size (int): dimension of the input feature vector.
        class_num (int): number of output classes.
        margin (float): circle-loss relaxation margin.
        scale (float): logit scaling factor.
    """

    def __init__(self, embedding_size, class_num, margin, scale):
        # BUG FIX: original called super(CircleSoftmax, self).__init__();
        # CircleSoftmax is not defined in this module (only math, paddle,
        # nn and F are imported), so instantiation raised NameError.
        super(CircleMargin, self).__init__()
        self.scale = scale
        self.margin = margin
        self.embedding_size = embedding_size
        self.class_num = class_num

        weight_attr = paddle.ParamAttr(
            initializer=paddle.nn.initializer.XavierNormal())
        self.fc0 = paddle.nn.Linear(
            self.embedding_size, self.class_num, weight_attr=weight_attr)
def
forward
(
self
,
input
,
label
):
feat_norm
=
paddle
.
sqrt
(
paddle
.
sum
(
paddle
.
square
(
input
),
axis
=
1
,
keepdim
=
True
))
feat_norm
=
paddle
.
sqrt
(
paddle
.
sum
(
paddle
.
square
(
input
),
axis
=
1
,
keepdim
=
True
))
input
=
paddle
.
divide
(
input
,
feat_norm
)
weight
=
self
.
fc0
.
weight
weight_norm
=
paddle
.
sqrt
(
paddle
.
sum
(
paddle
.
square
(
weight
),
axis
=
0
,
keepdim
=
True
))
weight_norm
=
paddle
.
sqrt
(
paddle
.
sum
(
paddle
.
square
(
weight
),
axis
=
0
,
keepdim
=
True
))
weight
=
paddle
.
divide
(
weight
,
weight_norm
)
logits
=
paddle
.
matmul
(
input
,
weight
)
logits
=
paddle
.
matmul
(
input
,
weight
)
alpha_p
=
paddle
.
clip
(
-
logits
.
detach
()
+
1
+
self
.
margin
,
min
=
0.
)
alpha_n
=
paddle
.
clip
(
logits
.
detach
()
+
self
.
margin
,
min
=
0.
)
...
...
@@ -51,5 +53,5 @@ class CircleMargin(nn.Layer):
logits_n
=
alpha_n
*
(
logits
-
delta_n
)
pre_logits
=
logits_p
*
m_hot
+
logits_n
*
(
1
-
m_hot
)
pre_logits
=
self
.
scale
*
pre_logits
return
pre_logits
ppcls/arch/
head
/cosmargin.py
→
ppcls/arch/
gears
/cosmargin.py
浏览文件 @
586af751
...
...
@@ -16,35 +16,41 @@ import paddle
import
math
import
paddle.nn
as
nn
class CosMargin(paddle.nn.Layer):
    """CosFace-style (additive cosine margin) classification head.

    Computes L2-normalized feature/weight cosine similarities, subtracts a
    fixed margin from the target-class cosine, and scales the result.

    Args:
        embedding_size (int): dimension of the input feature vector.
        class_num (int): number of output classes.
        margin (float): additive cosine margin m. Default 0.35.
        scale (float): logit scaling factor s. Default 64.0.
    """

    def __init__(self, embedding_size, class_num, margin=0.35, scale=64.0):
        super(CosMargin, self).__init__()
        self.scale = scale
        self.margin = margin
        self.embedding_size = embedding_size
        self.class_num = class_num

        weight_attr = paddle.ParamAttr(
            initializer=paddle.nn.initializer.XavierNormal())
        # bias_attr=False: cosine similarity needs a pure linear projection.
        self.fc = nn.Linear(
            self.embedding_size,
            self.class_num,
            weight_attr=weight_attr,
            bias_attr=False)

    def forward(self, input, label):
        """Return scaled cos-margin logits of shape (batch, class_num).

        Args:
            input: feature tensor, assumed (batch, embedding_size) — the
                axis=1 norm below implies a 2-D input.
            label: integer class labels; gradients through it are stopped.
        """
        label.stop_gradient = True

        # L2-normalize the features along the embedding axis.
        input_norm = paddle.sqrt(
            paddle.sum(paddle.square(input), axis=1, keepdim=True))
        # BUG FIX: original divided by undefined name `x_norm` (NameError);
        # the computed norm was bound to `input_norm` and never used.
        input = paddle.divide(input, input_norm)

        # L2-normalize each class weight column.
        weight = self.fc.weight
        weight_norm = paddle.sqrt(
            paddle.sum(paddle.square(weight), axis=0, keepdim=True))
        weight = paddle.divide(weight, weight_norm)

        # Cosine similarity between normalized features and weights.
        cos = paddle.matmul(input, weight)
        cos_m = cos - self.margin

        # Apply the margin only at the target class.
        one_hot = paddle.nn.functional.one_hot(label, self.class_num)
        one_hot = paddle.squeeze(one_hot, axis=[1])
        output = paddle.multiply(one_hot, cos_m) + paddle.multiply(
            (1.0 - one_hot), cos)
        output = output * self.scale
        return output
ppcls/arch/
head
/fc.py
→
ppcls/arch/
gears
/fc.py
浏览文件 @
586af751
...
...
@@ -19,14 +19,16 @@ from __future__ import print_function
import
paddle
import
paddle.nn
as
nn
class FC(nn.Layer):
    """Plain fully-connected classification head.

    Args:
        embedding_size (int): dimension of the input feature vector.
        class_num (int): number of output classes.
    """

    def __init__(self, embedding_size, class_num):
        super(FC, self).__init__()
        self.embedding_size = embedding_size
        self.class_num = class_num
        # Xavier-initialized linear projection from embedding to class logits.
        initializer = paddle.nn.initializer.XavierNormal()
        param_attr = paddle.ParamAttr(initializer=initializer)
        self.fc = paddle.nn.Linear(self.embedding_size,
                                   self.class_num,
                                   weight_attr=param_attr)
def
forward
(
self
,
input
,
label
):
out
=
self
.
fc
(
input
)
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录