Commit 586af751 authored by weishengyu

rename head -> gears

Parent 55943da6
@@ -18,10 +18,10 @@ import importlib
 import paddle.nn as nn
 from . import backbone
-from . import head
+from . import gears
 from .backbone import *
-from .head import *
+from .gears import *
 from .utils import *

 __all__ = ["build_model", "RecModel"]
...
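For downstream users, this commit changes the public import path of the head builders. A minimal before/after sketch, assuming this __init__.py sits at ppcls.arch as in PaddleClas (the full file paths are not visible in this view):

    # before this commit (hypothetical consumer code)
    # from ppcls.arch.head import build_head

    # after this commit
    from ppcls.arch.gears import build_head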
@@ -19,10 +19,11 @@ from .fc import FC
 __all__ = ['build_head']

 def build_head(config):
     support_dict = ['ArcMargin', 'CosMargin', 'CircleMargin', 'FC']
     module_name = config.pop('name')
-    assert module_name in support_dict, Exception('head only support {}'.format(
-        support_dict))
+    assert module_name in support_dict, Exception(
+        'head only support {}'.format(support_dict))
     module_class = eval(module_name)(**config)
     return module_class
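Note that build_head pops the 'name' key (mutating the caller's dict) and forwards every remaining entry as a constructor keyword argument, so the config must carry exactly the parameters the chosen class accepts. A sketch with illustrative values:

    config = {
        "name": "CosMargin",    # selects the class from support_dict
        "embedding_size": 512,  # the rest become constructor kwargs
        "class_num": 1000,
        "margin": 0.35,
        "scale": 64.0,
    }
    head = build_head(config)   # config no longer contains "name" afterwards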
@@ -17,26 +17,28 @@ import paddle
 import paddle.nn as nn
 import paddle.nn.functional as F

 class CircleMargin(nn.Layer):
-    def __init__(self, embedding_size,
-                 class_num,
-                 margin,
-                 scale):
+    def __init__(self, embedding_size, class_num, margin, scale):
         super(CircleMargin, self).__init__()
         self.scale = scale
         self.margin = margin
         self.embedding_size = embedding_size
         self.class_num = class_num
-        weight_attr = paddle.ParamAttr(initializer = paddle.nn.initializer.XavierNormal())
-        self.fc0 = paddle.nn.Linear(self.embedding_size, self.class_num, weight_attr=weight_attr)
+        weight_attr = paddle.ParamAttr(
+            initializer=paddle.nn.initializer.XavierNormal())
+        self.fc0 = paddle.nn.Linear(
+            self.embedding_size, self.class_num, weight_attr=weight_attr)

     def forward(self, input, label):
-        feat_norm = paddle.sqrt(paddle.sum(paddle.square(input), axis=1, keepdim=True))
+        feat_norm = paddle.sqrt(
+            paddle.sum(paddle.square(input), axis=1, keepdim=True))
         input = paddle.divide(input, feat_norm)
         weight = self.fc0.weight
-        weight_norm = paddle.sqrt(paddle.sum(paddle.square(weight), axis=0, keepdim=True))
+        weight_norm = paddle.sqrt(
+            paddle.sum(paddle.square(weight), axis=0, keepdim=True))
         weight = paddle.divide(weight, weight_norm)
         logits = paddle.matmul(input, weight)
...
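The forward pass above L2-normalizes the features row-wise and the fc0 weights column-wise, so logits holds the cosine similarity between each embedding and each class weight vector. A sketch of the same computation using paddle.nn.functional.normalize, which folds the sqrt/sum/divide into one call (equivalent up to a small stabilizing epsilon):

    import paddle
    import paddle.nn.functional as F

    x = paddle.randn([8, 512])     # batch of embeddings
    w = paddle.randn([512, 1000])  # fc0.weight: [embedding_size, class_num]

    logits = paddle.matmul(F.normalize(x, axis=1), F.normalize(w, axis=0))
    # logits[i, j] is cos(theta) between embedding i and class vector j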
@@ -16,28 +16,33 @@ import paddle
 import math
 import paddle.nn as nn

 class CosMargin(paddle.nn.Layer):
-    def __init__(self, embedding_size,
-                 class_num,
-                 margin=0.35,
-                 scale=64.0):
+    def __init__(self, embedding_size, class_num, margin=0.35, scale=64.0):
         super(CosMargin, self).__init__()
         self.scale = scale
         self.margin = margin
         self.embedding_size = embedding_size
         self.class_num = class_num
-        weight_attr = paddle.ParamAttr(initializer = paddle.nn.initializer.XavierNormal())
-        self.fc = nn.Linear(self.embedding_size, self.class_num, weight_attr=weight_attr, bias_attr=False)
+        weight_attr = paddle.ParamAttr(
+            initializer=paddle.nn.initializer.XavierNormal())
+        self.fc = nn.Linear(
+            self.embedding_size,
+            self.class_num,
+            weight_attr=weight_attr,
+            bias_attr=False)

     def forward(self, input, label):
         label.stop_gradient = True
-        input_norm = paddle.sqrt(paddle.sum(paddle.square(input), axis=1, keepdim=True))
+        input_norm = paddle.sqrt(
+            paddle.sum(paddle.square(input), axis=1, keepdim=True))
         input = paddle.divide(input, input_norm)
         weight = self.fc.weight
-        weight_norm = paddle.sqrt(paddle.sum(paddle.square(weight), axis=0, keepdim=True))
+        weight_norm = paddle.sqrt(
+            paddle.sum(paddle.square(weight), axis=0, keepdim=True))
         weight = paddle.divide(weight, weight_norm)
         cos = paddle.matmul(input, weight)
...
@@ -45,6 +50,7 @@ class CosMargin(paddle.nn.Layer):
         one_hot = paddle.nn.functional.one_hot(label, self.class_num)
         one_hot = paddle.squeeze(one_hot, axis=[1])
-        output = paddle.multiply(one_hot, cos_m) + paddle.multiply((1.0 - one_hot), cos)
+        output = paddle.multiply(one_hot, cos_m) + paddle.multiply(
+            (1.0 - one_hot), cos)
         output = output * self.scale
         return output
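The lines collapsed between the two hunks compute cos_m; in the standard CosFace formulation that is cos - margin, so the one_hot selection yields scale * (cos(theta_y) - margin) at the target class and scale * cos(theta) everywhere else. A sketch of the selection step (the cos_m definition is an assumption about the elided code):

    import paddle

    class_num, scale, margin = 1000, 64.0, 0.35
    cos = paddle.rand([8, class_num])            # cosine logits from the matmul above
    label = paddle.randint(0, class_num, [8, 1])

    cos_m = cos - margin                         # assumed definition of the elided cos_m
    one_hot = paddle.nn.functional.one_hot(label, class_num)
    one_hot = paddle.squeeze(one_hot, axis=[1])  # [8, 1, class_num] -> [8, class_num]
    output = paddle.multiply(one_hot, cos_m) + paddle.multiply((1.0 - one_hot), cos)
    output = output * scale                      # margin applies only at the target class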
@@ -19,14 +19,16 @@ from __future__ import print_function
 import paddle
 import paddle.nn as nn

 class FC(nn.Layer):
-    def __init__(self, embedding_size,
-                 class_num):
+    def __init__(self, embedding_size, class_num):
         super(FC, self).__init__()
         self.embedding_size = embedding_size
         self.class_num = class_num
-        weight_attr = paddle.ParamAttr(initializer = paddle.nn.initializer.XavierNormal())
-        self.fc = paddle.nn.Linear(self.embedding_size, self.class_num, weight_attr=weight_attr)
+        weight_attr = paddle.ParamAttr(
+            initializer=paddle.nn.initializer.XavierNormal())
+        self.fc = paddle.nn.Linear(
+            self.embedding_size, self.class_num, weight_attr=weight_attr)

     def forward(self, input, label):
         out = self.fc(input)
...
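Unlike the margin heads, FC never reads its label argument; keeping the same forward(input, label) signature is what lets build_head swap the four heads interchangeably. A quick usage sketch with illustrative sizes, assuming the truncated tail of forward returns out:

    import paddle

    head = FC(embedding_size=512, class_num=1000)
    feat = paddle.randn([8, 512])
    label = paddle.randint(0, 1000, [8, 1])
    logits = head(feat, label)  # label is accepted but unused
    print(logits.shape)         # [8, 1000]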