rename head -> gears
parent 55943da6d1
commit 586af751ac
@@ -18,10 +18,10 @@ import importlib
 import paddle.nn as nn
 
 from . import backbone
-from . import head
+from . import gears
 
 from .backbone import *
-from .head import *
+from .gears import *
 from .utils import *
 
 __all__ = ["build_model", "RecModel"]
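For downstream code, only the module name changes; the factory API stays the same. A minimal sketch of a call site, assuming the package lives at `ppcls.arch` (file paths are not shown in this diff, so the import path here is hypothetical):

# Hypothetical call site; the ppcls.arch path is assumed, not shown above.
#   before: from ppcls.arch.head import build_head
#   after:  from ppcls.arch.gears import build_head
from ppcls.arch.gears import build_head

head = build_head({"name": "FC", "embedding_size": 512, "class_num": 1000})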
@@ -19,10 +19,11 @@ from .fc import FC
 
 __all__ = ['build_head']
 
 
 def build_head(config):
     support_dict = ['ArcMargin', 'CosMargin', 'CircleMargin', 'FC']
     module_name = config.pop('name')
-    assert module_name in support_dict, Exception('head only support {}'.format(
-        support_dict))
+    assert module_name in support_dict, Exception(
+        'head only support {}'.format(support_dict))
     module_class = eval(module_name)(**config)
     return module_class
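`build_head` pops the `name` key and forwards the remaining entries as constructor keyword arguments, so a config dict maps one-to-one onto a head's `__init__`. A minimal usage sketch with illustrative values:

# Illustrative values only. Note that config.pop('name') mutates the dict,
# so pass a copy if the config is reused elsewhere.
config = {
    "name": "CosMargin",   # must appear in support_dict
    "embedding_size": 512,
    "class_num": 1000,
    "margin": 0.35,
    "scale": 64.0,
}
head = build_head(config)  # equivalent to CosMargin(embedding_size=512, ...)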
@@ -17,29 +17,31 @@ import paddle
 import paddle.nn as nn
 import paddle.nn.functional as F
 
 
 class CircleMargin(nn.Layer):
-    def __init__(self, embedding_size,
-                 class_num,
-                 margin,
-                 scale):
+    def __init__(self, embedding_size, class_num, margin, scale):
         super(CircleMargin, self).__init__()
         self.scale = scale
         self.margin = margin
         self.embedding_size = embedding_size
         self.class_num = class_num
 
-        weight_attr = paddle.ParamAttr(initializer = paddle.nn.initializer.XavierNormal())
-        self.fc0 = paddle.nn.Linear(self.embedding_size, self.class_num, weight_attr=weight_attr)
+        weight_attr = paddle.ParamAttr(
+            initializer=paddle.nn.initializer.XavierNormal())
+        self.fc0 = paddle.nn.Linear(
+            self.embedding_size, self.class_num, weight_attr=weight_attr)
 
     def forward(self, input, label):
-        feat_norm = paddle.sqrt(paddle.sum(paddle.square(input), axis=1, keepdim=True))
+        feat_norm = paddle.sqrt(
+            paddle.sum(paddle.square(input), axis=1, keepdim=True))
         input = paddle.divide(input, feat_norm)
 
         weight = self.fc0.weight
-        weight_norm = paddle.sqrt(paddle.sum(paddle.square(weight), axis=0, keepdim=True))
+        weight_norm = paddle.sqrt(
+            paddle.sum(paddle.square(weight), axis=0, keepdim=True))
         weight = paddle.divide(weight, weight_norm)
 
         logits = paddle.matmul(input, weight)
 
         alpha_p = paddle.clip(-logits.detach() + 1 + self.margin, min=0.)
         alpha_n = paddle.clip(logits.detach() + self.margin, min=0.)
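The visible part of `forward` L2-normalizes both the features and the classifier weights, takes their matrix product as cosine similarities, and computes the Circle-Loss weighting terms; the hunk is truncated before the final logits are assembled. A standalone sketch of just the margin terms, with toy numbers:

# Standalone sketch of the Circle-Loss margin terms computed above
# (toy values). For a cosine similarity s, the positive and negative
# weights are alpha_p = [1 + m - s]_+ and alpha_n = [s + m]_+.
import paddle

margin = 0.25
logits = paddle.to_tensor([[0.9, -0.2, 0.1]])        # toy cosine similarities
alpha_p = paddle.clip(-logits + 1 + margin, min=0.)  # -> [[0.35, 1.45, 1.15]]
alpha_n = paddle.clip(logits + margin, min=0.)       # -> [[1.15, 0.05, 0.35]]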
@@ -16,35 +16,41 @@ import paddle
 import math
 import paddle.nn as nn
 
 
 class CosMargin(paddle.nn.Layer):
-    def __init__(self, embedding_size,
-                 class_num,
-                 margin=0.35,
-                 scale=64.0):
+    def __init__(self, embedding_size, class_num, margin=0.35, scale=64.0):
         super(CosMargin, self).__init__()
         self.scale = scale
         self.margin = margin
         self.embedding_size = embedding_size
         self.class_num = class_num
 
-        weight_attr = paddle.ParamAttr(initializer = paddle.nn.initializer.XavierNormal())
-        self.fc = nn.Linear(self.embedding_size, self.class_num, weight_attr=weight_attr, bias_attr=False)
+        weight_attr = paddle.ParamAttr(
+            initializer=paddle.nn.initializer.XavierNormal())
+        self.fc = nn.Linear(
+            self.embedding_size,
+            self.class_num,
+            weight_attr=weight_attr,
+            bias_attr=False)
 
     def forward(self, input, label):
         label.stop_gradient = True
 
-        input_norm = paddle.sqrt(paddle.sum(paddle.square(input), axis=1, keepdim=True))
+        input_norm = paddle.sqrt(
+            paddle.sum(paddle.square(input), axis=1, keepdim=True))
         input = paddle.divide(input, input_norm)
 
         weight = self.fc.weight
-        weight_norm = paddle.sqrt(paddle.sum(paddle.square(weight), axis=0, keepdim=True))
+        weight_norm = paddle.sqrt(
+            paddle.sum(paddle.square(weight), axis=0, keepdim=True))
         weight = paddle.divide(weight, weight_norm)
 
         cos = paddle.matmul(input, weight)
         cos_m = cos - self.margin
 
         one_hot = paddle.nn.functional.one_hot(label, self.class_num)
         one_hot = paddle.squeeze(one_hot, axis=[1])
-        output = paddle.multiply(one_hot, cos_m) + paddle.multiply((1.0 - one_hot), cos)
+        output = paddle.multiply(one_hot, cos_m) + paddle.multiply(
+            (1.0 - one_hot), cos)
         output = output * self.scale
         return output
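Here `forward` is shown in full: the target-class logit becomes `scale * (cos(theta) - margin)` while every other logit stays `scale * cos(theta)`. A usage sketch with illustrative shapes:

# Illustrative shapes. Labels have shape [batch, 1], matching the
# squeeze(one_hot, axis=[1]) above; logits come out as [batch, class_num].
import paddle

head = CosMargin(embedding_size=128, class_num=10)  # margin=0.35, scale=64.0
feat = paddle.rand([4, 128])
label = paddle.randint(0, 10, [4, 1])
logits = head(feat, label)                          # shape [4, 10]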
@@ -19,14 +19,16 @@ from __future__ import print_function
 import paddle
 import paddle.nn as nn
 
 
 class FC(nn.Layer):
-    def __init__(self, embedding_size,
-                 class_num):
+    def __init__(self, embedding_size, class_num):
         super(FC, self).__init__()
         self.embedding_size = embedding_size
         self.class_num = class_num
-        weight_attr = paddle.ParamAttr(initializer = paddle.nn.initializer.XavierNormal())
-        self.fc = paddle.nn.Linear(self.embedding_size, self.class_num, weight_attr=weight_attr)
+        weight_attr = paddle.ParamAttr(
+            initializer=paddle.nn.initializer.XavierNormal())
+        self.fc = paddle.nn.Linear(
+            self.embedding_size, self.class_num, weight_attr=weight_attr)
 
     def forward(self, input, label):
         out = self.fc(input)
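`FC` is the plain, margin-free head; in the lines shown, `label` is unused and appears to exist only so the call signature matches the margin heads. The hunk ends inside `forward`, which presumably goes on to return `out`; a sketch that exercises only the code shown:

# Illustrative shapes; calls the underlying Linear directly, since the
# tail of forward() falls outside this hunk.
import paddle

head = FC(embedding_size=128, class_num=10)
feat = paddle.rand([4, 128])
out = head.fc(feat)  # plain linear projection, shape [4, 10]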