pull/1819/head
weishengyu 2022-04-08 14:29:03 +08:00
parent 7f0b7a04cd
commit 9de22673df
5 changed files with 10 additions and 10 deletions

View File

@@ -8,8 +8,8 @@ class BNNeck(paddle.nn.Layer):
         self.bn = paddle.nn.BatchNorm1D(
             self.num_filters)
-        if not trainable:
-            self.bn.bias.trainable = False
+        # if not trainable:
+        #     self.bn.bias.trainable = False
 
     def forward(self, input, label=None):
         out = self.bn(input)
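For context, the lines being commented out froze the BN bias (beta) so that only the scale (gamma) and the running statistics kept training. A minimal standalone sketch of that now-disabled behaviour, assuming a 2048-dim feature vector (the value used in the config below):

```python
import paddle

# Sketch of the behaviour this change disables: freezing the bias (beta) of the
# BN-neck so only the scale (gamma) and running statistics are updated.
bn = paddle.nn.BatchNorm1D(2048)   # num_filters = 2048 in the config below
bn.bias.trainable = False          # the line that is now commented out
# equivalently: bn.bias.stop_gradient = True

x = paddle.rand([16, 2048])        # a batch of flattened backbone features
y = bn(x)                          # the forward pass itself is unchanged
```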

View File

@@ -21,14 +21,14 @@ import paddle.nn as nn
 class FC(nn.Layer):
-    def __init__(self, embedding_size, class_num):
+    def __init__(self, embedding_size, class_num, bias_attr=None):
         super(FC, self).__init__()
         self.embedding_size = embedding_size
         self.class_num = class_num
         weight_attr = paddle.ParamAttr(
             initializer=paddle.nn.initializer.XavierNormal())
         self.fc = paddle.nn.Linear(
-            self.embedding_size, self.class_num, weight_attr=weight_attr)
+            self.embedding_size, self.class_num, weight_attr=weight_attr, bias_attr=bias_attr)
 
     def forward(self, input, label=None):
         out = self.fc(input)
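A hedged usage sketch of the new bias_attr argument: it is forwarded to paddle.nn.Linear, so passing False builds a bias-free classifier, which is what the updated config below requests (the sizes 2048/751 are taken from that config):

```python
import paddle

# bias_attr is forwarded to paddle.nn.Linear; False means no bias parameter is created.
weight_attr = paddle.ParamAttr(initializer=paddle.nn.initializer.XavierNormal())
fc = paddle.nn.Linear(2048, 751, weight_attr=weight_attr, bias_attr=False)

print(fc.bias)                         # None: the layer has no bias term
logits = fc(paddle.rand([16, 2048]))   # shape [16, 751]
```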

View File

@@ -26,14 +26,14 @@ Arch:
     stem_act: null
   BackboneStopLayer:
     name: "flatten"
-  #Neck:
-  #  name: BNNeck
-  #  num_filters: 2048
-  #  trainable: false
+  Neck:
+    name: BNNeck
+    num_filters: 2048
   Head:
     name: "FC"
     embedding_size: 2048
     class_num: 751
+    bias_attr: false
 
 # loss function config for traing/eval process
 Loss:
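To make the intent of the Arch change concrete, here is a rough Python equivalent of what the uncommented Neck and the bias-free Head amount to; the wiring is an assumption based on the config keys, not the actual PaddleClas builder:

```python
import paddle

# Neck: BNNeck with num_filters: 2048 (its bias is left trainable after this change)
neck = paddle.nn.BatchNorm1D(2048)
# Head: FC with embedding_size: 2048, class_num: 751, bias_attr: false
head = paddle.nn.Linear(2048, 751, bias_attr=False)

feat = paddle.rand([16, 2048])   # flattened backbone output
feat = neck(feat)                # BN-neck
logits = head(feat)              # bias-free classifier over 751 identities
```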

View File

@@ -125,7 +125,7 @@ def cal_feature(engine, name='gallery'):
         out = engine.model(batch[0], batch[1])
         if "Student" in out:
             out = out["Student"]
-        batch_feas = out["features"]
+        batch_feas = out["backbone"]
         # do norm
         if engine.config["Global"].get("feature_normalize", True):
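A small hedged sketch of what this rename means for the retrieval loop: the feature tensor is now read from the model's output dict under the "backbone" key and optionally L2-normalized when feature_normalize is enabled (the dict literal below is a stand-in for the real engine.model(...) output):

```python
import paddle
import paddle.nn.functional as F

out = {"backbone": paddle.rand([4, 2048])}   # stand-in for engine.model(batch[0], batch[1])
batch_feas = out["backbone"]                 # was out["features"] before this change

feature_normalize = True                     # mirrors Global.feature_normalize
if feature_normalize:
    batch_feas = F.normalize(batch_feas, p=2, axis=1)   # per-row L2 norm
```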

View File

@@ -24,7 +24,7 @@ class TripletLossV2(nn.Layer):
             inputs: feature matrix with shape (batch_size, feat_dim)
             target: ground truth labels with shape (num_classes)
         """
-        inputs = input["features"]
+        inputs = input["backbone"]
 
         if self.normalize_feature:
             inputs = 1. * inputs / (paddle.expand_as(
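Likewise for the triplet loss: it now indexes the "backbone" entry of the output dict before the optional feature normalization. A hedged, self-contained sketch, using F.normalize as a stand-in for the expand_as-based normalization in the file:

```python
import paddle
import paddle.nn.functional as F

input = {"backbone": paddle.rand([8, 256])}  # stand-in for the dict handed to the loss
inputs = input["backbone"]                   # was input["features"] before this change

normalize_feature = True                     # mirrors self.normalize_feature
if normalize_feature:
    # stand-in for the per-row L2 normalization done with paddle.expand_as in the file
    inputs = F.normalize(inputs, p=2, axis=1)
```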