Merge branch 'develop_reg' of https://github.com/weisy11/PaddleClas into develop_reg

pull/794/head
weishengyu 2021-06-06 18:22:06 +08:00
commit 2921934016
3 changed files with 44 additions and 57 deletions

File 1 of 3

@@ -148,9 +148,9 @@ Infer:
 Metric:
   Train:
-    - Topk:
-        k: [1, 5]
+    - TopkAcc:
+        topk: [1, 5]
   Eval:
-    - Topk:
-        k: [1, 5]
+    - TopkAcc:
+        topk: [1, 5]
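
The renamed TopkAcc entry takes a `topk` list instead of the old `k` key. Below is a minimal sketch of a metric layer matching this config shape, built on paddle.metric.accuracy; the class body is illustrative and not necessarily the exact PaddleClas implementation.

import paddle
import paddle.nn as nn

class TopkAcc(nn.Layer):
    # Sketch: top-k accuracy for the `topk` values listed in the config above.
    def __init__(self, topk=(1, 5)):
        super().__init__()
        if isinstance(topk, int):
            topk = [topk]
        self.topk = topk

    def forward(self, x, label):
        # x: [N, num_classes] predictions, label: [N, 1] int64 class ids
        metric_dict = dict()
        for k in self.topk:
            metric_dict["top{}".format(k)] = paddle.metric.accuracy(x, label, k=k)
        return metric_dict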

File 2 of 3

@@ -1,6 +1,4 @@
 # global configs
-Trainer:
-  name: TrainerReID
 Global:
   checkpoints: null
   pretrained_model: null
@@ -16,8 +14,7 @@ Global:
   # used for static mode and model export
   image_shape: [3, 224, 224]
   save_inference_dir: "./inference"
-  num_split: 1
-  feature_normalize: True
+  eval_mode: "retrieval"

 # model architecture
 Arch:
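
With the TrainerReID entry gone, the eval_mode flag is what selects retrieval-style evaluation. A hedged sketch of the kind of dispatch this enables; the function and method names here are illustrative, not the actual PaddleClas engine API:

def run_eval(config, engine):
    # Branch on Global.eval_mode; "classification" is assumed as the default here.
    eval_mode = config["Global"].get("eval_mode", "classification")
    if eval_mode == "retrieval":
        return engine.eval_retrieval()       # query/gallery features, Recallk/mAP
    return engine.eval_classification()      # plain TopkAcc over the Eval dataloader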
@@ -99,7 +96,7 @@ DataLoader:
     loader:
       num_workers: 6
       use_shared_memory: False
-  Eval:
   Query:
     # TOTO: modify to the latest trainer
     dataset:
@@ -146,6 +143,12 @@ DataLoader:
       num_workers: 6
       use_shared_memory: False
 
+Metric:
+  Eval:
+    - Recallk:
+        topk: [1, 5]
+    - mAP: {}
+
 Infer:
   infer_imgs: "docs/images/whl/demo.jpg"
   batch_size: 10
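
The new Metric.Eval block is a list of single-key entries, each naming a metric class and its keyword arguments (an empty mapping for mAP). A hedged sketch of how such a list could be turned into metric instances; build_metrics and metrics_module are illustrative names, not necessarily the PaddleClas builder:

import copy

def build_metrics(metric_cfg_list, metrics_module):
    # metric_cfg_list is e.g. [{"Recallk": {"topk": [1, 5]}}, {"mAP": {}}]
    instances = []
    for item in metric_cfg_list:
        cfg = copy.deepcopy(item)
        name, kwargs = list(cfg.items())[0]
        instances.append(getattr(metrics_module, name)(**(kwargs or {})))
    return instances

At evaluation time each instance would be called with the similarity matrix and the query/gallery image ids, and the returned dictionaries merged into a single report.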

File 3 of 3

@@ -15,6 +15,7 @@
 import numpy as np
 import paddle
 import paddle.nn as nn
+from functools import lru_cache
 
 # TODO: fix the format
@@ -38,23 +39,13 @@ class TopkAcc(nn.Layer):
 class mAP(nn.Layer):
-    def __init__(self, max_rank=50):
+    def __init__(self):
         super().__init__()
-        self.max_rank = max_rank
 
     def forward(self, similarities_matrix, query_img_id, gallery_img_id):
         metric_dict = dict()
-        num_q, num_g = similarities_matrix.shape
-        q_pids = query_img_id.numpy().reshape((query_img_id.shape[0]))
-        g_pids = gallery_img_id.numpy().reshape((gallery_img_id.shape[0]))
-        if num_g < self.max_rank:
-            self.max_rank = num_g
-            print('Note: number of gallery samples is quite small, got {}'.
-                  format(num_g))
-        indices = paddle.argsort(
-            similarities_matrix, axis=1, descending=True).numpy()
-        _, all_AP, _ = get_metrics(indices, num_q, num_g, q_pids, g_pids,
-                                   self.max_rank)
+        _, all_AP, _ = get_metrics(similarities_matrix, query_img_id,
+                                   gallery_img_id)
 
         mAP = np.mean(all_AP)
         metric_dict["mAP"] = mAP
@@ -62,23 +53,13 @@ class mAP(nn.Layer):
 class mINP(nn.Layer):
-    def __init__(self, max_rank=50):
+    def __init__(self):
         super().__init__()
-        self.max_rank = max_rank
 
     def forward(self, similarities_matrix, query_img_id, gallery_img_id):
         metric_dict = dict()
-        num_q, num_g = similarities_matrix.shape
-        q_pids = query_img_id.numpy().reshape((query_img_id.shape[0]))
-        g_pids = gallery_img_id.numpy().reshape((gallery_img_id.shape[0]))
-        if num_g < self.max_rank:
-            max_rank = num_g
-            print('Note: number of gallery samples is quite small, got {}'.
-                  format(num_g))
-        indices = paddle.argsort(
-            similarities_matrix, axis=1, descending=True).numpy()
-        _, _, all_INP = get_metrics(indices, num_q, num_g, q_pids, g_pids,
-                                    self.max_rank)
+        _, _, all_INP = get_metrics(similarities_matrix, query_img_id,
+                                    gallery_img_id)
 
         mINP = np.mean(all_INP)
         metric_dict["mINP"] = mINP
@@ -86,34 +67,37 @@ class mINP(nn.Layer):
 class Recallk(nn.Layer):
-    def __init__(self, max_rank=50, topk=(1, 5)):
+    def __init__(self, topk=(1, 5)):
         super().__init__()
-        self.max_rank = max_rank
         assert isinstance(topk, (int, list))
         if isinstance(topk, int):
             topk = [topk]
         self.topk = topk
+        self.max_rank = max(self.topk) if max(self.topk) > 50 else 50
 
     def forward(self, similarities_matrix, query_img_id, gallery_img_id):
         metric_dict = dict()
-        num_q, num_g = similarities_matrix.shape
-        q_pids = query_img_id.numpy().reshape((query_img_id.shape[0]))
-        g_pids = gallery_img_id.numpy().reshape((gallery_img_id.shape[0]))
-        if num_g < self.max_rank:
-            max_rank = num_g
-            print('Note: number of gallery samples is quite small, got {}'.
-                  format(num_g))
-        indices = paddle.argsort(
-            similarities_matrix, axis=1, descending=True).numpy()
-        all_cmc, _, _ = get_metrics(indices, num_q, num_g, q_pids, g_pids,
-                                    self.max_rank)
+        all_cmc, _, _ = get_metrics(similarities_matrix, query_img_id,
+                                    gallery_img_id, self.max_rank)
 
         for k in self.topk:
             metric_dict["recall{}".format(k)] = all_cmc[k - 1]
         return metric_dict
 
 
-def get_metrics(indices, num_q, num_g, q_pids, g_pids, max_rank=50):
+@lru_cache()
+def get_metrics(similarities_matrix, query_img_id, gallery_img_id,
+                max_rank=50):
+    num_q, num_g = similarities_matrix.shape
+    q_pids = query_img_id.numpy().reshape((query_img_id.shape[0]))
+    g_pids = gallery_img_id.numpy().reshape((gallery_img_id.shape[0]))
+    if num_g < max_rank:
+        max_rank = num_g
+        print('Note: number of gallery samples is quite small, got {}'.format(
+            num_g))
+    indices = paddle.argsort(
+        similarities_matrix, axis=1, descending=True).numpy()
     all_cmc = []
     all_AP = []
     all_INP = []
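
Taken together, the three metric classes now delegate the ranking bookkeeping to the shared get_metrics helper. A hedged usage sketch, assuming Recallk, mAP, and mINP are imported from the module patched above, id tensors are shaped [N, 1], and the remainder of get_metrics (not shown in this hunk) is present; the feature tensors below are random placeholders:

import paddle

query_feats = paddle.rand([8, 128])          # stand-ins for extracted features
gallery_feats = paddle.rand([32, 128])
query_img_id = paddle.randint(0, 4, [8, 1])
gallery_img_id = paddle.randint(0, 4, [32, 1])

# One similarity row per query, one column per gallery image.
sim = paddle.matmul(query_feats, gallery_feats, transpose_y=True)

print(Recallk(topk=[1, 5])(sim, query_img_id, gallery_img_id))
print(mAP()(sim, query_img_id, gallery_img_id))
print(mINP()(sim, query_img_id, gallery_img_id))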