v1.3.6: added University-1652
parent 6e498f8b17
commit 93b8c9f3db
@@ -33,7 +33,7 @@ You can find some research projects that are built on top of Torchreid `here <ht

What's new
------------
- [Feb 2021] We support the new multi-view multi-source geo-localization dataset `University-1652 <https://dl.acm.org/doi/abs/10.1145/3394171.3413896>`_.
- [Feb 2021] ``v1.3.6`` Added `University-1652 <https://dl.acm.org/doi/abs/10.1145/3394171.3413896>`_, a new dataset for multi-view multi-source geo-localization (credit to `Zhedong Zheng <https://github.com/layumi>`_).
- [Feb 2021] ``v1.3.5``: Now the `cython code <https://github.com/KaiyangZhou/deep-person-reid/pull/412>`_ works on Windows (credit to `lablabla <https://github.com/lablabla>`_).
- [Jan 2021] Our recent work, `MixStyle <https://openreview.net/forum?id=6xHJ37MVxxp>`_ (mixing instance-level feature statistics of samples of different domains for improving domain generalization), has been accepted to ICLR'21. The code has been released at https://github.com/KaiyangZhou/mixstyle-release where the person re-ID part is based on Torchreid.
- [Jan 2021] A new evaluation metric called `mean Inverse Negative Penalty (mINP)` for person re-ID has been introduced in `Deep Learning for Person Re-identification: A Survey and Outlook (TPAMI 2021) <https://arxiv.org/abs/2001.04193>`_. Their code can be accessed at `<https://github.com/mangye16/ReID-Survey>`_.
@@ -232,7 +232,7 @@ Image-reid datasets
- `PRID <https://pdfs.semanticscholar.org/4c1b/f0592be3e535faf256c95e27982db9b3d3d3.pdf>`_

Geo-localization datasets
^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^
- `University-1652 <https://dl.acm.org/doi/abs/10.1145/3394171.3413896>`_

Video-reid datasets
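For context (not part of the diff): once registered, the new dataset can be loaded through the same data manager as the other image-reid datasets, which is what the docstring referenced in the hunks below hints at. A minimal sketch, assuming the dataset is registered under the key ``university1652`` and that square 256x256 inputs suit the drone/satellite views (both are assumptions, not taken verbatim from this commit)::

    import torchreid

    # Hedged usage sketch; key name and input size are assumptions.
    datamanager = torchreid.data.ImageDataManager(
        root='reid-data',            # 'university1652/' is created underneath
        sources='university1652',
        targets='university1652',
        height=256,
        width=256,
        batch_size_train=32,
        batch_size_test=100,
        transforms=['random_flip']
    )
    train_loader = datamanager.train_loader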
@@ -2,7 +2,7 @@ from __future__ import print_function, absolute_import

from torchreid import data, optim, utils, engine, losses, models, metrics

__version__ = '1.3.5'
__version__ = '1.3.6'
__author__ = 'Kaiyang Zhou'
__homepage__ = 'https://kaiyangzhou.github.io/'
__description__ = 'Deep learning person re-identification in PyTorch'
@@ -2,7 +2,7 @@ from __future__ import print_function, absolute_import

from .image import (
    GRID, PRID, CUHK01, CUHK02, CUHK03, MSMT17, VIPeR, SenseReID, Market1501,
    DukeMTMCreID, iLIDS, University1652
    DukeMTMCreID, University1652, iLIDS
)
from .video import PRID2011, Mars, DukeMTMCVidReID, iLIDSVID
from .dataset import Dataset, ImageDataset, VideoDataset
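Importing ``University1652`` here only exposes the class; torchreid also keeps a mapping from string keys to dataset classes so the data manager can resolve ``sources='university1652'``. A hedged sketch of what that mapping typically looks like (the dict name and the key are assumptions about code not shown in this diff)::

    # Hypothetical registration step, not part of this hunk:
    __image_datasets = {
        'market1501': Market1501,
        'dukemtmcreid': DukeMTMCreID,
        'university1652': University1652,   # assumed key for the new dataset
        # ... remaining image datasets ...
    }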
@@ -1,8 +1,8 @@
from __future__ import division, print_function, absolute_import
import os
import re
import glob
import os.path as osp
import os
import gdown

from ..dataset import ImageDataset
@@ -48,18 +48,23 @@ datamanager = torchreid.data.ImageDataManager(
    """
    dataset_dir = 'university1652'
    dataset_url = 'https://drive.google.com/uc?id=1iVnP4gjw-iHXa0KerZQ1IfIO0i1jADsR'

    def __init__(self, root='', **kwargs):
        self.root = osp.abspath(osp.expanduser(root))
        self.dataset_dir = osp.join(self.root, self.dataset_dir)
        print(self.dataset_dir)
        if not os.path.isdir(self.dataset_dir):
            os.mkdir(self.dataset_dir)
            gdown.download(self.dataset_url, self.dataset_dir+'data.zip', quiet=False)
            gdown.download(
                self.dataset_url, self.dataset_dir + 'data.zip', quiet=False
            )
            os.system('unzip %s' % (self.dataset_dir + 'data.zip'))
        self.train_dir = osp.join(
            self.dataset_dir, 'University-Release/train/'
        )
        self.query_dir = osp.join(self.dataset_dir, 'University-Release/test/query_drone')
        self.query_dir = osp.join(
            self.dataset_dir, 'University-Release/test/query_drone'
        )
        self.gallery_dir = osp.join(
            self.dataset_dir, 'University-Release/test/gallery_satellite'
        )
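The download path above shells out to ``unzip``, which assumes the tool is on the PATH. A hedged sketch of a platform-independent equivalent using only ``gdown`` and the standard library (the helper name and layout are illustrative, not from the commit)::

    import os
    import os.path as osp
    import zipfile

    import gdown

    def download_and_extract(url, dst_dir):
        """Fetch the archive with gdown, then unpack it with zipfile
        instead of calling the external `unzip` binary."""
        os.makedirs(dst_dir, exist_ok=True)
        zip_path = osp.join(dst_dir, 'data.zip')
        if not osp.exists(zip_path):
            gdown.download(url, zip_path, quiet=False)
        with zipfile.ZipFile(zip_path) as zf:
            zf.extractall(dst_dir)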
@@ -77,7 +82,10 @@ datamanager = torchreid.data.ImageDataManager(
        super(University1652, self).__init__(train, query, gallery, **kwargs)

    def process_dir(self, dir_path, relabel=False, train=False):
        IMG_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif', '.tiff', '.webp')
        IMG_EXTENSIONS = (
            '.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif', '.tiff',
            '.webp'
        )
        if train:
            img_paths = glob.glob(osp.join(dir_path, '*/*/*'))
        else:
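The hunk is cut off before the non-training branch. For orientation, torchreid image datasets return lists of ``(img_path, pid, camid)`` tuples; a hedged reconstruction of the method's overall shape (the test-time glob pattern, the label mapping, and the fixed camid are assumptions, not code from this commit)::

    import glob
    import os.path as osp

    def process_dir_sketch(dir_path, relabel=False, train=False):
        """Illustrative only: collect images, map each building-ID folder
        to a label, and emit (img_path, pid, camid) tuples."""
        exts = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif',
                '.tiff', '.webp')
        pattern = '*/*/*' if train else '*/*'   # train has an extra view level
        img_paths = [
            p for p in glob.glob(osp.join(dir_path, pattern))
            if p.lower().endswith(exts)
        ]
        pids = sorted({osp.basename(osp.dirname(p)) for p in img_paths})
        pid2label = {pid: i for i, pid in enumerate(pids)}
        data = []
        for p in img_paths:
            raw_pid = osp.basename(osp.dirname(p))
            # folder names are numeric building IDs in University-1652
            pid = pid2label[raw_pid] if relabel else int(raw_pid)
            data.append((p, pid, 0))            # camid fixed to 0 in this sketch
        return data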
@@ -16,22 +16,21 @@
with limited time cost.
"""

from setuptools import setup, Extension
from setuptools import Extension, setup
import torch
import torch.nn as nn
from torch.autograd import Function
from torch.utils.cpp_extension import BuildExtension, CUDAExtension

from torch.utils.cpp_extension import CUDAExtension, BuildExtension

setup(
    name='build_adjacency_matrix',
    ext_modules=[
        CUDAExtension('build_adjacency_matrix', [
        CUDAExtension(
            'build_adjacency_matrix', [
                'build_adjacency_matrix.cpp',
                'build_adjacency_matrix_kernel.cu',
        ]),
            ]
        ),
    ],
    cmdclass={
        'build_ext':BuildExtension
    })
    cmdclass={'build_ext': BuildExtension}
)
@@ -16,22 +16,21 @@
with limited time cost.
"""

from setuptools import setup, Extension
from setuptools import Extension, setup
import torch
import torch.nn as nn
from torch.autograd import Function
from torch.utils.cpp_extension import BuildExtension, CUDAExtension

from torch.utils.cpp_extension import CUDAExtension, BuildExtension

setup(
    name='gnn_propagate',
    ext_modules=[
        CUDAExtension('gnn_propagate', [
        CUDAExtension(
            'gnn_propagate', [
                'gnn_propagate.cpp',
                'gnn_propagate_kernel.cu',
        ]),
            ]
        ),
    ],
    cmdclass={
        'build_ext':BuildExtension
    })
    cmdclass={'build_ext': BuildExtension}
)
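The two nearly identical setup scripts above compile the CUDA kernels used by the re-ranking code. A hedged sketch of how they are typically built and consumed (directory names are assumptions; a CUDA toolkit matching the installed PyTorch build is required)::

    # Build each extension from its own directory, e.g.:
    #   python setup.py install
    # BuildExtension drives nvcc/the host compiler for the .cu/.cpp sources
    # declared above. Once installed, the compiled modules import like
    # ordinary packages, which is exactly how the next file uses them:
    import build_adjacency_matrix
    import gnn_propagate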
@@ -16,16 +16,14 @@
with limited time cost.
"""

import torch
import numpy as np
import torch

import build_adjacency_matrix
import gnn_propagate

import build_adjacency_matrix
from utils import *


def gnn_reranking(X_q, X_g, k1, k2):
    query_num, gallery_num = X_q.shape[0], X_g.shape[0]
@@ -34,7 +32,9 @@ def gnn_reranking(X_q, X_g, k1, k2):
    del X_u, X_q, X_g

    # initial ranking list
    S, initial_rank = original_score.topk(k=k1, dim=-1, largest=True, sorted=True)
    S, initial_rank = original_score.topk(
        k=k1, dim=-1, largest=True, sorted=True
    )

    # stage 1
    A = build_adjacency_matrix.forward(initial_rank.float())
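For readers without the compiled extension, a hedged pure-PyTorch stand-in for what ``build_adjacency_matrix.forward`` is assumed to do here (scatter ones at every sample's top-``k1`` neighbours) might look like the following; the kernel's exact behaviour is not shown in this commit, so treat this as an illustration only::

    import torch

    def build_adjacency_matrix_torch(initial_rank):
        # initial_rank: (n, k1) indices of each sample's top-k1 neighbours
        n = initial_rank.size(0)
        A = torch.zeros(n, n, device=initial_rank.device)
        A.scatter_(1, initial_rank.long(), 1.0)   # A[i, j] = 1 if j is a top-k1 neighbour of i
        return A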
@@ -44,11 +44,13 @@ def gnn_reranking(X_q, X_g, k1, k2):
    if k2 != 1:
        for i in range(2):
            A = A + A.T
            A = gnn_propagate.forward(A, initial_rank[:, :k2].contiguous().float(), S[:, :k2].contiguous().float())
            A = gnn_propagate.forward(
                A, initial_rank[:, :k2].contiguous().float(),
                S[:, :k2].contiguous().float()
            )
            A_norm = torch.norm(A, p=2, dim=1, keepdim=True)
            A = A.div(A_norm.expand_as(A))

    cosine_similarity = torch.mm(A[:query_num, ], A[query_num:, ].t())
    del A, S
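A hedged usage sketch for the entry point modified above, mirroring how ``main.py`` calls it later in this commit (random features stand in for real extractor outputs; a CUDA device and the compiled extensions are required)::

    import torch
    import torch.nn.functional as F

    from gnn_reranking import gnn_reranking   # main.py uses a star import

    # Feature dimension is arbitrary here; k1/k2 follow the Market-1501
    # defaults declared in main.py.
    X_q = F.normalize(torch.randn(100, 512), dim=1).cuda()
    X_g = F.normalize(torch.randn(1000, 512), dim=1).cuda()
    indices = gnn_reranking(X_q, X_g, k1=26, k2=7)   # re-ranked gallery order per query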
@@ -17,31 +17,38 @@
"""

import os
import torch
import argparse
import numpy as np
import argparse
import torch

from utils import *
from gnn_reranking import *

parser = argparse.ArgumentParser(description='Reranking_is_GNN')
parser.add_argument('--data_path',
parser.add_argument(
    '--data_path',
    type=str,
    default='../xm_rerank_gpu_2/features/market_88_test.pkl',
    help='path to dataset')
parser.add_argument('--k1',
    help='path to dataset'
)
parser.add_argument(
    '--k1',
    type=int,
    default=26, # Market-1501
    # default=60, # Veri-776
    help='parameter k1')
parser.add_argument('--k2',
    help='parameter k1'
)
parser.add_argument(
    '--k2',
    type=int,
    default=7, # Market-1501
    # default=10, # Veri-776
    help='parameter k2')
    help='parameter k2'
)

args = parser.parse_args()


def main():
    data = load_pickle(args.data_path)
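The parser above is consumed by ``main()``; an example invocation using the defaults shown (the pickle path and its internal layout come from the parser default and are not verified here)::

    python main.py --data_path ../xm_rerank_gpu_2/features/market_88_test.pkl --k1 26 --k2 7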
@@ -56,7 +63,10 @@ def main():
    gallery_feature = gallery_feature.cuda()

    indices = gnn_reranking(query_feature, gallery_feature, args.k1, args.k2)
    evaluate_ranking_list(indices, query_label, query_cam, gallery_label, gallery_cam)
    evaluate_ranking_list(
        indices, query_label, query_cam, gallery_label, gallery_cam
    )


if __name__ == '__main__':
    main()
@@ -16,9 +16,9 @@
with limited time cost.
"""

import pickle
import numpy as np
import os
import numpy as np
import pickle
import torch
@@ -27,10 +27,12 @@ def load_pickle(pickle_path):
        data = pickle.load(f)
    return data


def save_pickle(pickle_path, data):
    with open(pickle_path, 'wb') as f:
        pickle.dump(data, f, protocol=pickle.HIGHEST_PROTOCOL)


def pairwise_squared_distance(x):
    '''
    x : (n_samples, n_points, dims)
@@ -39,17 +41,23 @@ def pairwise_squared_distance(x):
    x2s = (x * x).sum(-1, keepdim=True)
    return x2s + x2s.transpose(-1, -2) - 2 * x @ x.transpose(-1, -2)


def pairwise_distance(x, y):
    m, n = x.size(0), y.size(0)

    x = x.view(m, -1)
    y = y.view(n, -1)

    dist = torch.pow(x, 2).sum(dim=1, keepdim=True).expand(m, n) + torch.pow(y, 2).sum(dim=1, keepdim=True).expand(n,m).t()
    dist = torch.pow(x, 2).sum(
        dim=1, keepdim=True
    ).expand(m, n) + torch.pow(y, 2).sum(
        dim=1, keepdim=True
    ).expand(n, m).t()
    dist.addmm_(1, -2, x, y.t())

    return dist


def cosine_similarity(x, y):
    m, n = x.size(0), y.size(0)
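One portability note on an unchanged line kept above: ``dist.addmm_(1, -2, x, y.t())`` uses the old positional ``(beta, alpha, mat1, mat2)`` form, which recent PyTorch releases deprecate. A hedged equivalent with the current keyword signature::

    # Same computation, written against the non-deprecated signature:
    dist.addmm_(x, y.t(), beta=1, alpha=-2)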
@@ -61,12 +69,18 @@ def cosine_similarity(x, y):

    return score

def evaluate_ranking_list(indices, query_label, query_cam, gallery_label, gallery_cam):


def evaluate_ranking_list(
    indices, query_label, query_cam, gallery_label, gallery_cam
):
    CMC = np.zeros((len(gallery_label)), dtype=np.int)
    ap = 0.0

    for i in range(len(query_label)):
        ap_tmp, CMC_tmp = evaluate(indices[i],query_label[i], query_cam[i], gallery_label, gallery_cam)
        ap_tmp, CMC_tmp = evaluate(
            indices[i], query_label[i], query_cam[i], gallery_label,
            gallery_cam
        )
        if CMC_tmp[0] == -1:
            continue
        CMC = CMC + CMC_tmp
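A compatibility note on the ``CMC`` initialisation above: ``dtype=np.int`` relies on the scalar alias that NumPy 1.20+ deprecates and 1.24 removes. A hedged drop-in::

    # np.int was only an alias for the builtin int; an explicit dtype avoids the warning:
    CMC = np.zeros(len(gallery_label), dtype=np.int64)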
@@ -74,7 +88,11 @@ def evaluate_ranking_list(indices, query_label, query_cam, gallery_label, galler

    CMC = CMC.astype(np.float32)
    CMC = CMC / len(query_label) #average CMC
    print('Rank@1:%f Rank@5:%f Rank@10:%f mAP:%f'%(CMC[0],CMC[4],CMC[9],ap/len(query_label)))
    print(
        'Rank@1:%f Rank@5:%f Rank@10:%f mAP:%f' %
        (CMC[0], CMC[4], CMC[9], ap / len(query_label))
    )


def evaluate(index, ql, qc, gl, gc):
    query_index = np.argwhere(gl == ql)
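The final hunk stops right after the first line of ``evaluate``. For orientation, the function must return ``(ap, cmc)`` with ``cmc[0] == -1`` signalling a query that has no valid match, since that is what ``evaluate_ranking_list`` checks above. A hedged reconstruction of the standard single-query CMC/AP protocol (the junk handling and the simplified average-precision formula are assumptions, not code from this commit)::

    import numpy as np

    def evaluate_sketch(index, ql, qc, gl, gc):
        """Illustrative single-query evaluation: same-id/different-camera
        entries count as good matches, same-id/same-camera ones as junk."""
        query_index = np.argwhere(gl == ql)
        camera_index = np.argwhere(gc == qc)
        good_index = np.setdiff1d(query_index, camera_index, assume_unique=True)
        junk_index = np.intersect1d(query_index, camera_index)

        cmc = np.zeros(len(index), dtype=np.int64)
        if good_index.size == 0:
            cmc[0] = -1
            return 0.0, cmc

        # drop junk entries from the ranking, then locate the good matches
        index = index[np.isin(index, junk_index, invert=True)]
        rows_good = np.argwhere(np.isin(index, good_index)).flatten()
        cmc[rows_good[0]:] = 1

        ap = 0.0
        for i, r in enumerate(rows_good):        # simplified average precision
            ap += (i + 1) / (r + 1) / len(rows_good)
        return ap, cmc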