Merge pull request #56 from q147258134/master

Add vehicle re-id dataset (VeRi) support
Hao Luo 2019-07-18 17:57:53 +08:00 committed by GitHub
commit 4bf49c24ff
2 changed files with 85 additions and 0 deletions


@@ -7,6 +7,7 @@ from .cuhk03 import CUHK03
from .dukemtmcreid import DukeMTMCreID
from .market1501 import Market1501
from .msmt17 import MSMT17
from .veri import VeRi
from .dataset_loader import ImageDataset
@@ -14,6 +15,7 @@ __factory = {
    'cuhk03': CUHK03,
    'dukemtmc': DukeMTMCreID,
    'msmt17': MSMT17,
    'veri': VeRi,
}
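
For context, a minimal sketch of how the new 'veri' entry could be looked up through this factory dict. The accessor name init_dataset is an assumption for illustration; the repository's actual helper may differ:

# Hypothetical accessor built on __factory; illustrative, not necessarily the repo's exact function.
def init_dataset(name, *args, **kwargs):
    if name not in __factory:
        raise KeyError("Unknown dataset: {}".format(name))
    return __factory[name](*args, **kwargs)

# Example (path is illustrative):
# dataset = init_dataset('veri', root='/path/to/data')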


@@ -0,0 +1,83 @@
import glob
import re

import os.path as osp

from .bases import BaseImageDataset


class VeRi(BaseImageDataset):
    """
    VeRi-776
    Reference:
    Liu, Xinchen, et al. "Large-scale vehicle re-identification in urban surveillance videos." ICME 2016.
    URL: https://vehiclereid.github.io/VeRi/
    Dataset statistics:
    # identities: 776
    # images: 37778 (train) + 1678 (query) + 11579 (gallery)
    # cameras: 20
    """
    dataset_dir = 'veri'

    def __init__(self, root='../', verbose=True, **kwargs):
        super(VeRi, self).__init__()
        self.dataset_dir = osp.join(root, self.dataset_dir)
        self.train_dir = osp.join(self.dataset_dir, 'image_train')
        self.query_dir = osp.join(self.dataset_dir, 'image_query')
        self.gallery_dir = osp.join(self.dataset_dir, 'image_test')

        self._check_before_run()

        train = self._process_dir(self.train_dir, relabel=True)
        query = self._process_dir(self.query_dir, relabel=False)
        gallery = self._process_dir(self.gallery_dir, relabel=False)

        if verbose:
            print("=> VeRi-776 loaded")
            self.print_dataset_statistics(train, query, gallery)

        self.train = train
        self.query = query
        self.gallery = gallery

        self.num_train_pids, self.num_train_imgs, self.num_train_cams = self.get_imagedata_info(self.train)
        self.num_query_pids, self.num_query_imgs, self.num_query_cams = self.get_imagedata_info(self.query)
        self.num_gallery_pids, self.num_gallery_imgs, self.num_gallery_cams = self.get_imagedata_info(self.gallery)

    def _check_before_run(self):
        """Check if all files are available before going deeper"""
        if not osp.exists(self.dataset_dir):
            raise RuntimeError("'{}' is not available".format(self.dataset_dir))
        if not osp.exists(self.train_dir):
            raise RuntimeError("'{}' is not available".format(self.train_dir))
        if not osp.exists(self.query_dir):
            raise RuntimeError("'{}' is not available".format(self.query_dir))
        if not osp.exists(self.gallery_dir):
            raise RuntimeError("'{}' is not available".format(self.gallery_dir))

    def _process_dir(self, dir_path, relabel=False):
        img_paths = glob.glob(osp.join(dir_path, '*.jpg'))
        pattern = re.compile(r'([-\d]+)_c(\d+)')

        pid_container = set()
        for img_path in img_paths:
            pid, _ = map(int, pattern.search(img_path).groups())
            if pid == -1: continue  # junk images are just ignored
            pid_container.add(pid)
        pid2label = {pid: label for label, pid in enumerate(pid_container)}

        dataset = []
        for img_path in img_paths:
            pid, camid = map(int, pattern.search(img_path).groups())
            if pid == -1: continue  # junk images are just ignored
            assert 0 <= pid <= 776  # pid == 0 means background
            assert 1 <= camid <= 20
            camid -= 1  # index starts from 0
            if relabel: pid = pid2label[pid]
            dataset.append((img_path, pid, camid))

        return dataset
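
To illustrate the parsing done in _process_dir, the snippet below runs the same regex on an assumed example of the VeRi file naming scheme (vehicle ID, then camera ID prefixed with 'c'); the filename is illustrative, not taken from the dataset:

import re

pattern = re.compile(r'([-\d]+)_c(\d+)')
# Assumed VeRi-style name: <vehicleID>_c<cameraID>_<frame>_<box>.jpg (illustrative only)
example = '0001_c002_00030600_0.jpg'
pid, camid = map(int, pattern.search(example).groups())
print(pid, camid)  # -> 1 2; _process_dir then shifts camid to 0-based and, for training, relabels pid via pid2label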