Mirror of https://github.com/JDAI-CV/fast-reid.git (synced 2025-06-03 14:50:47 +08:00)

Commit 04fe9fb2d8: add saivt dataset
Parent: e8739a667e
@@ -76,7 +76,7 @@ def build_reid_test_loader(cfg, dataset_name):
     test_loader = DataLoader(
         test_set,
         batch_sampler=batch_sampler,
-        num_workers=2, # save some memory
+        num_workers=4, # save some memory
         collate_fn=fast_batch_collator,
         pin_memory=True,
     )
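The only change in this hunk is raising num_workers from 2 to 4. For reference, the patched call is a standard PyTorch DataLoader driven by a batch sampler with pinned memory; below is a minimal standalone sketch with placeholder pieces (ToyDataset and simple_collator are illustrations, not fast-reid's CommDataset or fast_batch_collator):

import torch
from torch.utils.data import DataLoader, Dataset, BatchSampler, SequentialSampler

class ToyDataset(Dataset):
    # Placeholder standing in for fast-reid's test set.
    def __len__(self):
        return 16

    def __getitem__(self, idx):
        return {"images": torch.zeros(3, 256, 128), "targets": idx}

def simple_collator(batch):
    # Stand-in for fast_batch_collator: stack image tensors, gather labels.
    return {
        "images": torch.stack([b["images"] for b in batch]),
        "targets": torch.tensor([b["targets"] for b in batch]),
    }

test_set = ToyDataset()
batch_sampler = BatchSampler(SequentialSampler(test_set), batch_size=4, drop_last=False)

test_loader = DataLoader(
    test_set,
    batch_sampler=batch_sampler,  # mutually exclusive with batch_size/shuffle
    num_workers=4,                # the value this commit switches to
    collate_fn=simple_collator,
    pin_memory=True,
)

More workers improve prefetch throughput at the cost of extra host memory, so the "save some memory" comment reflects the old value more than the new one.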
@@ -33,7 +33,10 @@ class CommDataset(Dataset):
         return len(self.img_items)

     def __getitem__(self, index):
-        img_path, pid, camid = self.img_items[index]
+        img_item = self.img_items[index]
+        img_path = img_item[0]
+        pid = img_item[1]
+        camid = img_item[2]
         img = read_image(img_path)
         if self.transform is not None: img = self.transform(img)
         if self.relabel:
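Switching from tuple unpacking to indexed access presumably makes __getitem__ tolerant of img_items entries that carry more than the three (path, pid, camid) fields; plain unpacking raises ValueError on any extra field. A tiny illustration (the items below are made up):

# Hypothetical items: the second one carries an extra field beyond (path, pid, camid).
items = [
    ("a/0001-c1.jpeg", "saivt_0001", "saivt_C1"),
    ["b/0002-c2.jpeg", "saivt_0002", "saivt_C2", {"frame": 17}],
]

for item in items:
    # Indexed access works for any sequence with at least three fields,
    # whereas `img_path, pid, camid = item` would fail on the 4-element entry.
    img_path, pid, camid = item[0], item[1], item[2]
    print(img_path, pid, camid)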
fastreid/data/datasets/saivt.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+# encoding: utf-8
+"""
+@author: xingyu liao
+@contact: sherlockliao01@gmail.com
+"""
+
+import os
+from glob import glob
+
+from fastreid.data.datasets import DATASET_REGISTRY
+from fastreid.data.datasets.bases import ImageDataset
+
+__all__ = ['SAIVT', ]
+
+
+@DATASET_REGISTRY.register()
+class SAIVT(ImageDataset):
+    dataset_dir = "SAIVT-SoftBio"
+    dataset_name = "saivt"
+
+    def __init__(self, root='datasets', **kwargs):
+        self.root = root
+        self.train_path = os.path.join(self.root, self.dataset_dir)
+
+        required_files = [self.train_path]
+        self.check_before_run(required_files)
+
+        train = self.process_train(self.train_path)
+
+        super().__init__(train, [], [], **kwargs)
+
+    def process_train(self, train_path):
+        data = []
+
+        pid_path = os.path.join(train_path, "cropped_images")
+        pid_list = os.listdir(pid_path)
+
+        for pid_name in pid_list:
+            pid = self.dataset_name + '_' + pid_name
+            img_list = glob(os.path.join(pid_path, pid_name, "*.jpeg"))
+            for img_path in img_list:
+                img_name = os.path.basename(img_path)
+                camid = self.dataset_name + '_' + img_name.split('-')[2]
+                data.append([img_path, pid, camid])
+        return data
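To illustrate what process_train yields, the sketch below builds a throwaway directory mimicking the expected layout <root>/SAIVT-SoftBio/cropped_images/<person>/<image>.jpeg and runs the same loop; the filename "0001-A-C3-0042.jpeg" is invented purely so that split('-')[2] has a camera token to pick up, it is not the documented SAIVT-SoftBio naming scheme:

import os
from glob import glob
from tempfile import TemporaryDirectory

dataset_name = "saivt"

with TemporaryDirectory() as root:
    # Fake tree: <root>/SAIVT-SoftBio/cropped_images/<pid>/<file>.jpeg
    pid_dir = os.path.join(root, "SAIVT-SoftBio", "cropped_images", "0001")
    os.makedirs(pid_dir)
    open(os.path.join(pid_dir, "0001-A-C3-0042.jpeg"), "w").close()

    data = []
    cropped = os.path.join(root, "SAIVT-SoftBio", "cropped_images")
    for pid_name in os.listdir(cropped):
        pid = dataset_name + '_' + pid_name
        for img_path in glob(os.path.join(cropped, pid_name, "*.jpeg")):
            img_name = os.path.basename(img_path)
            camid = dataset_name + '_' + img_name.split('-')[2]  # third '-'-separated token
            data.append([img_path, pid, camid])

    print(data)  # [['.../0001-A-C3-0042.jpeg', 'saivt_0001', 'saivt_C3']]

Once registered, the class should be selectable through the usual DATASET_REGISTRY lookup under its class name ('SAIVT'); because query and gallery are passed as empty lists, the dataset contributes training identities only.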
@@ -52,6 +52,7 @@ def build_transforms(cfg, is_train=True):

         if do_autoaug:
             res.append(T.RandomApply([AutoAugment()], p=autoaug_prob))
+
         res.append(T.Resize(size_train, interpolation=3))
         if do_flip:
             res.append(T.RandomHorizontalFlip(p=flip_prob))
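For context, build_transforms assembles a torchvision pipeline around these lines; below is a minimal sketch of the ordering shown in this hunk, with placeholder values standing in for the cfg.INPUT settings (the AutoAugment wrapper is fast-reid's own module and is only referenced in a comment here):

import torchvision.transforms as T

# Placeholder values; fast-reid reads the real ones from cfg.INPUT.*
size_train = [256, 128]
do_flip, flip_prob = True, 0.5

res = []
# When autoaugment is enabled, fast-reid prepends
# T.RandomApply([AutoAugment()], p=autoaug_prob) at this point.
res.append(T.Resize(size_train, interpolation=3))  # interpolation=3 corresponds to bicubic
if do_flip:
    res.append(T.RandomHorizontalFlip(p=flip_prob))

transform = T.Compose(res)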