import torch
from torch.utils.data import DistributedSampler as _DistributedSampler


class DistributedSampler(_DistributedSampler):
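    """DistributedSampler with an extra ``round_up`` switch.

    When ``round_up`` is True, the index list is padded so that every
    replica draws exactly ``num_samples`` indices per epoch; when it is
    False, the dataset is split across ranks without padding, so the
    ranks may receive slightly different numbers of samples.
    """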

    def __init__(self,
                 dataset,
                 num_replicas=None,
                 rank=None,
                 shuffle=True,
                 round_up=True):
        super().__init__(dataset, num_replicas=num_replicas, rank=rank)
        self.shuffle = shuffle
        self.round_up = round_up
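        # with round_up the epoch is padded to num_samples * num_replicas;
        # otherwise the unpadded dataset length is used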
        if self.round_up:
            self.total_size = self.num_samples * self.num_replicas
        else:
            self.total_size = len(self.dataset)

    def __iter__(self):
        # deterministically shuffle based on epoch
        if self.shuffle:
            g = torch.Generator()
            g.manual_seed(self.epoch)
            indices = torch.randperm(len(self.dataset), generator=g).tolist()
        else:
            indices = torch.arange(len(self.dataset)).tolist()

        # add extra samples to make it evenly divisible
        if self.round_up:
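            # repeat the index list enough times and truncate, so that
            # len(indices) == total_size even when the dataset size is not
            # divisible by num_replicas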
            indices = (
                indices *
                int(self.total_size / len(indices) + 1))[:self.total_size]
            assert len(indices) == self.total_size

        # subsample
        indices = indices[self.rank:self.total_size:self.num_replicas]
        if self.round_up:
            assert len(indices) == self.num_samples

        return iter(indices)
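

# Minimal usage sketch (assumption, not part of the original file;
# ``dataset``, ``world_size``, ``rank`` and ``num_epochs`` are placeholders
# supplied by the surrounding training code). The sampler plugs into a
# regular DataLoader, and ``set_epoch`` should be called every epoch so the
# seeded shuffle order changes between epochs:
#
#     sampler = DistributedSampler(dataset, num_replicas=world_size,
#                                  rank=rank, shuffle=True, round_up=True)
#     loader = torch.utils.data.DataLoader(dataset, batch_size=32,
#                                          sampler=sampler)
#     for epoch in range(num_epochs):
#         sampler.set_epoch(epoch)
#         for batch in loader:
#             ...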