import math

import torch
from torch.utils.data import Sampler

from ding.utils import get_rank, get_world_size
class DistributedSampler(Sampler):
    """Sampler that restricts data loading to a subset of the dataset.

    It is especially useful in conjunction with
    :class:`torch.nn.parallel.DistributedDataParallel`. In such a case, each
    process can pass a DistributedSampler instance as a DataLoader sampler,
    and load a subset of the original dataset that is exclusive to it.

    .. note::
        Dataset is assumed to be of constant size.

    Arguments:
        dataset: Dataset used for sampling.
        world_size (optional): Number of processes participating in
            distributed training. Defaults to ``get_world_size()``.
        rank (optional): Rank of the current process within ``world_size``.
            Defaults to ``get_rank()``.
        round_up (optional): Whether to pad the index list with repeated
            leading indices so that every rank receives the same number of
            samples. Defaults to ``True``.
    """
    def __init__(self, dataset, world_size=None, rank=None, round_up=True):
        if world_size is None:
            world_size = get_world_size()
        if rank is None:
            rank = get_rank()
        self.dataset = dataset
        self.world_size = world_size
        self.rank = rank
        self.round_up = round_up
        self.epoch = 0
        # Per-rank sample count: ceil division, so every sample is covered
        # even when the dataset size does not divide evenly.
        self.num_samples = int(math.ceil(len(self.dataset) / self.world_size))
        if self.round_up:
            # Padded length: a multiple of world_size, so all ranks match.
            self.total_size = self.num_samples * self.world_size
        else:
            self.total_size = len(self.dataset)
    def __iter__(self):
        # Deterministically shuffle based on the current epoch, so that all
        # ranks generate the same permutation.
        g = torch.Generator()
        g.manual_seed(self.epoch)
        indices = torch.randperm(len(self.dataset), generator=g).tolist()

        # Add extra samples (repeated from the head of the permutation) to
        # make the total evenly divisible across ranks.
        if self.round_up:
            indices += indices[:(self.total_size - len(indices))]
        assert len(indices) == self.total_size

        # Subsample: each rank takes its own contiguous slice.
        offset = self.num_samples * self.rank
        indices = indices[offset:offset + self.num_samples]
        # Without round_up, only the last rank may hold a short slice.
        if self.round_up or self.rank < self.world_size - 1:
            assert len(indices) == self.num_samples
        return iter(indices)
    def __len__(self):
        # Note: with round_up=False, the last rank may actually yield fewer
        # samples than this.
        return self.num_samples

    def set_epoch(self, epoch):
        # Call at the start of each epoch so that the shuffle differs between
        # epochs but stays consistent across ranks.
        self.epoch = epoch
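

if __name__ == "__main__":
    # Minimal usage sketch (an assumption, not part of the original module):
    # shard a 10-element dataset across 2 simulated ranks. Passing world_size
    # and rank explicitly avoids needing an initialized process group.
    from torch.utils.data import DataLoader, TensorDataset

    dataset = TensorDataset(torch.arange(10))
    for rank in range(2):
        sampler = DistributedSampler(dataset, world_size=2, rank=rank)
        # In a real training loop, call set_epoch(epoch) once per epoch to
        # reshuffle; here a fixed epoch keeps the output reproducible.
        sampler.set_epoch(0)
        loader = DataLoader(dataset, batch_size=5, sampler=sampler)
        for (batch, ) in loader:
            # Each rank prints a disjoint half of the shuffled indices.
            print(f"rank {rank}: {batch.tolist()}")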