-
Notifications
You must be signed in to change notification settings - Fork 0
/
samplers.py
31 lines (22 loc) · 917 Bytes
/
samplers.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
import torch
from torch.utils import data
# src: https://discuss.pytorch.org/t/implementing-an-infinite-loop-dataset-dataloader-combo/35567/5
class InfiniteRandomSampler(data.Sampler):
    """Yield random indices from [0, dset_size) forever.

    Each "epoch" of indices is a fresh random permutation of the full
    range, so every index appears exactly once per dset_size draws.
    Intended for an infinite DataLoader/Dataset combo where iteration
    never terminates.

    Arguments:
        dset_size (int): Size of the dataset to sample. Must be positive.

    Raises:
        ValueError: If ``dset_size`` is not a positive integer.
    """

    def __init__(self, dset_size):
        if dset_size <= 0:
            raise ValueError(
                f"dset_size must be a positive integer, got {dset_size}"
            )
        self.dset_size = dset_size

    def __iter__(self):
        # Use a private Generator seeded from the global RNG so that
        # sampling does not perturb (and is not perturbed by) other
        # consumers of torch's base RNG state.
        rng = torch.Generator()
        seed = torch.empty((), dtype=torch.int64).random_().item()
        rng.manual_seed(seed)
        # Yield one full random permutation after another, forever.
        while True:
            yield from torch.randperm(self.dset_size, generator=rng).tolist()

    def __len__(self):
        # The sampler is unbounded; __len__ must return an int, so the
        # honest contract is to refuse. (Returning float("inf") would
        # make len() crash with a confusing message anyway.)
        raise TypeError(
            "InfiniteRandomSampler is infinite and has no length"
        )
def _infinite_generator(n, rng):
"""Inifinitely returns a number in [0, n)."""
while True:
yield from torch.randperm(n, generator=rng).tolist()