"...lm-evaluation-harness.git" did not exist on "525afc17beeaae17ddb1159b7ec7b1a51992833d"
build_loader.py 1.52 KB
Newer Older
1
import platform
from functools import partial

from mmcv.runner import get_dist_info
from mmcv.parallel import collate
from torch.utils.data import DataLoader

from .sampler import GroupSampler, DistributedGroupSampler, DistributedSampler

if platform.system() != 'Windows':
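    # DataLoader worker processes can exhaust the default per-process limit on
    # open file descriptors, so raise the soft limit (RLIMIT_NOFILE) to 4096.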
    # https://github.com/pytorch/pytorch/issues/973
    import resource
    rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
    resource.setrlimit(resource.RLIMIT_NOFILE, (4096, rlimit[1]))


def build_dataloader(dataset,
                     imgs_per_gpu,
                     workers_per_gpu,
                     num_gpus=1,
                     dist=True,
                     **kwargs):
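    """Build a ``DataLoader`` with a group-aware sampler.

    In distributed mode every process (one GPU) gets ``imgs_per_gpu`` images
    per batch and ``workers_per_gpu`` loading workers; otherwise the batch
    size and worker count are scaled by ``num_gpus``. Remaining keyword
    arguments are forwarded to ``torch.utils.data.DataLoader``.
    """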
    # pop 'shuffle' so it is not forwarded to DataLoader alongside an explicit sampler
    shuffle = kwargs.pop('shuffle', True)
    if dist:
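        # distributed: every process samples only its own shard of the dataset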
        rank, world_size = get_dist_info()
        if shuffle:
            sampler = DistributedGroupSampler(dataset, imgs_per_gpu,
                                              world_size, rank)
        else:
            sampler = DistributedSampler(
                dataset, world_size, rank, shuffle=False)
        batch_size = imgs_per_gpu
        num_workers = workers_per_gpu
    else:
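        # non-distributed: one process feeds all GPUs, so scale by num_gpus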
        sampler = GroupSampler(dataset, imgs_per_gpu) if shuffle else None
        batch_size = num_gpus * imgs_per_gpu
        num_workers = num_gpus * workers_per_gpu

    data_loader = DataLoader(
        dataset,
        batch_size=batch_size,
        sampler=sampler,
        num_workers=num_workers,
        collate_fn=partial(collate, samples_per_gpu=imgs_per_gpu),
        pin_memory=False,
        **kwargs)

    return data_loader
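

# Minimal usage sketch (illustrative only, not part of this module): the toy
# dataset below is an assumption; it merely mimics the ``flag`` attribute that
# the group samplers are expected to read for aspect-ratio grouping.
#
#   import numpy as np
#   from torch.utils.data import Dataset
#
#   class ToyDataset(Dataset):
#       def __init__(self, num=8):
#           self.flag = np.zeros(num, dtype=np.uint8)  # a single group
#       def __len__(self):
#           return len(self.flag)
#       def __getitem__(self, idx):
#           return dict(img=np.zeros((3, 32, 32), dtype=np.float32))
#
#   loader = build_dataloader(ToyDataset(), imgs_per_gpu=2, workers_per_gpu=0,
#                             num_gpus=1, dist=False)
#   for batch in loader:
#       pass  # batch['img'] is a tensor of shape (2, 3, 32, 32)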