build_loader.py

from functools import partial

from mmcv.runner import get_dist_info
from mmcv.parallel import collate
from torch.utils.data import DataLoader

from .sampler import GroupSampler, DistributedGroupSampler

# Raise the soft file-descriptor limit: DataLoader workers share tensors via
# file descriptors and can otherwise hit "Too many open files".
# https://github.com/pytorch/pytorch/issues/973
import resource
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (4096, rlimit[1]))


def build_dataloader(dataset,
                     imgs_per_gpu,
                     workers_per_gpu,
                     num_gpus=1,
                     dist=True,
                     **kwargs):
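    """Build a PyTorch DataLoader that batches images with a group sampler.

    In distributed mode every process loads ``imgs_per_gpu`` images using
    ``workers_per_gpu`` workers; otherwise a single process serves all
    ``num_gpus`` GPUs and both values are multiplied by ``num_gpus``.
    Extra keyword arguments are forwarded to ``torch.utils.data.DataLoader``.
    """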
    if dist:
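        # Distributed training: each process drives a single GPU, so the
        # batch size and worker count below are per-process values.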
        rank, world_size = get_dist_info()
        sampler = DistributedGroupSampler(dataset, imgs_per_gpu, world_size,
                                          rank)
        batch_size = imgs_per_gpu
        num_workers = workers_per_gpu
    else:
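        # Non-distributed (e.g. DataParallel): one process feeds all GPUs,
        # so batch size and worker count are scaled by num_gpus.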
        sampler = GroupSampler(dataset, imgs_per_gpu)
        batch_size = num_gpus * imgs_per_gpu
        num_workers = num_gpus * workers_per_gpu

    # Passing shuffle=False disables the group sampler and falls back to the
    # DataLoader's default sequential sampling.
    if not kwargs.get('shuffle', True):
        sampler = None

    data_loader = DataLoader(
        dataset,
        batch_size=batch_size,
        sampler=sampler,
        num_workers=num_workers,
        # mmcv's collate stacks DataContainer-based samples per GPU.
        collate_fn=partial(collate, samples_per_gpu=imgs_per_gpu),
        pin_memory=False,
        **kwargs)

    return data_loader
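

# A minimal usage sketch (illustrative only; ``dataset`` is assumed to be a
# dataset instance compatible with the group samplers imported above):
#
#   data_loader = build_dataloader(
#       dataset, imgs_per_gpu=2, workers_per_gpu=2, num_gpus=1, dist=False)
#   for data_batch in data_loader:
#       ...  # each batch holds imgs_per_gpu samples per GPU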