#!/usr/bin/env python
# -*- encoding: utf-8 -*-
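"""Test that the data-parallel sampler gives each rank a different shard.

Spawns ``world_size`` processes over the gloo backend, builds a CIFAR10
dataloader through ``get_dataloader``, then broadcasts rank 0's first image
to the rest of the data-parallel group: every other rank must hold a
different image, showing the dataset was sharded rather than replicated.
"""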

import os
from functools import partial
from pathlib import Path

import pytest
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
from torchvision import transforms

import colossalai
from colossalai.builder import build_dataset, build_transform
from colossalai.context import ParallelMode, Config
from colossalai.core import global_context as gpc
from colossalai.utils import get_dataloader, free_port
from colossalai.testing import rerun_on_exception

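# Test configuration: CIFAR10 read from the directory in the DATA environment
# variable, batch size 8, and pipeline/tensor parallel sizes of 1, so every
# spawned process falls into a single data-parallel group.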
CONFIG = Config(
    dict(
        train_data=dict(
            dataset=dict(
                type='CIFAR10',
                root=Path(os.environ['DATA']),
                train=True,
                download=True,
            ),
            dataloader=dict(batch_size=8),
            transform_pipeline=[
                dict(type='ToTensor'),
                dict(type='Normalize', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
            ],
        ),
        parallel=dict(
            pipeline=dict(size=1),
            tensor=dict(size=1, mode=None),
        ),
        seed=1024,
    ))


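# Worker entry point: one copy runs in each spawned process. The gloo backend
# keeps the test runnable on CPU-only machines.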
def run_data_sampler(rank, world_size, port):
    dist_args = dict(config=CONFIG, rank=rank, world_size=world_size, backend='gloo', port=port, host='localhost')
    colossalai.launch(**dist_args)
    print('finished initialization')

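    # build the torchvision transforms from the config and attach them to the
    # dataset config before the dataset itself is constructed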
    transform_pipeline = [build_transform(cfg) for cfg in gpc.config.train_data.transform_pipeline]
    transform_pipeline = transforms.Compose(transform_pipeline)
    gpc.config.train_data.dataset['transform'] = transform_pipeline
    dataset = build_dataset(gpc.config.train_data.dataset)
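    # get_dataloader is expected to attach a distributed sampler over the
    # data-parallel group so each rank draws a disjoint shard; that behaviour
    # is what the broadcast check below verifies.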
    dataloader = get_dataloader(dataset, **gpc.config.train_data.dataloader)
    data_iter = iter(dataloader)
    img, _ = next(data_iter)
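    # compare only the first image of the local batch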
    img = img[0]

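    # Broadcast rank 0's first image to the whole data-parallel group.
    # Non-zero ranks clone their own image first so the broadcast does not
    # overwrite the local copy they need for the comparison.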
    if gpc.get_local_rank(ParallelMode.DATA) != 0:
        img_to_compare = img.clone()
    else:
        img_to_compare = img
    dist.broadcast(img_to_compare, src=0, group=gpc.get_group(ParallelMode.DATA))

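    # each non-zero rank must now hold a different local image than the copy
    # broadcast from rank 0; otherwise the dataset was replicated, not sharded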
    if gpc.get_local_rank(ParallelMode.DATA) != 0:
        assert not torch.equal(
            img, img_to_compare), 'The same image was distributed to multiple data-parallel ranks; each rank should receive a different shard'
    torch.cuda.empty_cache()


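# free_port() can race with another process binding the same port between
# selection and launch, so rerun the test when the spawned workers die with
# "Address already in use".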
@pytest.mark.cpu
@rerun_on_exception(exception_type=mp.ProcessRaisedException, pattern=".*Address already in use.*")
def test_data_sampler():
    world_size = 4
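    # pick an unused TCP port for this run so concurrent test runs do not collide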
    test_func = partial(run_data_sampler, world_size=world_size, port=free_port())
    mp.spawn(test_func, nprocs=world_size)


if __name__ == '__main__':
    test_data_sampler()