import copy

import pytest
import colossalai
import torch
import torch.multiprocessing as mp
from colossalai.testing import rerun_if_address_is_in_use
from colossalai.utils.cuda import get_current_device
from colossalai.utils import free_port
from colossalai.utils.model.colo_init_context import ColoInitContext
from functools import partial
from tests.components_to_test.registry import non_distributed_component_funcs
from colossalai.nn.parallel import ColoDDP
from collections import OrderedDict
from colossalai.tensor import ProcessGroup, ColoParameter


def check_state_dict_equal(state_dict: OrderedDict, other_state_dict: OrderedDict):
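    """Assert that two state dicts match entry by entry, moving tensors onto a common device before comparing."""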
    for (k1, t1), (k2, t2) in zip(state_dict.items(), other_state_dict.items()):
        assert k1 == k2

        if t1.device != t2.device:
            temp_t2 = t2.to(t1.device)
        else:
            temp_t2 = t2

        assert torch.equal(t1, temp_t2), "\t{}\n\t{}".format(t1, temp_t2)


def init_ddp(module: torch.nn.Module) -> ColoDDP:
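    """Wrap ``module`` with ColoDDP using a default ProcessGroup."""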
    pg = ProcessGroup()
    return ColoDDP(module, process_group=pg)


def run_ddp_state_dict():
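    """Check that a ColoDDP-wrapped GPT-2 model can load a vanilla PyTorch state dict
    and export an identical one back."""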
    get_components_func = non_distributed_component_funcs.get_callable('gpt2')
    model_builder, train_dataloader, test_dataloader, optimizer_class, criterion = get_components_func()
    torch_model = model_builder().cuda()
    with ColoInitContext(device=get_current_device()):
        model = model_builder()
    model = init_ddp(model)
    torch_state_dict = torch_model.state_dict()

    # after ColoDDP wrapping, every ColoParameter should be bound to a process group
    for param in model.parameters():
        if isinstance(param, ColoParameter):
            assert param.get_process_group() is not None
    model.load_state_dict(torch_state_dict)

    # loading a plain PyTorch state dict must not drop the process group binding
    for param in model.parameters():
        if isinstance(param, ColoParameter):
            assert param.get_process_group() is not None

    # exporting the state dict again should reproduce the reference exactly
    state_dict = model.state_dict()
    check_state_dict_equal(torch_state_dict, state_dict)


def run_dist(rank, world_size, port):
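    """Entry point for each spawned process: initialize the distributed environment, then run the test body."""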
    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    run_ddp_state_dict()


@pytest.mark.dist
@pytest.mark.parametrize('world_size', [1, 2])
@rerun_if_address_is_in_use()
def test_state_dict(world_size):
    run_func = partial(run_dist, world_size=world_size, port=free_port())
    mp.spawn(run_func, nprocs=world_size)


if __name__ == '__main__':
    test_state_dict(2)