import copy
from collections import OrderedDict
from functools import partial

import pytest
import torch
import torch.multiprocessing as mp

import colossalai
from colossalai.nn.parallel import ColoDDP
from colossalai.tensor import ColoParameter, ProcessGroup
from colossalai.testing import rerun_if_address_is_in_use
from colossalai.utils import free_port
from colossalai.utils.cuda import get_current_device
from colossalai.zero import ColoInitContext
from tests.components_to_test.registry import non_distributed_component_funcs


def check_state_dict_equal(state_dict: OrderedDict, other_state_dict: OrderedDict):
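    """Assert that the two state dicts share the same keys and element-wise equal
    tensors, moving tensors onto a common device before comparing."""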
    for (k1, t1), (k2, t2) in zip(state_dict.items(), other_state_dict.items()):
        assert k1 == k2

        if t1.device != t2.device:
            temp_t2 = t2.to(t1.device)
        else:
            temp_t2 = t2

        assert torch.equal(t1, temp_t2), "\t{}\n\t{}".format(t1, temp_t2)


def init_ddp(module: torch.nn.Module) -> ColoDDP:
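    """Wrap the module in ColoDDP using a default ProcessGroup."""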
    pg = ProcessGroup()
    return ColoDDP(module, process_group=pg)


def run_ddp_state_dict():
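    """Round-trip a GPT-2 state dict between a plain PyTorch model and a ColoDDP-wrapped
    model, verifying that keys, values and ColoParameter process groups survive."""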
    get_components_func = non_distributed_component_funcs.get_callable('gpt2')
    model_builder, train_dataloader, test_dataloader, optimizer_class, criterion = get_components_func()
    torch_model = model_builder().cuda()
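    # Build the same model under ColoInitContext so its parameters are ColoParameters,
    # then wrap it with ColoDDP.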
    with ColoInitContext(device=get_current_device()):
        model = model_builder()
    model = init_ddp(model)
    torch_state_dict = torch_model.state_dict()

    for param in model.parameters():
        if isinstance(param, ColoParameter):
            assert param.get_process_group() is not None
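    # Loading a plain PyTorch state dict should keep the process group on every ColoParameter.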
    model.load_state_dict(torch_state_dict)

    for param in model.parameters():
        if isinstance(param, ColoParameter):
            assert param.get_process_group() is not None

    state_dict = model.state_dict()
    check_state_dict_equal(torch_state_dict, state_dict)


def run_dist(rank, world_size, port):
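    """Per-process entry point: initialise the ColossalAI distributed context, then run the checks."""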
    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    run_ddp_state_dict()


@pytest.mark.dist
@pytest.mark.parametrize('world_size', [1, 2])
@rerun_if_address_is_in_use()
def test_state_dict(world_size):
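    """Spawn ``world_size`` worker processes and run the DDP state dict checks on each."""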
    run_func = partial(run_dist, world_size=world_size, port=free_port())
    mp.spawn(run_func, nprocs=world_size)


if __name__ == '__main__':
    test_state_dict(2)