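"""Test for the Gemini memory tracer: wrap several small models in
MemtracerWrapper, run a couple of forward/backward steps for each, and print
the non-model data recorded by the tracer."""
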
from functools import partial

import pytest
import torch
import torch.multiprocessing as mp

import colossalai
from colossalai.gemini.memory_tracer import MemtracerWrapper
from colossalai.testing import rerun_if_address_is_in_use
from colossalai.utils import free_port
from tests.components_to_test.registry import non_distributed_component_funcs


def run_fwd_bwd(model, data, label, criterion, enable_autocast=False):
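    """Run one forward/backward step; backward goes through the wrapper's
    backward() so the tracer's op hooks also observe the backward pass."""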
    with torch.cuda.amp.autocast(enabled=enable_autocast):
        if criterion:
            y = model(data)
            loss = criterion(y, label)
        else:
            loss = model(data, label)
        loss = loss.float()
    model.backward(loss)


def run_tracer(rank, world_size, port, use_grad_check=True):
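    """Initialize the distributed environment, then trace a couple of training
    steps for each registered test model."""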
    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    test_models = ['repeated_computed_layers', 'resnet18', 'no_leaf_module', 'bert']
    # test_models = ['bert']
    for model_name in test_models:
        get_components_func = non_distributed_component_funcs.get_callable(model_name)
        model_builder, train_dataloader, _, _, criterion = get_components_func()

        # init model on cpu
        # TODO: the memtracer hook cannot handle buffers registered on a non-leaf
        # module (e.g. BertEmbedding); a simple workaround is to always put
        # buffers on CUDA and treat them as non-model data.
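        # MemtracerWrapper installs memory-tracing op hooks on the model
        # (exposed later via model._ophook_list) that record memory usage
        # during forward and backward.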
        model = MemtracerWrapper(model_builder(checkpoint=use_grad_check))

        for n, buff in model.named_buffers():
            buff.data = buff.data.cuda()
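        # only run the first two batches to keep the test short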
        for i, (data, label) in enumerate(train_dataloader):
            if i > 1:
                break
            data = data.cuda()
            label = label.cuda()

            run_fwd_bwd(model, data, label, criterion, False)

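        # print the non-model data recorded by the tracer's op hook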
        model._ophook_list[0].print_non_model_data()


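# rerun the test if the port picked by free_port() is already in use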
@pytest.mark.dist
@pytest.mark.parametrize("world_size", [1])
@pytest.mark.parametrize("use_grad_check", [True, False])
@rerun_if_address_is_in_use()
def test_tracer(world_size, use_grad_check):
    run_func = partial(run_tracer, world_size=world_size, port=free_port(), use_grad_check=use_grad_check)
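    # spawn one process per rank; mp.spawn passes the rank as the first argument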
    mp.spawn(run_func, nprocs=world_size)


if __name__ == '__main__':
    test_tracer(world_size=1, use_grad_check=True)