test_search_logical_device_mesh.py

from functools import partial

import pytest
import torch.multiprocessing as mp

from colossalai.device import AlphaBetaProfiler
from colossalai.initialize import launch
from colossalai.logging import disable_existing_loggers
from colossalai.testing import parameterize, rerun_if_address_is_in_use
from colossalai.utils import free_port


def check_alpha_beta(rank, physical_devices, world_size, port):
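    """Worker executed on every spawned rank.

    Initializes the distributed environment via `launch`, lets `AlphaBetaProfiler`
    search the best logical 2D mesh over the given physical devices, and checks
    the returned mesh against the expected grouping.
    """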
    disable_existing_loggers()
    launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    profiler = AlphaBetaProfiler(physical_devices)
    best_logical_mesh = profiler.search_best_logical_mesh()

    if physical_devices == [0, 1, 2, 3]:
        assert best_logical_mesh == [[0, 1], [2, 3]]
    elif physical_devices == [0, 3]:
        assert best_logical_mesh == [[0, 3]]


@pytest.mark.skip(reason="Skip because assertion may fail for CI devices")
@pytest.mark.dist
@parameterize('physical_devices', [[0, 1, 2, 3], [0, 3]])
@rerun_if_address_is_in_use()
def test_profile_alpha_beta(physical_devices):
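    """Spawn one process per GPU and verify the searched logical mesh for each device group."""
    # Four ranks are spawned in both cases so that device index 3 exists even when
    # only [0, 3] is being profiled.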
    world_size = 4
    run_func = partial(check_alpha_beta, physical_devices=physical_devices, world_size=world_size, port=free_port())
    mp.spawn(run_func, nprocs=world_size)


if __name__ == '__main__':
    test_profile_alpha_beta()