test_1d.py 1023 Bytes
Newer Older
zbian's avatar
zbian committed
1
2
3
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

4
5
from functools import partial

zbian's avatar
zbian committed
6
import pytest
Frank Lee's avatar
Frank Lee committed
7
8
import torch
import torch.multiprocessing as mp
zbian's avatar
zbian committed
9
from colossalai.core import global_context as gpc
アマデウス's avatar
アマデウス committed
10
from colossalai.initialize import launch
11
12
from colossalai.utils import free_port

Frank Lee's avatar
Frank Lee committed
13
from checks_1d.check_layer_1d import *
zbian's avatar
zbian committed
14
15
16
17
18

# Parallel context for the test: one pipeline stage and 4-way 1D tensor
# parallelism (so test_1d below must spawn exactly 4 processes).
CONFIG = {
    'parallel': {
        'pipeline': {'size': 1},
        'tensor': {'size': 4, 'mode': '1d'},
    },
}


26
def check_layer(rank, world_size, port):
    """Per-process worker: bring up the distributed environment, run the
    1D tensor-parallel linear-layer checks, then tear everything down.

    Args:
        rank: rank of this spawned process.
        world_size: total number of processes (matches CONFIG tensor size).
        port: TCP port for the rendezvous on localhost.
    """
    launch(
        config=CONFIG,
        rank=rank,
        world_size=world_size,
        host='localhost',
        port=port,
        backend='nccl',
    )

    # Exercise the column-parallel and row-parallel linear layers
    # (imported from checks_1d.check_layer_1d).
    check_linear_col()
    check_linear_row()

    # Release the parallel context and free cached GPU memory so the
    # next test starts from a clean state.
    gpc.destroy()
    torch.cuda.empty_cache()

zbian's avatar
zbian committed
40
41

@pytest.mark.dist
def test_1d():
    """Spawn one process per tensor-parallel rank and run the 1D layer checks."""
    world_size = 4  # must match CONFIG['parallel']['tensor']['size']
    port = free_port()
    worker = partial(check_layer, world_size=world_size, port=port)
    mp.spawn(worker, nprocs=world_size)
zbian's avatar
zbian committed
46
47
48


# Allow running this distributed test directly as a script (without pytest).
if __name__ == '__main__':
    test_1d()