#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import pytest
import torch
import torch.multiprocessing as mp
from colossalai.initialize import launch, get_default_parser
from colossalai.logging import get_dist_logger
from checks_seq.check_layer_seq import *
from functools import partial


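# parallel configuration: 4-way sequence parallelism, single pipeline stage (no pipeline splitting)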
CONFIG = dict(
    parallel=dict(
        pipeline=1,
        tensor=dict(mode='sequence', size=4)
    )
)


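# run the sequence-parallel layer checks (check_selfattention is provided by the
# wildcard import from checks_seq.check_layer_seq above)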
def check_layer():
    check_selfattention()


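# per-process worker: set up the distributed environment, run the layer checks,
# then release cached GPU memory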
def run_check_sequence(rank, world_size):
    # initialize the distributed environment with the sequence-parallel config
    launch(config=CONFIG,
           rank=rank,
           world_size=world_size,
           host='localhost',
           port=29924,
           backend='nccl')
    logger = get_dist_logger()
    logger.info('Distributed environment is initialized.', ranks=[0])

    # check layers
    check_layer()
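    # free cached GPU memory before the process exits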
    torch.cuda.empty_cache()


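# spawn one process per device (world_size=4) and run the sequence-parallel checks in each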
@pytest.mark.dist
def test_sequence():
    world_size = 4
    run_func = partial(run_check_sequence, world_size=world_size)
    mp.spawn(run_func, nprocs=world_size)


if __name__ == '__main__':
    test_sequence()