test_p2p.py 3.9 KB
Newer Older
zbian's avatar
zbian committed
1
2
3
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

4
5
from functools import partial

zbian's avatar
zbian committed
6
7
8
import pytest
import torch
import torch.distributed as dist
Frank Lee's avatar
Frank Lee committed
9
import torch.multiprocessing as mp
10
from colossalai.communication import (recv_backward, recv_forward, recv_obj_meta, send_backward,
11
                                      send_backward_recv_forward, send_forward, send_forward_recv_backward,
12
                                      send_obj_meta)
zbian's avatar
zbian committed
13
14
from colossalai.context.parallel_mode import ParallelMode
from colossalai.core import global_context as gpc
Frank Lee's avatar
Frank Lee committed
15
from colossalai.initialize import launch
Frank Lee's avatar
Frank Lee committed
16
from colossalai.logging import get_dist_logger
17
from colossalai.utils import free_port, get_current_device
18
from colossalai.testing import rerun_on_exception
zbian's avatar
zbian committed
19

20
21
22
# Shape of the tensors exchanged between pipeline stages.
BATCH_SIZE = 4
SEQ_LENGTH = 2
HIDDEN_SIZE = 16

# 4-stage pipeline parallelism, no tensor parallelism; fixed seed so the
# torch.randn draws in check_comm are reproducible across runs.
CONFIG = dict(parallel=dict(pipeline=dict(size=4), tensor=dict(size=1, mode=None)), seed=1024)
zbian's avatar
zbian committed
25
26
27
28
29
30
31
32
33
34
35
36


def check_equal(A, B):
    """Return True when tensors *A* and *B* match within loose tolerances."""
    is_close = torch.allclose(A, B, rtol=1e-5, atol=1e-3)
    return is_close


def check_forward(output_tensor, rank, logger):
    """Relay *output_tensor* stage-by-stage down the pipeline.

    The first pipeline stage starts from its own copy; every later stage
    receives the tensor from its predecessor and logs whether it matches
    the expected value. All stages except the last forward it onwards.
    """
    dist.barrier()
    if not gpc.is_first_rank(ParallelMode.PIPELINE):
        relayed = recv_forward(output_tensor.shape)
        logger.info('Rank {} received forward. Correct tensor: {}'.format(rank, check_equal(relayed, output_tensor)))
    else:
        relayed = output_tensor.clone()
    if not gpc.is_last_rank(ParallelMode.PIPELINE):
        send_forward(relayed)
        logger.info('Rank {} sent forward.'.format(rank))


def check_backward(output_grad, rank, logger):
    """Relay *output_grad* stage-by-stage up the pipeline.

    Mirror image of check_forward: the last pipeline stage starts from
    its own copy; every earlier stage receives the gradient from its
    successor, logs whether it matches, and (except the first stage)
    sends it further backward.
    """
    dist.barrier()
    if not gpc.is_last_rank(ParallelMode.PIPELINE):
        relayed_grad = recv_backward(output_grad.shape)
        logger.info('Rank {} received backward. Correct grad: {}'.format(rank, check_equal(relayed_grad, output_grad)))
    else:
        relayed_grad = output_grad.clone()
    if not gpc.is_first_rank(ParallelMode.PIPELINE):
        send_backward(relayed_grad)
        logger.info('Rank {} sent backward.'.format(rank))


def check_forward_backward(output_tensor, output_grad, rank, logger):
    """Exercise the fused send/recv primitives in both directions.

    Non-first stages send a gradient backward while receiving the
    forward tensor; non-last stages send the tensor forward while
    receiving the backward gradient. Each received value is checked
    against the expected one and the result logged.
    """
    dist.barrier()
    is_first = gpc.is_first_rank(ParallelMode.PIPELINE)
    is_last = gpc.is_last_rank(ParallelMode.PIPELINE)
    if not is_first:
        received = send_backward_recv_forward(output_grad, output_tensor.shape)
        logger.info('Rank {} sent backward received forward. Correct tensor: {}'.format(
            rank, check_equal(received, output_tensor)))
    if not is_last:
        received_grad = send_forward_recv_backward(output_tensor, output_grad.shape)
        logger.info('Rank {} sent forward received backward. Correct grad: {}'.format(
            rank, check_equal(received_grad, output_grad)))
zbian's avatar
zbian committed
65
66


67
def check_comm(size, rank, prev_rank, next_rank, logger):
    """Run the full suite of p2p checks on rank-synchronized random tensors.

    ``torch.randn`` produces different values on each rank, so every
    freshly drawn tensor is all-reduced first to give all ranks the same
    reference values before the relay checks start.
    """
    # Tensor and gradient share the same shape in this test.
    shape = (BATCH_SIZE, SEQ_LENGTH, HIDDEN_SIZE)
    device = get_current_device()
    tensor = torch.randn(shape, dtype=torch.float32, device=device)
    dist.all_reduce(tensor)
    grad = torch.randn(shape, dtype=torch.float32, device=device)
    dist.all_reduce(grad)
    check_forward(tensor, rank, logger)
    check_backward(grad, rank, logger)
    check_forward_backward(tensor, grad, rank, logger)


81
def run_check(rank, world_size, port):
    """Worker entry point spawned by ``mp.spawn``.

    Initializes the distributed environment on this process, runs the
    p2p communication checks, then tears the process group down.

    Args:
        rank (int): process rank assigned by ``mp.spawn``.
        world_size (int): total number of spawned processes.
        port (int): free TCP port used for the localhost rendezvous.
    """
    launch(config=CONFIG, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    logger = get_dist_logger()

    # Re-read the rank from the initialized context and look up the
    # pipeline neighbors this process will exchange tensors with.
    rank = gpc.get_global_rank()
    prev_rank = gpc.get_prev_global_rank(ParallelMode.PIPELINE)
    next_rank = gpc.get_next_global_rank(ParallelMode.PIPELINE)
    logger.info('Rank {0}: prev rank {1}, next rank {2}'.format(rank, prev_rank, next_rank))
    # Fix: corrected typo 'initialzied' -> 'initialized' in the log message.
    logger.info('Distributed environment is initialized.')

    check_comm(world_size, rank, prev_rank, next_rank, logger)

    # Clean up so a subsequent test can launch a fresh process group.
    gpc.destroy()
    torch.cuda.empty_cache()


@pytest.mark.dist
@rerun_on_exception(exception_type=mp.ProcessRaisedException, pattern=".*Address already in use.*")
def test_p2p():
    """Spawn a 4-process pipeline group and run the p2p communication checks."""
    nprocs = 4
    worker = partial(run_check, world_size=nprocs, port=free_port())
    mp.spawn(worker, nprocs=nprocs)
zbian's avatar
zbian committed
101
102
103


if __name__ == '__main__':
    # Allow running this test directly as a script, outside of pytest.
    test_p2p()