from functools import partial

import pytest
import torch
import torch.multiprocessing as mp
from torch.nn import functional as F

import colossalai
from colossalai.tensor import (ColoTensor, ColoTensorSpec, ComputePattern, ComputeSpec, DistSpecManager,
                               ProcessGroup, ShardSpec)
from colossalai.testing import rerun_if_address_is_in_use
from colossalai.utils import free_port

from _utils import tensor_equal, tensor_shard_equal


def init_1d_row(weight, pg: ProcessGroup):
    # Shard dim 0 (the vocabulary dimension) of the embedding weight across the TP group.
    spec = (ShardSpec([0], [pg.tp_world_size()]), ComputeSpec(ComputePattern.TP1D))
    with DistSpecManager.no_grad():
        weight.set_tensor_spec(*spec)
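
# Comment-only sketch of the row layout: with the (12, 32) weight used below and
# tp_world_size() == 4, ShardSpec([0], [4]) leaves each rank a (3, 32) slice of the
# vocabulary rows, i.e. roughly
#     rows_per_rank = 12 // pg.tp_world_size()   # 3
#     local_rows = weight[rank * rows_per_rank:(rank + 1) * rows_per_rank]
# Lookups for rows owned by other ranks must then be combined across the TP group
# for F.embedding to reproduce the dense output.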


def init_1d_col(weight, pg: ProcessGroup):
    # Shard dim -1 (the embedding dimension) of the weight across the TP group.
    spec = (ShardSpec([-1], [pg.tp_world_size()]), ComputeSpec(ComputePattern.TP1D))
    with DistSpecManager.no_grad():
        weight.set_tensor_spec(*spec)
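
# Comment-only sketch of the column layout: ShardSpec([-1], [4]) instead splits the
# embedding dimension, so each rank holds a (12, 8) slice of the same (12, 32) weight
# and every rank can look up any index locally; the per-rank outputs cover disjoint
# slices of the last dim and are recombined to reconstruct the full result.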


def run_with_spec(spec_init_func, pg: ProcessGroup):
    # Reference dense embedding, plus a ColoTensor copy of its weight that gets sharded.
    model = torch.nn.Embedding(12, 32).cuda()
    weight = ColoTensor(torch.nn.Parameter(model.weight.detach()), ColoTensorSpec(pg))
    spec_init_func(weight, pg)

    # Forward: the sharded F.embedding must match the dense reference output.
    x = torch.tensor((0, 3, 6, 9)).cuda()
    out = model(x)
    colo_out = F.embedding(x, weight)
    assert tensor_equal(out, colo_out)

    # Backward with the same upstream gradient on both paths.
    grad = torch.rand_like(out)
    out.backward(grad)
    colo_out.backward(grad)

    # Compare grads inside a TP group: each rank only holds its shard of weight.grad.
    assert tensor_shard_equal(model.weight.grad, weight.grad, pg.tp_local_rank(), pg.tp_world_size())
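
# tensor_shard_equal (from the local _utils helper) is assumed here to slice the
# dense model.weight.grad down to the shard owned by tp_local_rank() before
# comparing, matching the rank/world_size arguments it takes.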


def run_dist(rank, world_size, port):
    # config = dict(parallel=dict(tensor=dict(mode="1d", size=world_size),))
    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    # The whole world forms a single tensor-parallel group; exercise both layouts.
    pg = ProcessGroup(tp_degree=world_size)
    run_with_spec(init_1d_row, pg)
    run_with_spec(init_1d_col, pg)


@pytest.mark.dist
@pytest.mark.parametrize('world_size', [1, 4])
@rerun_if_address_is_in_use()
def test_embedding_1d(world_size):
    # Spawn one process per rank; each rank runs both the row- and col-sharded checks.
    run_func = partial(run_dist, world_size=world_size, port=free_port())
    mp.spawn(run_func, nprocs=world_size)


if __name__ == '__main__':
    test_embedding_1d(4)
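
# Typical invocations (single node; the world_size=4 case assumes >= 4 GPUs):
#   pytest test_embedding_tp.py
#   python test_embedding_tp.py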