test_embedding_tp.py
import torch
from colossalai.tensor import ColoTensor, ComputePattern, ComputeSpec, DistSpecManager, ProcessGroup, TensorSpec, distspec
from torch.nn import functional as F

from functools import partial

import colossalai
import pytest
import torch.multiprocessing as mp
from colossalai.testing import rerun_if_address_is_in_use
from colossalai.utils import free_port
from _utils import tensor_equal, tensor_shard_equal


def init_1d_row(weight, pg: ProcessGroup):
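    # row sharding: split the weight along dim 0 (the vocabulary dimension),
    # one shard per tensor-parallel rank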
    spec = TensorSpec(distspec.shard(pg, [0], [pg.tp_world_size()]), ComputeSpec(ComputePattern.TP1D))
    with DistSpecManager.no_grad():
        weight.set_tensor_spec(spec)


def init_1d_col(weight, pg: ProcessGroup):
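    # column sharding: split the weight along its last dim (the embedding
    # dimension) across the tensor-parallel ranks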
    spec = TensorSpec(distspec.shard(pg, [-1], [pg.tp_world_size()]), ComputeSpec(ComputePattern.TP1D))
    with DistSpecManager.no_grad():
        weight.set_tensor_spec(spec)


def run_with_spec(spec_init_func, pg: ProcessGroup):
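    # keep a dense reference module and a ColoTensor copy of its weight; after
    # sharding the copy, forward and backward must match the dense reference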
    model = torch.nn.Embedding(12, 32).cuda()
    weight = ColoTensor(torch.nn.Parameter(model.weight.detach()))
    spec_init_func(weight, pg)
    x = torch.tensor((0, 3, 6, 9)).cuda()
    out = model(x)
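    # the ColoTensor weight routes F.embedding to ColossalAI's tensor-parallel
    # embedding op, following the spec set by spec_init_func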
    colo_out = F.embedding(x, weight)
    assert tensor_equal(out, colo_out)
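
    # backpropagate the same upstream gradient through both paths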
    grad = torch.rand_like(out)
    out.backward(grad)
    colo_out.backward(grad)
    # each rank holds only a shard of weight.grad; compare it against the
    # matching slice of the dense reference gradient
    assert tensor_shard_equal(model.weight.grad, weight.grad, pg.tp_local_rank(), pg.tp_world_size())


def run_dist(rank, world_size, port):
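    # per-process entry point: initialize the NCCL backend for this rank, then
    # exercise both sharding layouts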
    # config = dict(parallel=dict(tensor=dict(mode="1d", size=world_size),))
    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
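    # all ranks form a single tensor-parallel group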
    pg = ProcessGroup(tp_degree=world_size)
    run_with_spec(init_1d_row, pg)
    run_with_spec(init_1d_col, pg)


@pytest.mark.dist
@pytest.mark.parametrize('world_size', [1, 4])
@rerun_if_address_is_in_use()
def test_embedding_1d(world_size):
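    # free_port() picks an open TCP port for the rendezvous; one process is
    # spawned per rank, and world_size=1 covers the unsharded degenerate case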
    run_func = partial(run_dist, world_size=world_size, port=free_port())
    mp.spawn(run_func, nprocs=world_size)


if __name__ == '__main__':
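    # allow running this file directly on 4 GPUs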
    test_embedding_1d(4)