test_init_context.py
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from functools import partial

import colossalai
import pytest
import torch
import torch.multiprocessing as mp
from colossalai.utils import free_port
from colossalai.zero.init_ctx import ZeroInitContext
from colossalai.zero.shard_utils.tensor_shard_strategy import TensorShardStrategy

from common import CONFIG
from tests.components_to_test.registry import non_distributed_component_funcs


def run_dist(rank, world_size, port):
    colossalai.launch(config=CONFIG, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')

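    # build each registered test model and verify it was initialized under ZeRO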
    for get_components_func in non_distributed_component_funcs:
        model_builder, _, _, _, _ = get_components_func()
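        # constructing the model inside ZeroInitContext shards its parameters,
        # casts them to fp16, and places them on CUDA as they are created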
        with ZeroInitContext(convert_fp16=True,
                             convert_cuda=True,
                             shard_strategy=TensorShardStrategy(),
                             shard_param=True):
            model = model_builder(checkpoint=True)

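            # every parameter should now carry the ZeRO shard attribute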
            for param in model.parameters():
                assert hasattr(param, 'ca_attr')
                assert param.ca_attr.data.dtype == torch.half
                assert param.ca_attr._data_sharded_tensor.is_sharded
                assert param.ca_attr.data.device.type == 'cuda'


@pytest.mark.dist
@pytest.mark.parametrize("world_size", [1, 2, 4])
def test_zero_init_context(world_size):
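    # spawn one worker process per rank; each runs the checks in run_dist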
    run_func = partial(run_dist, world_size=world_size, port=free_port())
    mp.spawn(run_func, nprocs=world_size)


if __name__ == '__main__':
    test_zero_init_context(2)