test_context.py 1.03 KB
Newer Older
ver217's avatar
ver217 committed
1
import pytest
2
3
4
5
6
7
8
from colossalai.utils import ColoInitContext

from numpy import allclose, require
import torch
from colossalai.tensor import ColoTensor
from copy import deepcopy

9
from colossalai.utils.cuda import get_current_device
10

11

ver217's avatar
ver217 committed
12
13
@pytest.mark.skip
# FIXME(ver217): support lazy init
def test_lazy_init():
    """A lazily-initialized parameter holds no payload until first accessed."""
    input_features = 4
    output_features = 5

    with ColoInitContext(lazy_memory_allocate=True):
        layer = torch.nn.Linear(input_features, output_features, bias=True)

    # With lazy allocation enabled, the parameter carries no storage yet.
    assert layer.weight._torch_tensor.numel() == 0

    # Touching the tensor forces materialization of the full payload.
    layer.weight.torch_tensor()
    assert layer.weight._torch_tensor.numel() == input_features * output_features


ver217's avatar
ver217 committed
28
@pytest.mark.skip
def test_device():
    """A lazily-initialized parameter materializes on the requested device."""
    input_features = 4
    output_features = 5

    with ColoInitContext(lazy_memory_allocate=True, device=get_current_device()):
        layer = torch.nn.Linear(input_features, output_features, bias=True)

    # Evaluate the lazy parameter, then verify it landed on the target device.
    layer.weight.torch_tensor()
    assert layer.weight.device == get_current_device()


41
if __name__ == '__main__':
    # Run both context tests directly when executed as a script
    # (the pytest.mark.skip decorators only apply under a pytest runner).
    for test_fn in (test_lazy_init, test_device):
        test_fn()