import pytest
import torch
import torch.distributed as dist
from utils import shared_tempdir

import colossalai
from colossalai.booster import Booster
from colossalai.booster.plugin import GeminiPlugin, LowLevelZeroPlugin, TorchDDPPlugin
from colossalai.nn.optimizer import HybridAdam
from colossalai.testing import (
    check_state_dict_equal,
    clear_cache_before_run,
    parameterize,
    rerun_if_address_is_in_use,
    spawn,
)
from tests.kit.model_zoo import model_zoo


@clear_cache_before_run()
@parameterize("model_name", ["transformers_gpt"])
@parameterize("plugin_type", ["ddp", "zero", "gemini"])
def exam_from_pretrained(plugin_type: str, model_name: str, shard=True, size_per_shard=32):
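    """Verify that a checkpoint saved through each booster plugin can be reloaded
    with HuggingFace's ``from_pretrained``."""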
    (model_fn, data_gen_fn, output_transform_fn, loss_fn, _) = next(
        iter(model_zoo.get_sub_registry(model_name).values())
    )
    criterion = loss_fn

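    # Instantiate the booster plugin under test.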
    if plugin_type == "ddp":
        plugin = TorchDDPPlugin()
    elif plugin_type == "zero":
        plugin = LowLevelZeroPlugin(stage=2, max_norm=1.0, initial_scale=32)
    elif plugin_type == "gemini":
        plugin = GeminiPlugin(precision="fp16", initial_scale=32)
    else:
        raise ValueError(f"Plugin type {plugin_type} is invalid; please check your argument.")

    booster = Booster(plugin=plugin)

    model = model_fn().cuda()
    model_huggingface_cls = model.__class__
    optimizer = HybridAdam(model.parameters(), lr=0.001)
    model, optimizer, criterion, _, _ = booster.boost(model, optimizer, criterion)

    data = data_gen_fn()
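    # Move tensor inputs to the GPU; the class-name check also catches
    # tensor-like objects that torch.is_tensor does not cover.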
    data = {k: v.to("cuda") if torch.is_tensor(v) or "Tensor" in v.__class__.__name__ else v for k, v in data.items()}
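
    # Run one forward/backward pass and an optimizer step so the saved
    # checkpoint reflects updated weights.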
    output = model(**data)
    loss = criterion(output)

    booster.backward(loss, optimizer)
    optimizer.step()

    with shared_tempdir() as tempdir:
        model_ckpt_path = f"{tempdir}/model"
        booster.save_model(model, model_ckpt_path, shard=shard, size_per_shard=size_per_shard)
        dist.barrier()

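        # Reload the saved checkpoint directly through HuggingFace's
        # `from_pretrained` to confirm format compatibility.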
        new_model = model_huggingface_cls.from_pretrained(model_ckpt_path)
        new_model = new_model.cuda()
        new_optimizer = HybridAdam(new_model.parameters(), lr=0.001)
        new_model, new_optimizer, criterion, _, _ = booster.boost(new_model, new_optimizer, criterion)

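        # Gemini shards model states across ranks, so gather the full state dict
        # on every rank (only_rank_0=False) before comparing old and new models.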
        if plugin_type == "gemini":
            check_state_dict_equal(
                model.unwrap().state_dict(only_rank_0=False), new_model.unwrap().state_dict(only_rank_0=False), False
            )
        else:
            check_state_dict_equal(model.unwrap().state_dict(), new_model.unwrap().state_dict(), False)
        dist.barrier()


def run_dist(rank, world_size, port):
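    # Initialize the ColossalAI distributed environment, then run the
    # checkpoint test on this rank.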
    config = {}
    colossalai.launch(config=config, rank=rank, world_size=world_size, host="localhost", port=port, backend="nccl")
    exam_from_pretrained()


@pytest.mark.dist
@pytest.mark.parametrize("world_size", [2])
@rerun_if_address_is_in_use()
def test_huggingface_compatibility(world_size):
    spawn(run_dist, world_size)