"tests/test_zero/test_legacy/common.py" did not exist on "f552b112946764296e4c11015fd7ad667c648661"
test_trainer_with_pipe_schedule.py 3.32 KB
Newer Older
Frank Lee's avatar
Frank Lee committed
1
import os
from functools import partial
from pathlib import Path

import colossalai
import pytest
import torch
import torch.multiprocessing as mp
import torch.nn as nn
from colossalai.context.parallel_mode import ParallelMode
from colossalai.core import global_context as gpc
from colossalai.engine.schedule import PipelineSchedule
from colossalai.logging import get_dist_logger
from colossalai.trainer import Trainer
from colossalai.utils import MultiTimer, free_port, get_dataloader
from torch.optim import Adam
from torchvision import transforms
from torchvision.datasets import CIFAR10
from torchvision.models import resnet18

BATCH_SIZE = 4
IMG_SIZE = 32
NUM_EPOCHS = 200

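# 2-stage pipeline parallelism; the remaining processes form the data-parallel dimension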
CONFIG = dict(parallel=dict(pipeline=2))


def run_trainer_with_pipeline(rank, world_size, port):
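    # set up the distributed environment for this worker process (NCCL backend)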
    colossalai.launch(config=CONFIG, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')

    # build model
    model = resnet18(num_classes=10)

    if gpc.get_local_rank(ParallelMode.PIPELINE) == 0:
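        # stage 0: stem (conv/bn/relu/maxpool) plus layer1 and layer2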
        model = nn.Sequential(model.conv1, model.bn1, model.relu, model.maxpool, model.layer1, model.layer2)
    elif gpc.get_local_rank(ParallelMode.PIPELINE) == 1:
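        # nn.Sequential has no built-in flatten step, so wrap torch.flatten in a module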

        class Flatten(nn.Module):

            def forward(self, x):
                return torch.flatten(x, 1)

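        # stage 1: layer3 and layer4 plus pooling, flattening and the classifier head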
        model = nn.Sequential(model.layer3, model.layer4, model.avgpool, Flatten(), model.fc)

    # build dataloader
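    # CIFAR-10 is read from (and downloaded to) the directory given by the DATA env variable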
    train_dataset = CIFAR10(root=Path(os.environ['DATA']),
                            download=True,
                            transform=transforms.Compose([
                                transforms.Resize(size=(IMG_SIZE, IMG_SIZE)),
                                transforms.ToTensor(),
                                transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5))
                            ]))

    train_dataloader = get_dataloader(dataset=train_dataset,
                                      shuffle=True,
                                      batch_size=BATCH_SIZE,
                                      pin_memory=True,
                                      drop_last=True)

    # build optimizer and loss
    optimizer = Adam(model.parameters(), lr=0.001)
    criterion = nn.CrossEntropyLoss()

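    # wrap model, optimizer and criterion into an engine configured by CONFIG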
    engine, train_dataloader, *args = colossalai.initialize(model=model,
                                                            optimizer=optimizer,
                                                            criterion=criterion,
                                                            train_dataloader=train_dataloader)

    logger = get_dist_logger()
    logger.info("engine is built", ranks=[0])
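    # split each batch into 2 microbatches so the two pipeline stages can overlap work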
    pipe_schedule = PipelineSchedule(num_microbatches=2)
    timer = MultiTimer()
    trainer = Trainer(engine=engine, schedule=pipe_schedule, logger=logger, timer=timer)
    logger.info("trainer is built", ranks=[0])

    logger.info("start training", ranks=[0])

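    # max_steps=3 cuts training short so the test finishes quickly despite NUM_EPOCHS=200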
    trainer.fit(train_dataloader=train_dataloader,
                epochs=NUM_EPOCHS,
                max_steps=3,
                display_progress=True,
                test_interval=5)
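
    # tear down process groups and release cached GPU memory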
    gpc.destroy()
    torch.cuda.empty_cache()


@pytest.mark.dist
def test_trainer_with_pipeline():
    world_size = 4
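    # spawn one process per rank; pipeline size 2 requires world_size to be a multiple of 2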
    run_func = partial(run_trainer_with_pipeline, world_size=world_size, port=free_port())
    mp.spawn(run_func, nprocs=world_size)


if __name__ == '__main__':
    test_trainer_with_pipeline()