test_parallel.py
from unittest.mock import MagicMock, patch

import torch.nn as nn
from torch.nn.parallel import DataParallel, DistributedDataParallel

from mmcv.parallel import (MMDataParallel, MMDistributedDataParallel,
                           is_parallel_module)
from mmcv.parallel.distributed_deprecated import \
    MMDistributedDataParallel as DeprecatedMMDDP


# Mock out the distributed primitives that DistributedDataParallel touches at
# construction time, so the DDP wrappers below can be instantiated without
# initializing a real process group.
@patch('torch.distributed._broadcast_coalesced', MagicMock)
@patch('torch.distributed.broadcast', MagicMock)
@patch('torch.nn.parallel.DistributedDataParallel._ddp_init_helper', MagicMock)
def test_is_parallel_module():
    """Check that is_parallel_module recognizes the (MM)DataParallel and
    (MM)DistributedDataParallel wrappers but not a plain nn.Module."""

    class Model(nn.Module):

        def __init__(self):
            super().__init__()
            self.conv = nn.Conv2d(2, 2, 1)

        def forward(self, x):
            return self.conv(x)

    # A bare nn.Module is not a parallel wrapper.
    model = Model()
    assert not is_parallel_module(model)

    # Both the vanilla and the mmcv data-parallel wrappers should be detected.
    dp = DataParallel(model)
    assert is_parallel_module(dp)

    mmdp = MMDataParallel(model)
    assert is_parallel_module(mmdp)

    # The same holds for the distributed wrappers; a mocked process group is
    # sufficient because the distributed primitives are patched out above.
    ddp = DistributedDataParallel(model, process_group=MagicMock())
    assert is_parallel_module(ddp)

    mmddp = MMDistributedDataParallel(model, process_group=MagicMock())
    assert is_parallel_module(mmddp)

    # The deprecated MMDistributedDataParallel must still be recognized for
    # backward compatibility.
    deprecated_mmddp = DeprecatedMMDDP(model)
    assert is_parallel_module(deprecated_mmddp)
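
# For reference, a minimal sketch of the behavior these assertions pin down,
# assuming is_parallel_module is essentially an isinstance check against the
# known wrapper types (the actual mmcv implementation may differ, e.g. by
# consulting a registry of wrapper classes). Since MMDataParallel subclasses
# DataParallel and MMDistributedDataParallel subclasses
# DistributedDataParallel, checking the base wrappers plus the deprecated
# class would already cover every case asserted above:
#
#     def is_parallel_module(module):
#         wrappers = (DataParallel, DistributedDataParallel, DeprecatedMMDDP)
#         return isinstance(module, wrappers)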