test_hooks.py 6.25 KB
Newer Older
Wenwei Zhang's avatar
Wenwei Zhang committed
1
2
3
4
5
6
7
8
"""
Tests the hooks with runners.

CommandLine:
    pytest tests/test_hooks.py
    xdoctest tests/test_hooks.py zero

"""
9
import logging
Jiangmiao Pang's avatar
Jiangmiao Pang committed
10
import os.path as osp
11
import shutil
Jiangmiao Pang's avatar
Jiangmiao Pang committed
12
import sys
13
import tempfile
Wenwei Zhang's avatar
Wenwei Zhang committed
14
from unittest.mock import MagicMock, call
Jiangmiao Pang's avatar
Jiangmiao Pang committed
15

16
17
18
19
20
import pytest
import torch
import torch.nn as nn
from torch.utils.data import DataLoader

21
22
23
24
25
26
from mmcv.runner import (EpochBasedRunner, IterTimerHook, MlflowLoggerHook,
                         PaviLoggerHook, WandbLoggerHook)
from mmcv.runner.hooks.lr_updater import (CosineAnealingLrUpdaterHook,
                                          CyclicLrUpdaterHook)
from mmcv.runner.hooks.momentum_updater import (
    CosineAnealingMomentumUpdaterHook, CyclicMomentumUpdaterHook)
Jiangmiao Pang's avatar
Jiangmiao Pang committed
27
28
29
30
31


def test_pavi_hook():
    """PaviLoggerHook should log val scalars and upload the last checkpoint."""
    # pavi is an optional dependency; replace it with a mock before the hook
    # tries to import it.
    sys.modules['pavi'] = MagicMock()

    runner = _build_demo_runner()
    data = DataLoader(torch.ones((5, 2)))

    pavi_hook = PaviLoggerHook(add_graph=False, add_last_ckpt=True)
    runner.register_hook(pavi_hook)
    runner.run([data, data], [('train', 1), ('val', 1)], 1)
    shutil.rmtree(runner.work_dir)

    # The mocked pavi writer records every call the hook made during the run.
    assert hasattr(pavi_hook, 'writer')
    pavi_hook.writer.add_scalars.assert_called_with('val', {
        'learning_rate': 0.02,
        'momentum': 0.95
    }, 5)
    pavi_hook.writer.add_snapshot_file.assert_called_with(
        tag=runner.work_dir.split('/')[-1],
        snapshot_file_path=osp.join(runner.work_dir, 'latest.pth'),
        iteration=5)
48
49


Wenwei Zhang's avatar
Wenwei Zhang committed
50
51
52
53
54
55
56
57
58
def test_momentum_runner_hook():
    """
    xdoctest -m tests/test_hooks.py test_momentum_runner_hook
    """
    sys.modules['pavi'] = MagicMock()
    runner = _build_demo_runner()
    data = DataLoader(torch.ones((10, 2)))

    # Cyclic momentum schedule, stepped per iteration.
    momentum_hook = CyclicMomentumUpdaterHook(
        by_epoch=False,
        target_ratio=(0.85 / 0.95, 1),
        cyclic_times=1,
        step_ratio_up=0.4)
    runner.register_hook(momentum_hook)

    # Cyclic learning-rate schedule, stepped per iteration.
    lr_hook = CyclicLrUpdaterHook(
        by_epoch=False,
        target_ratio=(10, 1),
        cyclic_times=1,
        step_ratio_up=0.4)
    runner.register_hook(lr_hook)
    runner.register_hook(IterTimerHook())

    # Log every iteration through the (mocked) pavi backend so the
    # scheduled values can be inspected afterwards.
    logger_hook = PaviLoggerHook(interval=1, add_graph=False, add_last_ckpt=True)
    runner.register_hook(logger_hook)
    runner.run([data], [('train', 1)], 1)
    shutil.rmtree(runner.work_dir)

    # TODO: use a more elegant way to check values
    assert hasattr(logger_hook, 'writer')
    expected_calls = [
        call('train', {
            'learning_rate': 0.01999999999999999,
            'momentum': 0.95
        }, 0),
        call('train', {
            'learning_rate': 0.2,
            'momentum': 0.85
        }, 4),
        call('train', {
            'learning_rate': 0.155,
            'momentum': 0.875
        }, 6),
    ]
    logger_hook.writer.add_scalars.assert_has_calls(
        expected_calls, any_order=True)


def test_cosine_runner_hook():
    """
    xdoctest -m tests/test_hooks.py test_cosine_runner_hook
    """
    sys.modules['pavi'] = MagicMock()
    runner = _build_demo_runner()
    data = DataLoader(torch.ones((10, 2)))

    # Cosine-annealed momentum schedule with a short linear warmup.
    momentum_hook = CosineAnealingMomentumUpdaterHook(
        min_momentum_ratio=0.99 / 0.95,
        by_epoch=False,
        warmup_iters=2,
        warmup_ratio=0.9 / 0.95)
    runner.register_hook(momentum_hook)

    # Cosine-annealed learning-rate schedule with the same warmup length.
    lr_hook = CosineAnealingLrUpdaterHook(
        by_epoch=False, min_lr_ratio=0, warmup_iters=2, warmup_ratio=0.9)
    runner.register_hook(lr_hook)
    runner.register_hook(IterTimerHook())

    # Log every iteration through the (mocked) pavi backend so the
    # scheduled values can be inspected afterwards.
    logger_hook = PaviLoggerHook(interval=1, add_graph=False, add_last_ckpt=True)
    runner.register_hook(logger_hook)
    runner.run([data], [('train', 1)], 1)
    shutil.rmtree(runner.work_dir)

    # TODO: use a more elegant way to check values
    assert hasattr(logger_hook, 'writer')
    expected_calls = [
        call('train', {
            'learning_rate': 0.02,
            'momentum': 0.95
        }, 0),
        call('train', {
            'learning_rate': 0.01,
            'momentum': 0.97
        }, 5),
        call('train', {
            'learning_rate': 0.0004894348370484647,
            'momentum': 0.9890211303259032
        }, 9)
    ]
    logger_hook.writer.add_scalars.assert_has_calls(
        expected_calls, any_order=True)


147
148
149
150
151
@pytest.mark.parametrize('log_model', (True, False))
def test_mlflow_hook(log_model):
    """MlflowLoggerHook should record metrics and, optionally, the model."""
    # mlflow is an optional dependency; mock both the package and the
    # pytorch flavour module before the hook imports them.
    sys.modules['mlflow'] = MagicMock()
    sys.modules['mlflow.pytorch'] = MagicMock()

    runner = _build_demo_runner()
    data = DataLoader(torch.ones((5, 2)))

    mlflow_hook = MlflowLoggerHook(exp_name='test', log_model=log_model)
    runner.register_hook(mlflow_hook)
    runner.run([data, data], [('train', 1), ('val', 1)], 1)
    shutil.rmtree(runner.work_dir)

    mlflow_hook.mlflow.set_experiment.assert_called_with('test')
    mlflow_hook.mlflow.log_metrics.assert_called_with(
        {
            'learning_rate': 0.02,
            'momentum': 0.95
        }, step=5)
    if log_model:
        # The model artifact is uploaded only when log_model is requested.
        mlflow_hook.mlflow_pytorch.log_model.assert_called_with(
            runner.model, 'models')
    else:
        assert not mlflow_hook.mlflow_pytorch.log_model.called


def test_wandb_hook():
    """WandbLoggerHook should init a run, log metrics, and join at the end."""
    # wandb is an optional dependency; mock it before the hook imports it.
    sys.modules['wandb'] = MagicMock()

    runner = _build_demo_runner()
    wandb_hook = WandbLoggerHook()
    data = DataLoader(torch.ones((5, 2)))

    runner.register_hook(wandb_hook)
    runner.run([data, data], [('train', 1), ('val', 1)], 1)
    shutil.rmtree(runner.work_dir)

    wandb_hook.wandb.init.assert_called_with()
    wandb_hook.wandb.log.assert_called_with(
        {
            'learning_rate': 0.02,
            'momentum': 0.95
        }, step=5)
    wandb_hook.wandb.join.assert_called_with()
Wenwei Zhang's avatar
Wenwei Zhang committed
190
191
192


def _build_demo_runner():
    """Build an ``EpochBasedRunner`` around a tiny linear model.

    The runner uses SGD with lr=0.02 and momentum=0.95, writes to a fresh
    temporary work directory, logs with a ``TextLoggerHook`` at interval 1,
    and is returned ready for ``runner.run``. Callers are responsible for
    removing ``runner.work_dir`` when they are done.
    """

    class DemoModel(nn.Module):
        """Single linear layer whose raw output doubles as the loss."""

        def __init__(self):
            super().__init__()
            self.linear = nn.Linear(2, 1)

        def forward(self, x):
            return self.linear(x)

        def train_step(self, x, optimizer, **kwargs):
            return dict(loss=self(x))

        def val_step(self, x, optimizer, **kwargs):
            return dict(loss=self(x))

    demo_model = DemoModel()
    sgd = torch.optim.SGD(demo_model.parameters(), lr=0.02, momentum=0.95)

    runner = EpochBasedRunner(
        model=demo_model,
        work_dir=tempfile.mkdtemp(),
        optimizer=sgd,
        logger=logging.getLogger())
    runner.register_logger_hooks(
        dict(interval=1, hooks=[dict(type='TextLoggerHook')]))
    return runner