"""Tests the hooks with runners.

CommandLine:
    pytest tests/test_hooks.py
    xdoctest tests/test_hooks.py zero
"""
import logging
import os.path as osp
import shutil
import sys
import tempfile
from unittest.mock import MagicMock, call

import pytest
import torch
import torch.nn as nn
from torch.nn.init import constant_
from torch.utils.data import DataLoader

from mmcv.runner import (CheckpointHook, EMAHook, IterTimerHook,
                         MlflowLoggerHook, PaviLoggerHook, WandbLoggerHook,
                         build_runner)
from mmcv.runner.hooks.lr_updater import CosineRestartLrUpdaterHook


def test_checkpoint_hook():
    """xdoctest -m tests/test_runner/test_hooks.py test_checkpoint_hook."""

    # test epoch based runner
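    # CheckpointHook records the path of the latest checkpoint in
    # runner.meta['hook_msgs']['last_ckpt'], which is what we assert on below.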
    loader = DataLoader(torch.ones((5, 2)))
    runner = _build_demo_runner('EpochBasedRunner', max_epochs=1)
    runner.meta = dict()
    checkpointhook = CheckpointHook(interval=1, by_epoch=True)
    runner.register_hook(checkpointhook)
    runner.run([loader], [('train', 1)])
    assert runner.meta['hook_msgs']['last_ckpt'] == osp.join(
        runner.work_dir, 'epoch_1.pth')
    shutil.rmtree(runner.work_dir)

    # test iter based runner
    runner = _build_demo_runner(
        'IterBasedRunner', max_iters=1, max_epochs=None)
    runner.meta = dict()
    checkpointhook = CheckpointHook(interval=1, by_epoch=False)
    runner.register_hook(checkpointhook)
    runner.run([loader], [('train', 1)])
    assert runner.meta['hook_msgs']['last_ckpt'] == osp.join(
        runner.work_dir, 'iter_1.pth')
    shutil.rmtree(runner.work_dir)


def test_ema_hook():
    """xdoctest -m tests/test_hooks.py test_ema_hook."""

    class DemoModel(nn.Module):

        def __init__(self):
            super().__init__()
            self.conv = nn.Conv2d(
                in_channels=1,
                out_channels=2,
                kernel_size=1,
                padding=1,
                bias=True)
            self._init_weight()

        def _init_weight(self):
            constant_(self.conv.weight, 0)
            constant_(self.conv.bias, 0)

        def forward(self, x):
            return self.conv(x).sum()

        def train_step(self, x, optimizer, **kwargs):
            return dict(loss=self(x))

        def val_step(self, x, optimizer, **kwargs):
            return dict(loss=self(x))

    loader = DataLoader(torch.ones((1, 1, 1, 1)))
    runner = _build_demo_runner()
    demo_model = DemoModel()
    runner.model = demo_model
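    # EMAHook keeps the moving averages in extra `ema_*` buffers on the model,
    # so they should appear in the checkpoint's state_dict below.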
    emahook = EMAHook(momentum=0.1, interval=2, warm_up=100, resume_from=None)
    checkpointhook = CheckpointHook(interval=1, by_epoch=True)
    runner.register_hook(emahook, priority='HIGHEST')
    runner.register_hook(checkpointhook)
    runner.run([loader, loader], [('train', 1), ('val', 1)])
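    # The conv weights start at zero and the runner's optimizer was built for
    # a different model inside _build_demo_runner, so neither the raw weights
    # nor their EMA buffers should have moved away from zero.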
    checkpoint = torch.load(f'{runner.work_dir}/epoch_1.pth')
    contain_ema_buffer = False
    for name, value in checkpoint['state_dict'].items():
        if 'ema' in name:
            contain_ema_buffer = True
            assert value.sum() == 0
            value.fill_(1)
        else:
            assert value.sum() == 0
    assert contain_ema_buffer
    torch.save(checkpoint, f'{runner.work_dir}/epoch_1.pth')
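    # The EMA buffers were overwritten with ones and saved back, so resuming
    # from this checkpoint should restore the modified EMA state.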
    work_dir = runner.work_dir
    resume_ema_hook = EMAHook(
        momentum=0.5, warm_up=0, resume_from=f'{work_dir}/epoch_1.pth')
    runner = _build_demo_runner(max_epochs=2)
    runner.model = demo_model
    runner.register_hook(resume_ema_hook, priority='HIGHEST')
    checkpointhook = CheckpointHook(interval=1, by_epoch=True)
    runner.register_hook(checkpointhook)
    runner.run([loader, loader], [('train', 1), ('val', 1)])
    checkpoint = torch.load(f'{runner.work_dir}/epoch_2.pth')
    contain_ema_buffer = False
    for name, value in checkpoint['state_dict'].items():
        if 'ema' in name:
            contain_ema_buffer = True
            assert value.sum() == 2
        else:
            assert value.sum() == 1
    assert contain_ema_buffer
    shutil.rmtree(runner.work_dir)
    shutil.rmtree(work_dir)


def test_pavi_hook():
    sys.modules['pavi'] = MagicMock()
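    # Mock out pavi so PaviLoggerHook can run without the real package and its
    # writer calls can be inspected below.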

    loader = DataLoader(torch.ones((5, 2)))
    runner = _build_demo_runner()
    runner.meta = dict(config_dict=dict(lr=0.02, gpu_ids=range(1)))
    hook = PaviLoggerHook(add_graph=False, add_last_ckpt=True)
    runner.register_hook(hook)
    runner.run([loader, loader], [('train', 1), ('val', 1)])
    shutil.rmtree(runner.work_dir)

    assert hasattr(hook, 'writer')
    hook.writer.add_scalars.assert_called_with('val', {
        'learning_rate': 0.02,
        'momentum': 0.95
    }, 1)
    hook.writer.add_snapshot_file.assert_called_with(
        tag=runner.work_dir.split('/')[-1],
        snapshot_file_path=osp.join(runner.work_dir, 'epoch_1.pth'),
        iteration=1)


def test_sync_buffers_hook():
    loader = DataLoader(torch.ones((5, 2)))
    runner = _build_demo_runner()
    runner.register_hook_from_cfg(dict(type='SyncBuffersHook'))
    runner.run([loader, loader], [('train', 1), ('val', 1)])
    shutil.rmtree(runner.work_dir)


def test_momentum_runner_hook():
    """xdoctest -m tests/test_hooks.py test_momentum_runner_hook."""
    sys.modules['pavi'] = MagicMock()
    loader = DataLoader(torch.ones((10, 2)))
    runner = _build_demo_runner()

    # add momentum scheduler
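    # target_ratio is relative to the base momentum (0.95), so the momentum
    # cycles from 0.95 down to 0.95 * (0.85 / 0.95) = 0.85 and back up.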
    hook_cfg = dict(
        type='CyclicMomentumUpdaterHook',
        by_epoch=False,
        target_ratio=(0.85 / 0.95, 1),
        cyclic_times=1,
        step_ratio_up=0.4)
    runner.register_hook_from_cfg(hook_cfg)

    # add LR scheduler
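    # target_ratio=(10, 1) scales the base lr (0.02) up to 0.2 at the cycle
    # peak before returning it to the base value.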
    hook_cfg = dict(
        type='CyclicLrUpdaterHook',
        by_epoch=False,
        target_ratio=(10, 1),
        cyclic_times=1,
        step_ratio_up=0.4)
    runner.register_hook_from_cfg(hook_cfg)
    runner.register_hook_from_cfg(dict(type='IterTimerHook'))

    # add pavi hook
    hook = PaviLoggerHook(interval=1, add_graph=False, add_last_ckpt=True)
    runner.register_hook(hook)
    runner.run([loader], [('train', 1)])
    shutil.rmtree(runner.work_dir)

    # TODO: use a more elegant way to check values
    assert hasattr(hook, 'writer')
    calls = [
        call('train', {
            'learning_rate': 0.01999999999999999,
            'momentum': 0.95
        }, 1),
        call('train', {
            'learning_rate': 0.2,
            'momentum': 0.85
        }, 5),
        call('train', {
            'learning_rate': 0.155,
            'momentum': 0.875
        }, 7),
    ]
    hook.writer.add_scalars.assert_has_calls(calls, any_order=True)


def test_cosine_runner_hook():
    """xdoctest -m tests/test_hooks.py test_cosine_runner_hook."""
    sys.modules['pavi'] = MagicMock()
    loader = DataLoader(torch.ones((10, 2)))
    runner = _build_demo_runner()

    # add momentum scheduler
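    # min_momentum_ratio is relative to the base momentum (0.95), so the
    # cosine schedule anneals the momentum towards 0.95 * (0.99 / 0.95) = 0.99.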
    hook_cfg = dict(
        type='CosineAnnealingMomentumUpdaterHook',
        min_momentum_ratio=0.99 / 0.95,
        by_epoch=False,
        warmup_iters=2,
        warmup_ratio=0.9 / 0.95)
    runner.register_hook_from_cfg(hook_cfg)

    # add LR scheduler
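    # min_lr_ratio=0 anneals the lr from the base 0.02 towards 0.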
    hook_cfg = dict(
        type='CosineAnnealingLrUpdaterHook',
        by_epoch=False,
        min_lr_ratio=0,
        warmup_iters=2,
        warmup_ratio=0.9)
    runner.register_hook_from_cfg(hook_cfg)
    runner.register_hook_from_cfg(dict(type='IterTimerHook'))
    # add pavi hook
    hook = PaviLoggerHook(interval=1, add_graph=False, add_last_ckpt=True)
    runner.register_hook(hook)
    runner.run([loader], [('train', 1)])
    shutil.rmtree(runner.work_dir)

    # TODO: use a more elegant way to check values
    assert hasattr(hook, 'writer')
    calls = [
        call('train', {
            'learning_rate': 0.02,
            'momentum': 0.95
        }, 1),
        call('train', {
            'learning_rate': 0.01,
            'momentum': 0.97
        }, 6),
        call('train', {
            'learning_rate': 0.0004894348370484647,
            'momentum': 0.9890211303259032
        }, 10)
    ]
    hook.writer.add_scalars.assert_has_calls(calls, any_order=True)


def test_cosine_restart_lr_update_hook():
    """Test CosineRestartLrUpdaterHook."""
    with pytest.raises(AssertionError):
        # min_lr and min_lr_ratio are mutually exclusive;
        # exactly one of them should be specified
        CosineRestartLrUpdaterHook(
            by_epoch=False,
            periods=[2, 10],
            restart_weights=[0.5, 0.5],
            min_lr=0.1,
            min_lr_ratio=0)

    with pytest.raises(AssertionError):
        # periods and restart_weights should have the same length
        CosineRestartLrUpdaterHook(
            by_epoch=False,
            periods=[2, 10],
            restart_weights=[0.5],
            min_lr_ratio=0)

    with pytest.raises(ValueError):
        # the last element of cumulative_periods (5 + 2 = 7)
        # must be >= the total number of iters (10)
        sys.modules['pavi'] = MagicMock()
        loader = DataLoader(torch.ones((10, 2)))
        runner = _build_demo_runner()

        # add cosine restart LR scheduler
        hook = CosineRestartLrUpdaterHook(
            by_epoch=False,
            periods=[5, 2],  # cumulative_periods [5, 7 (5 + 2)]
            restart_weights=[0.5, 0.5],
            min_lr=0.0001)
        runner.register_hook(hook)
        runner.register_hook(IterTimerHook())

        # add pavi hook
        hook = PaviLoggerHook(interval=1, add_graph=False, add_last_ckpt=True)
        runner.register_hook(hook)
        runner.run([loader], [('train', 1)])
        shutil.rmtree(runner.work_dir)

    sys.modules['pavi'] = MagicMock()
    loader = DataLoader(torch.ones((10, 2)))
    runner = _build_demo_runner()

    # add cosine restart LR scheduler
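    # restart_weights rescale the base lr (0.02) at each restart, so both
    # 5-iter periods start from 0.02 * 0.5 = 0.01 and anneal towards 0.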
    hook = CosineRestartLrUpdaterHook(
        by_epoch=False,
        periods=[5, 5],
        restart_weights=[0.5, 0.5],
        min_lr_ratio=0)
    runner.register_hook(hook)
    runner.register_hook(IterTimerHook())

    # add pavi hook
    hook = PaviLoggerHook(interval=1, add_graph=False, add_last_ckpt=True)
    runner.register_hook(hook)
    runner.run([loader], [('train', 1)])
    shutil.rmtree(runner.work_dir)

    # TODO: use a more elegant way to check values
    assert hasattr(hook, 'writer')
    calls = [
        call('train', {
            'learning_rate': 0.01,
            'momentum': 0.95
        }, 1),
        call('train', {
            'learning_rate': 0.01,
            'momentum': 0.95
        }, 6),
        call('train', {
            'learning_rate': 0.0009549150281252633,
            'momentum': 0.95
        }, 10)
    ]
    hook.writer.add_scalars.assert_has_calls(calls, any_order=True)


@pytest.mark.parametrize('log_model', (True, False))
def test_mlflow_hook(log_model):
    sys.modules['mlflow'] = MagicMock()
    sys.modules['mlflow.pytorch'] = MagicMock()
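    # Mock out mlflow so the hook's logging calls can be asserted on without
    # the real package installed.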

    runner = _build_demo_runner()
    loader = DataLoader(torch.ones((5, 2)))

    hook = MlflowLoggerHook(exp_name='test', log_model=log_model)
    runner.register_hook(hook)
    runner.run([loader, loader], [('train', 1), ('val', 1)])
    shutil.rmtree(runner.work_dir)

    hook.mlflow.set_experiment.assert_called_with('test')
    hook.mlflow.log_metrics.assert_called_with(
        {
            'learning_rate': 0.02,
            'momentum': 0.95
        }, step=1)
    if log_model:
        hook.mlflow_pytorch.log_model.assert_called_with(
            runner.model, 'models')
    else:
        assert not hook.mlflow_pytorch.log_model.called


def test_wandb_hook():
    sys.modules['wandb'] = MagicMock()
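    # Mock out wandb so the hook's init/log/join calls can be asserted on
    # without the real package installed.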
    runner = _build_demo_runner()
    hook = WandbLoggerHook()
    loader = DataLoader(torch.ones((5, 2)))

    runner.register_hook(hook)
    runner.run([loader, loader], [('train', 1), ('val', 1)])
    shutil.rmtree(runner.work_dir)

    hook.wandb.init.assert_called_with()
    hook.wandb.log.assert_called_with(
        {
            'learning_rate': 0.02,
            'momentum': 0.95
        }, step=1)
    hook.wandb.join.assert_called_with()


def _build_demo_runner(runner_type='EpochBasedRunner',
                       max_epochs=1,
                       max_iters=None):
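    """Build a runner around a tiny linear model for exercising hooks."""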

    class Model(nn.Module):

        def __init__(self):
            super().__init__()
            self.linear = nn.Linear(2, 1)

        def forward(self, x):
            return self.linear(x)

        def train_step(self, x, optimizer, **kwargs):
            return dict(loss=self(x))

        def val_step(self, x, optimizer, **kwargs):
            return dict(loss=self(x))

    model = Model()

    optimizer = torch.optim.SGD(model.parameters(), lr=0.02, momentum=0.95)

    log_config = dict(
        interval=1, hooks=[
            dict(type='TextLoggerHook'),
        ])

    tmp_dir = tempfile.mkdtemp()
    runner = build_runner(
        dict(type=runner_type),
        default_args=dict(
            model=model,
            work_dir=tmp_dir,
            optimizer=optimizer,
            logger=logging.getLogger(),
            max_epochs=max_epochs,
            max_iters=max_iters))
    runner.register_checkpoint_hook(dict(interval=1))
    runner.register_logger_hooks(log_config)
    return runner