#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import os
import os.path as osp

import torch
from typing import List
from colossalai.context import ParallelMode
from colossalai.core import global_context as gpc
from colossalai.registry import HOOKS
from colossalai.logging import DistributedLogger
from colossalai.utils import report_memory_usage, is_dp_rank_0, \
    is_tp_rank_0, is_no_pp_or_last_stage, MultiTimer
from ._base_hook import BaseHook


def _format_number(val, prec=5):
    if isinstance(val, float):
        return f'{val:.{prec}g}'
    elif torch.is_tensor(val) and torch.is_floating_point(val):
        return f'{val.item():.{prec}g}'
    return val
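
# Illustrative behaviour of the helper above (values are what Python's
# ``:.5g`` formatting produces; non-float values pass through unchanged):
#   _format_number(3.14159265)            -> '3.1416'
#   _format_number(torch.tensor(2.0 / 3)) -> '0.66667'
#   _format_number(42)                    -> 42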


class LogByEpochHook(BaseHook):
    """hook to log by epoch

    :param logger: logger for the log
    :param interval: Recording interval, defaults to 1
    :type interval: int, optional
    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 1
    :type priority: int, optional
    :param trainer: Trainer attached with current hook
    """
    def __init__(self,
                 logger,
                 interval: int = 1,
                 priority: int = 1):
        super().__init__(priority)
        self.logger = logger
        self._interval = interval
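        # e.g. with interval=2, logging fires on epochs 0, 2, 4, ...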

    def _is_epoch_to_log(self, trainer):
        return trainer.cur_epoch % self._interval == 0


@HOOKS.register_module
class LogMetricByStepHook(BaseHook):
    """hook to log metric by step

    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 10
    :type priority: int, optional
    :param trainer: Trainer attached with current hook
    """
    def __init__(self, priority: int = 10):
        super().__init__(priority)

    def after_train_iter(self, trainer, *args):
        trainer.states['step_metrics'] = dict()
        for metric_name, metric_calculator in trainer.states['metrics']['train'].items():
            trainer.states['step_metrics'][metric_name.lower()] = \
                f'{_format_number(metric_calculator.get_last_step_value())}'
    
    def after_test_iter(self, trainer, *args):
        trainer.states['step_metrics'] = dict()
        for metric_name, metric_calculator in trainer.states['metrics']['test'].items():
            trainer.states['step_metrics'][metric_name.lower()] = \
                f'{_format_number(metric_calculator.get_last_step_value())}'
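
# A minimal usage sketch (hypothetical; assumes a colossalai Trainer that
# accepts hooks and has metrics registered in trainer.states['metrics']):
#   hook = LogMetricByStepHook()
#   trainer.fit(train_dataloader, epochs=num_epochs, hooks=[hook])
# After every iteration, trainer.states['step_metrics'] holds the formatted
# last-step values, e.g. {'loss': '0.69315'}.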


@HOOKS.register_module
class LogMetricByEpochHook(LogByEpochHook):
    """Specialized Hook to record the metric to log.

77
78
    :param logger: logger for the log
    :param interval: Recording interval, defaults to 1
zbian's avatar
zbian committed
79
    :type interval: int, optional
80
    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 10
zbian's avatar
zbian committed
81
    :type priority: int, optional
82
83
    :param trainer: Trainer attached with current hook
    :param mode: Mode of metrics, 'train' and 'test'
zbian's avatar
zbian committed
84
85
    """

    def __init__(self,
                 logger,
                 interval: int = 1,
                 priority: int = 10) -> None:
        super().__init__(logger, interval, priority)
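        # log from a single rank only: data-parallel rank 0, tensor-parallel
        # rank 0, and (when pipeline parallelism is on) the last stage, which
        # is where the final loss and metrics live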
        self._is_rank_to_log = is_dp_rank_0() and is_tp_rank_0() and is_no_pp_or_last_stage()

    def _get_str(self, trainer, mode):
        msg = []
        for metric_name, metric_calculator in trainer.states['metrics'][mode].items():
            msg.append(
                f'{metric_name} = {_format_number(metric_calculator.get_accumulated_value())}')
        msg = ' | '.join(msg)
        return msg

    def after_train_epoch(self, trainer):
        if self._is_epoch_to_log(trainer):
            msg = self._get_str(trainer=trainer, mode='train')

            if self._is_rank_to_log:
                self.logger.info(f'[Epoch {trainer.cur_epoch} / Train]: {msg}')

    def after_test_epoch(self, trainer):
        if self._is_epoch_to_log(trainer):
            msg = self._get_str(trainer=trainer, mode='test')
            if self._is_rank_to_log:
                self.logger.info(f'[Epoch {trainer.cur_epoch} / Test]: {msg}')
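
# With a loss and an accuracy metric registered, the epoch summary line looks
# something like (illustrative values):
#   [Epoch 3 / Train]: Loss = 0.12345 | Accuracy = 0.98765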


@HOOKS.register_module
class TensorboardHook(BaseHook):
    """Specialized Hook to record the metric to Tensorboard.

    :param log_dir: Directory of log
122
123
124
125
126
127
    :type log_dir: str
    :param ranks: ranks of processors
    :type ranks: typing.List
    :param parallel_mode: Parallel mode, defaults to colossalai.context.parallel_mode.ParallelMode.GLOBAL
    :type parallel_mode: colossalai.context.parallel_mode.ParallelMode, optional
    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 10
zbian's avatar
zbian committed
128
    :type priority: int, optional
129
130
131
    :param trainer: Trainer attached with current hook
    :param mode: Mode of metrics, 'train' and 'test'
    :type mode: str
zbian's avatar
zbian committed
132
133
    """

    def __init__(self,
                 log_dir: str,
                 ranks: List = None,
                 parallel_mode: ParallelMode = ParallelMode.GLOBAL,
                 priority: int = 10,
                 ) -> None:
        super().__init__(priority=priority)
        from torch.utils.tensorboard import SummaryWriter

        # create log dir
        if not gpc.is_initialized(ParallelMode.GLOBAL) or gpc.get_global_rank() == 0:
            os.makedirs(log_dir, exist_ok=True)

        # determine the ranks to generate tensorboard logs
        self._is_valid_rank_to_log = False
        if not gpc.is_initialized(parallel_mode):
            self._is_valid_rank_to_log = True
        else:
            local_rank = gpc.get_local_rank(parallel_mode)

            if ranks is None or local_rank in ranks:
                self._is_valid_rank_to_log = True

        # in pipeline-parallel training only the last stage can log metrics
        if gpc.is_initialized(ParallelMode.PIPELINE) and \
                not gpc.is_last_rank(ParallelMode.PIPELINE) and self._is_valid_rank_to_log:
            raise ValueError("Tensorboard hook can only log on the last rank of pipeline process group")

        if self._is_valid_rank_to_log:
            # determine this process's rank within the parallel group
            if gpc.is_initialized(parallel_mode):
                rank = gpc.get_local_rank(parallel_mode)
            else:
                rank = 0

            # create workspace
            log_dir = osp.join(log_dir, f'{parallel_mode}_rank_{rank}')
            os.makedirs(log_dir, exist_ok=True)

            self.writer = SummaryWriter(log_dir=log_dir, filename_suffix=f'_rank_{rank}')

176
    def _log_by_iter(self, trainer, mode: str):
        for metric_name, metric_calculator in trainer.states['metrics'][mode].items():
            if metric_calculator.epoch_only:
                continue
            val = metric_calculator.get_last_step_value()

            if self._is_valid_rank_to_log:
                self.writer.add_scalar(f'{metric_name}/{mode}', val, trainer.cur_step)

    def _log_by_epoch(self, trainer, mode: str):
        for metric_name, metric_calculator in trainer.states['metrics'][mode].items():
            if metric_calculator.epoch_only:
                val = metric_calculator.get_accumulated_value()
                if self._is_valid_rank_to_log:
                    self.writer.add_scalar(f'{metric_name}/{mode}', val, trainer.cur_step)

    def after_test_iter(self, trainer, *args):
        self._log_by_iter(trainer, mode='test')

    def after_test_epoch(self, trainer):
        self._log_by_epoch(trainer, mode='test')

    def after_train_iter(self, trainer, *args):
        self._log_by_iter(trainer, mode='train')

    def after_train_epoch(self, trainer):
        self._log_by_epoch(trainer, mode='train')
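
# Illustrative on-disk layout (hypothetical values): with log_dir='./tb_logs'
# and the default parallel_mode, rank 0's writer ends up in something like
#   ./tb_logs/ParallelMode.GLOBAL_rank_0/
# and scalars are tagged '<metric_name>/train' and '<metric_name>/test'.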


@HOOKS.register_module
class LogTimingByEpochHook(LogByEpochHook):
    """Specialized Hook to write timing record to log.

208
209
210
211
212
    :param timer: Timer for the hook
    :type timer: colossalai.utils.MultiTimer
    :param logger: Logger for the log
    :type logger: colossalai.logging.DistributedLogger
    :param interval: Recording interval, defaults to 1
zbian's avatar
zbian committed
213
    :type interval: int, optional
214
    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 10
zbian's avatar
zbian committed
215
    :type priority: int, optional
216
    :param log_eval: Whether writes in evaluation, defaults to True
zbian's avatar
zbian committed
217
    :type log_eval: bool, optional
218
219
220
221
    :param ignore_num_train_steps: Number of training steps to ignore, defaults to 0
    :type ignore_num_train_steps: int, optional
    :param mode: Mode of metrics, 'train' and 'test'
    :param trainer: Trainer attached with current hook
zbian's avatar
zbian committed
222
223
    """
    def __init__(self,
                 timer: MultiTimer,
                 logger: DistributedLogger,
                 interval: int = 1,
                 priority: int = 10,
                 log_eval: bool = True,
                 ignore_num_train_steps: int = 0) -> None:
        super().__init__(logger=logger, interval=interval, priority=priority)
        self._timer = timer
        self._log_eval = log_eval
        self._is_rank_to_log = is_dp_rank_0() and is_tp_rank_0()

        # drop the first few training steps, whose unstable timings would
        # otherwise skew the history mean
        self._ignore_num_train_steps = ignore_num_train_steps
        self._is_train_step_history_trimmed = False

    def _get_message(self, mode):
        msg = []
        for timer_name, timer in self._timer:
            if timer_name.startswith(mode):
                last_elapsed_time = timer.get_elapsed_time()
                if timer.has_history:
                    if timer_name == 'Train-step' and not self._is_train_step_history_trimmed:
                        timer._history = timer._history[self._ignore_num_train_steps:]
                        self._is_train_step_history_trimmed = True
                    history_mean = timer.get_history_mean()
                    msg.append(
                        f'{timer_name}: last = {_format_number(last_elapsed_time)} s, mean = {_format_number(history_mean)} s'
                    )
                else:
                    msg.append(f'{timer_name}: last = {_format_number(last_elapsed_time)} s')

        msg = ' | '.join(msg)
        return msg

    def after_train_epoch(self, trainer):
        """Writes log after finishing a training epoch.
        """
        if self._is_epoch_to_log(trainer) and self._is_rank_to_log:
            msg = self._get_message('Train')
            self.logger.info(f'[Epoch {trainer.cur_epoch} / Train]: {msg}, #steps/epoch = {trainer.steps_per_epoch}')

    def after_test_epoch(self, trainer):
        """Writes log after finishing a testing epoch.
        """
        if self._is_epoch_to_log(trainer) and self._is_rank_to_log and self._log_eval:
            msg = self._get_message('Test')
            self.logger.info(f'[Epoch {trainer.cur_epoch} / Test]: {msg}')
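
# A typical line written by this hook looks like (illustrative values):
#   [Epoch 1 / Train]: Train-step: last = 0.12345 s, mean = 0.11832 s, #steps/epoch = 100
# Timers whose names do not start with 'Train'/'Test' are skipped in the
# corresponding epoch summary.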


@HOOKS.register_module
class LogMemoryByEpochHook(LogByEpochHook):
    """Specialized Hook to write memory usage record to log.

279
280
281
    :param logger: Logger for the log
    :type logger: colossalai.logging.DistributedLogger
    :param interval: Recording interval, defaults to 1
zbian's avatar
zbian committed
282
    :type interval: int, optional
283
    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 10
zbian's avatar
zbian committed
284
    :type priority: int, optional
285
    :param log_eval: Whether writes in evaluation, defaults to True
zbian's avatar
zbian committed
286
287
288
    :type log_eval: bool, optional
    """
    def __init__(self,
                 logger: DistributedLogger,
                 interval: int = 1,
                 priority: int = 10,
                 log_eval: bool = True,
                 report_cpu: bool = False,  # accepted but currently unused
                 ) -> None:
        super().__init__(logger=logger, interval=interval, priority=priority)
        self._log_eval = log_eval
        self._is_rank_to_log = is_dp_rank_0() and is_tp_rank_0()

    def before_train(self, trainer):
        """Resets before training.
        """
        if self._is_epoch_to_log(trainer) and self._is_rank_to_log:
            report_memory_usage('Before-train', self.logger)

    def after_train_epoch(self, trainer):
        """Writes log after finishing a training epoch.
        """
        if self._is_epoch_to_log(trainer) and self._is_rank_to_log:
            report_memory_usage(f'[Epoch {trainer.cur_epoch} / Train]', self.logger)

    def after_test(self, trainer):
        """Reports after testing.
        """
        if self._is_epoch_to_log(trainer) and self._is_rank_to_log and self._log_eval:
            report_memory_usage(f'[Epoch {trainer.cur_epoch} / Test]', self.logger)
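
# Putting the hooks together (a minimal sketch; the Trainer construction and
# fit() signature follow typical colossalai usage and may differ by version):
#   from colossalai.logging import get_dist_logger
#   from colossalai.utils import MultiTimer
#
#   logger = get_dist_logger()
#   timer = MultiTimer()
#   hooks = [
#       LogMetricByStepHook(),
#       LogMetricByEpochHook(logger),
#       LogTimingByEpochHook(timer, logger),
#       LogMemoryByEpochHook(logger),
#       TensorboardHook(log_dir='./tb_logs'),
#   ]
#   trainer.fit(train_dataloader, epochs=num_epochs, hooks=hooks)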