_log_hook.py 12.1 KB
Newer Older
zbian's avatar
zbian committed
1
2
3
4
5
6
7
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import os
import os.path as osp

import torch
Frank Lee's avatar
Frank Lee committed
8
9
from typing import List
from decimal import Decimal
zbian's avatar
zbian committed
10
11
12
from colossalai.context import ParallelMode
from colossalai.core import global_context as gpc
from colossalai.registry import HOOKS
Frank Lee's avatar
Frank Lee committed
13
14
15
from colossalai.logging import DistributedLogger
from colossalai.utils import report_memory_usage, is_dp_rank_0, \
    is_tp_rank_0, is_no_pp_or_last_stage, MultiTimer
16
from ._base_hook import BaseHook
zbian's avatar
zbian committed
17
18


アマデウス's avatar
アマデウス committed
19
def _format_number(val, prec=5):
zbian's avatar
zbian committed
20
    if isinstance(val, float):
アマデウス's avatar
アマデウス committed
21
        return f'{val:.{prec}g}'
Frank Lee's avatar
Frank Lee committed
22
    elif torch.is_tensor(val) and torch.is_floating_point(val):
アマデウス's avatar
アマデウス committed
23
        return f'{val.item():.{prec}g}'
zbian's avatar
zbian committed
24
25
26
    return val


Frank Lee's avatar
Frank Lee committed
27
class LogByEpochHook(BaseHook):
    """Base class for hooks that emit output once every few epochs.

    :param logger: Logger for the log
    :param interval: Recording interval, defaults to 1
    :type interval: int, optional
    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 1
    :type priority: int, optional
    """

    def __init__(self, logger, interval: int = 1, priority: int = 1):
        super().__init__(priority)
        self.logger = logger
        self._interval = interval

    def _is_epoch_to_log(self, trainer):
        """Return True when the current epoch falls on the logging interval."""
        return (trainer.cur_epoch % self._interval) == 0
zbian's avatar
zbian committed
47
48


アマデウス's avatar
アマデウス committed
49
50
@HOOKS.register_module
class LogMetricByStepHook(BaseHook):
    """Hook to log metric by step

    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 10
    :type priority: int, optional
    """

    def __init__(self, priority: int = 10):
        super().__init__(priority)

    def _store_step_metrics(self, trainer, mode: str):
        # Snapshot the latest per-step value of every metric into
        # trainer.states['step_metrics'] (lower-cased names, formatted as
        # strings) so other components can display them.
        trainer.states['step_metrics'] = dict()
        for metric_name, metric_calculator in trainer.states['metrics'][mode].items():
            trainer.states['step_metrics'][metric_name.lower()] = \
                f'{_format_number(metric_calculator.get_last_step_value())}'

    def after_train_iter(self, trainer, *args):
        """Record training metrics after each training iteration."""
        self._store_step_metrics(trainer, 'train')

    def after_test_iter(self, trainer, *args):
        """Record testing metrics after each testing iteration."""
        self._store_step_metrics(trainer, 'test')


zbian's avatar
zbian committed
73
@HOOKS.register_module
class LogMetricByEpochHook(LogByEpochHook):
    """Specialized hook to record the metric to log.

    :param logger: Logger for the log
    :param interval: Recording interval, defaults to 1
    :type interval: int, optional
    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 10
    :type priority: int, optional
    """

    def __init__(self,
                 logger,
                 interval: int = 1,
                 priority: int = 10) -> None:
        super().__init__(logger, interval, priority)
        # Only one rank per data-/tensor-parallel group (and only the last
        # pipeline stage) writes to the log, to avoid duplicated lines.
        self._is_rank_to_log = is_dp_rank_0() and is_tp_rank_0() and is_no_pp_or_last_stage()

    def _get_str(self, trainer, mode):
        """Join the accumulated value of every metric of ``mode`` into one line."""
        msg = []
        for metric_name, metric_calculator in trainer.states['metrics'][mode].items():
            msg.append(
                f'{metric_name} = {_format_number(metric_calculator.get_accumulated_value())}')
        msg = ' | '.join(msg)
        return msg

    def after_train_epoch(self, trainer):
        """Log the accumulated training metrics after each logged epoch."""
        if self._is_epoch_to_log(trainer):
            # NOTE(review): _get_str is evaluated on every rank, with only the
            # emission rank-guarded below — presumably get_accumulated_value()
            # needs all ranks to participate; do not hoist the guard. TODO confirm.
            msg = self._get_str(trainer=trainer, mode='train')
            if self._is_rank_to_log:
                self.logger.info(f'[Epoch {trainer.cur_epoch} / Train]: {msg}')

    def after_test_epoch(self, trainer):
        """Log the accumulated testing metrics after each logged epoch."""
        if self._is_epoch_to_log(trainer):
            msg = self._get_str(trainer=trainer, mode='test')
            if self._is_rank_to_log:
                self.logger.info(f'[Epoch {trainer.cur_epoch} / Test]: {msg}')
zbian's avatar
zbian committed
113
114
115


@HOOKS.register_module
class TensorboardHook(BaseHook):
    """Specialized hook to record the metric to Tensorboard.

    :param log_dir: Directory of log
    :type log_dir: str
    :param ranks: Ranks of processors
    :type ranks: typing.List
    :param parallel_mode: Parallel mode, defaults to colossalai.context.parallel_mode.ParallelMode.GLOBAL
    :type parallel_mode: :class:`colossalai.context.parallel_mode.ParallelMode`, optional
    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 10
    :type priority: int, optional
    """

    def __init__(self,
                 log_dir: str,
                 ranks: List = None,
                 parallel_mode: ParallelMode = ParallelMode.GLOBAL,
                 priority: int = 10,
                 ) -> None:
        super().__init__(priority=priority)
        # Imported lazily so the module can be loaded without tensorboard installed.
        from torch.utils.tensorboard import SummaryWriter

        # create log dir (only global rank 0 when distributed is initialized)
        if not gpc.is_initialized(ParallelMode.GLOBAL) or gpc.get_global_rank() == 0:
            os.makedirs(log_dir, exist_ok=True)

        # determine the ranks to generate tensorboard logs
        # When parallel_mode is not initialized every process logs; otherwise
        # only the ranks listed in `ranks` (or all local ranks if ranks is None).
        self._is_valid_rank_to_log = False
        if not gpc.is_initialized(parallel_mode):
            self._is_valid_rank_to_log = True
        else:
            local_rank = gpc.get_local_rank(parallel_mode)

            if ranks is None or local_rank in ranks:
                self._is_valid_rank_to_log = True

        # check that, under pipeline parallelism, only the last pipeline stage
        # was selected to log — earlier stages do not hold the final metrics
        if gpc.is_initialized(ParallelMode.PIPELINE) and \
                not gpc.is_last_rank(ParallelMode.PIPELINE) and self._is_valid_rank_to_log:
            raise ValueError("Tensorboard hook can only log on the last rank of pipeline process group")

        if self._is_valid_rank_to_log:
            # create workspace on only one rank
            if gpc.is_initialized(parallel_mode):
                rank = gpc.get_local_rank(parallel_mode)
            else:
                rank = 0

            # create workspace: one sub-directory per (parallel mode, rank)
            log_dir = osp.join(log_dir, f'{parallel_mode}_rank_{rank}')
            os.makedirs(log_dir, exist_ok=True)

            # NOTE: self.writer only exists on valid logging ranks; all uses
            # below are guarded by self._is_valid_rank_to_log.
            self.writer = SummaryWriter(log_dir=log_dir, filename_suffix=f'_rank_{rank}')

    def _log_by_iter(self, trainer, mode: str):
        """Write every non-epoch-only metric's last step value to tensorboard."""
        for metric_name, metric_calculator in trainer.states['metrics'][mode].items():
            if metric_calculator.epoch_only:
                continue
            # NOTE(review): val is computed on every rank and only the write is
            # rank-guarded — presumably get_last_step_value() may involve
            # collective communication; do not hoist the guard. TODO confirm.
            val = metric_calculator.get_last_step_value()

            if self._is_valid_rank_to_log:
                self.writer.add_scalar(f'{metric_name}/{mode}', val, trainer.cur_step)

    def _log_by_epoch(self, trainer, mode: str):
        """Write every epoch-only metric's accumulated value to tensorboard."""
        for metric_name, metric_calculator in trainer.states['metrics'][mode].items():
            if metric_calculator.epoch_only:
                val = metric_calculator.get_accumulated_value()
                if self._is_valid_rank_to_log:
                    self.writer.add_scalar(f'{metric_name}/{mode}', val, trainer.cur_step)

    def after_test_iter(self, trainer, *args):
        """Record per-step test metrics after each testing iteration."""
        self._log_by_iter(trainer, mode='test')

    def after_test_epoch(self, trainer):
        """Record epoch-only test metrics after each testing epoch."""
        self._log_by_epoch(trainer, mode='test')

    def after_train_iter(self, trainer, *args):
        """Record per-step train metrics after each training iteration."""
        self._log_by_iter(trainer, mode='train')

    def after_train_epoch(self, trainer):
        """Record epoch-only train metrics after each training epoch."""
        self._log_by_epoch(trainer, mode='train')
zbian's avatar
zbian committed
197
198
199


@HOOKS.register_module
class LogTimingByEpochHook(LogByEpochHook):
    """Specialized hook to write timing record to log.

    :param timer: Timer for the hook
    :type timer: :class:`colossalai.utils.MultiTimer`
    :param logger: Logger for the log
    :type logger: :class:`colossalai.logging.DistributedLogger`
    :param interval: Recording interval, defaults to 1
    :type interval: int, optional
    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 10
    :type priority: int, optional
    :param log_eval: Whether writes in evaluation, defaults to True
    :type log_eval: bool, optional
    :param ignore_num_train_steps: Number of training steps to ignore, defaults to 0
    :type ignore_num_train_steps: int, optional
    """

    def __init__(self,
                 timer: MultiTimer,
                 logger: DistributedLogger,
                 interval: int = 1,
                 priority: int = 10,
                 log_eval: bool = True,
                 ignore_num_train_steps: int = 0) -> None:
        super().__init__(logger=logger, interval=interval, priority=priority)
        self._timer = timer
        self._log_eval = log_eval
        # Only one rank per data-/tensor-parallel group (and only the last
        # pipeline stage) writes to the log, to avoid duplicated lines.
        self._is_rank_to_log = is_dp_rank_0() and is_tp_rank_0() and is_no_pp_or_last_stage()

        # extra handling to avoid the unstable readings of the first
        # few training steps to affect the history mean time
        self._ignore_num_train_steps = ignore_num_train_steps
        self._is_train_step_history_trimmed = False

    def _get_message(self, mode):
        """Build one log line from every timer whose name starts with ``mode``."""
        msg = []
        for timer_name, timer in self._timer:
            if timer_name.startswith(mode):
                last_elapsed_time = timer.get_elapsed_time()
                if timer.has_history:
                    if timer_name == 'Train-step' and not self._is_train_step_history_trimmed:
                        # One-time warm-up trim; reaches into the timer's
                        # private _history as there is no public API for it.
                        timer._history = timer._history[self._ignore_num_train_steps:]
                        self._is_train_step_history_trimmed = True
                    history_mean = timer.get_history_mean()
                    msg.append(
                        f'{timer_name}: last = {_format_number(last_elapsed_time)} s, mean = {_format_number(history_mean)} s'
                    )
                else:
                    msg.append(f'{timer_name}: last = {_format_number(last_elapsed_time)} s')

        msg = ' | '.join(msg)
        return msg

    def after_train_epoch(self, trainer):
        """Writes log after finishing a training epoch.
        """
        if self._is_epoch_to_log(trainer) and self._is_rank_to_log:
            msg = self._get_message('Train')
            self.logger.info(f'[Epoch {trainer.cur_epoch} / Train]: {msg} | #steps/epoch = {trainer.steps_per_epoch}')

    def after_test_epoch(self, trainer):
        """Writes log after finishing a testing epoch.
        """
        if self._is_epoch_to_log(trainer) and self._is_rank_to_log and self._log_eval:
            msg = self._get_message('Test')
            self.logger.info(f'[Epoch {trainer.cur_epoch} / Test]: {msg}')
zbian's avatar
zbian committed
267
268
269


@HOOKS.register_module
class LogMemoryByEpochHook(LogByEpochHook):
    """Specialized Hook to write memory usage record to log.

    :param logger: Logger for the log
    :type logger: colossalai.logging.DistributedLogger
    :param interval: Recording interval, defaults to 1
    :type interval: int, optional
    :param priority: Priority in the printing, hooks with small priority will be printed in front, defaults to 10
    :type priority: int, optional
    :param log_eval: Whether writes in evaluation, defaults to True
    :type log_eval: bool, optional
    """

    def __init__(self,
                 logger: DistributedLogger,
                 interval: int = 1,
                 priority: int = 10,
                 log_eval: bool = True,
                 report_cpu: bool = False,  # kept for API compatibility; not referenced
                 ) -> None:
        super().__init__(logger=logger, interval=interval, priority=priority)
        self._log_eval = log_eval
        # Report from a single rank per data-/tensor-parallel group only.
        self._is_rank_to_log = is_dp_rank_0() and is_tp_rank_0()

    def before_train(self, trainer):
        """Reports memory usage right before training starts."""
        if not (self._is_epoch_to_log(trainer) and self._is_rank_to_log):
            return
        report_memory_usage('Before-train', self.logger)

    def after_train_epoch(self, trainer):
        """Reports memory usage after each recorded training epoch."""
        if not (self._is_epoch_to_log(trainer) and self._is_rank_to_log):
            return
        report_memory_usage(f'[Epoch {trainer.cur_epoch} / Train]', self.logger)

    def after_test(self, trainer):
        """Reports memory usage once testing has finished."""
        if not (self._is_epoch_to_log(trainer) and self._is_rank_to_log and self._log_eval):
            return
        report_memory_usage(f'[Epoch {trainer.cur_epoch} / Test]', self.logger)