# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.

"""Megatron global variables."""

import os
import sys
import torch

from megatron import dist_signal_handler
from megatron.tokenizer import build_tokenizer
from .microbatches import build_num_microbatches_calculator
from .timers import Timers

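# Module-level singletons: set once by set_global_variables() and read
# back through the get_*() accessors below.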
_GLOBAL_ARGS = None
_GLOBAL_NUM_MICROBATCHES_CALCULATOR = None
_GLOBAL_TOKENIZER = None
_GLOBAL_TENSORBOARD_WRITER = None
_GLOBAL_ADLR_AUTORESUME = None
_GLOBAL_TIMERS = None
_GLOBAL_SIGNAL_HANDLER = None

def get_args():
    """Return arguments."""
    _ensure_var_is_initialized(_GLOBAL_ARGS, 'args')
    return _GLOBAL_ARGS


def get_num_microbatches():
    """Return the current number of microbatches."""
    return _GLOBAL_NUM_MICROBATCHES_CALCULATOR.get()


def get_current_global_batch_size():
    """Return the current global batch size."""
    return _GLOBAL_NUM_MICROBATCHES_CALCULATOR.get_current_global_batch_size()


def update_num_microbatches(consumed_samples, consistency_check=True):
    """Update the number of microbatches from the consumed sample count."""
    _GLOBAL_NUM_MICROBATCHES_CALCULATOR.update(consumed_samples,
                                               consistency_check)
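
# A minimal sketch of how the microbatch helpers above are typically used
# in a training loop. `consumed_samples` stands in for the caller's own
# sample bookkeeping; it is not defined in this module:
#
#     update_num_microbatches(consumed_samples)
#     for _ in range(get_num_microbatches()):
#         ...  # one forward/backward pass per microbatch
#     consumed_samples += get_current_global_batch_size()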


def get_tokenizer():
    """Return tokenizer."""
    _ensure_var_is_initialized(_GLOBAL_TOKENIZER, 'tokenizer')
    return _GLOBAL_TOKENIZER


def get_tensorboard_writer():
    """Return tensorboard writer. It can be None so no need
    to check if it is initialized."""
    return _GLOBAL_TENSORBOARD_WRITER


def get_adlr_autoresume():
    """ADLR autoresume object. It can be None so no need
    to check if it is initialized."""
    return _GLOBAL_ADLR_AUTORESUME


def get_timers():
    """Return timers."""
    _ensure_var_is_initialized(_GLOBAL_TIMERS, 'timers')
    return _GLOBAL_TIMERS


def get_signal_handler():
    """Return signal handler."""
    _ensure_var_is_initialized(_GLOBAL_SIGNAL_HANDLER, 'signal handler')
    return _GLOBAL_SIGNAL_HANDLER


def _set_signal_handler():
    global _GLOBAL_SIGNAL_HANDLER
    _ensure_var_is_not_initialized(_GLOBAL_SIGNAL_HANDLER, 'signal handler')
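    # Enter the context manager by hand (no `with` block) so the handler
    # stays installed for the remaining lifetime of the process.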
    _GLOBAL_SIGNAL_HANDLER = dist_signal_handler.DistributedSignalHandler().__enter__()


def set_global_variables(args):
    """Set args, tokenizer, tensorboard-writer, adlr-autoresume, and timers."""

    assert args is not None

    _ensure_var_is_not_initialized(_GLOBAL_ARGS, 'args')
    set_args(args)

    _build_num_microbatches_calculator(args)
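    # The tokenizer is optional: it is only built when a vocab file is
    # supplied.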
    if args.vocab_file:
        _ = _build_tokenizer(args)
    _set_tensorboard_writer(args)
    _set_adlr_autoresume(args)
    _set_timers(args)

    if args.exit_signal_handler:
        _set_signal_handler()


def set_args(args):
    """Set global arguments."""
    global _GLOBAL_ARGS
    _GLOBAL_ARGS = args


def _build_num_microbatches_calculator(args):
    """Initialize the number-of-microbatches calculator."""
    global _GLOBAL_NUM_MICROBATCHES_CALCULATOR
    _ensure_var_is_not_initialized(_GLOBAL_NUM_MICROBATCHES_CALCULATOR,
                                   'num microbatches calculator')

    _GLOBAL_NUM_MICROBATCHES_CALCULATOR = build_num_microbatches_calculator(
        args)


def _build_tokenizer(args):
    """Initialize tokenizer."""
    global _GLOBAL_TOKENIZER
    _ensure_var_is_not_initialized(_GLOBAL_TOKENIZER, 'tokenizer')
    _GLOBAL_TOKENIZER = build_tokenizer(args)
    return _GLOBAL_TOKENIZER


def rebuild_tokenizer(args):
    """Reset the global tokenizer and rebuild it."""
    global _GLOBAL_TOKENIZER
    _GLOBAL_TOKENIZER = None
    return _build_tokenizer(args)


def _set_tensorboard_writer(args):
    """Set tensorboard writer."""
    global _GLOBAL_TENSORBOARD_WRITER
    _ensure_var_is_not_initialized(_GLOBAL_TENSORBOARD_WRITER,
                                   'tensorboard writer')

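    # Only the last rank (rank == world_size - 1) creates a writer, so
    # the other ranks do not emit duplicate event files.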
    if hasattr(args, 'tensorboard_dir') and \
       args.tensorboard_dir and args.rank == (args.world_size - 1):
        try:
            from torch.utils.tensorboard import SummaryWriter
            print('> setting tensorboard ...')
            _GLOBAL_TENSORBOARD_WRITER = SummaryWriter(
                log_dir=args.tensorboard_dir,
                max_queue=args.tensorboard_queue_size)
        except ModuleNotFoundError:
            print('WARNING: TensorBoard writing requested but is not '
                  'available (are you using PyTorch 1.1.0 or later?), '
                  'no TensorBoard logs will be written.', flush=True)


def _set_adlr_autoresume(args):
    """Initialize ADLR autoresume."""
    global _GLOBAL_ADLR_AUTORESUME
    _ensure_var_is_not_initialized(_GLOBAL_ADLR_AUTORESUME, 'adlr autoresume')

    if args.adlr_autoresume:
        if args.rank == 0:
            print('enabling autoresume ...', flush=True)
        sys.path.append(os.environ.get('SUBMIT_SCRIPTS', '.'))
        try:
            from userlib.auto_resume import AutoResume
        except BaseException:
            print('ADLR autoresume is not available, exiting ...')
            sys.exit()

        _GLOBAL_ADLR_AUTORESUME = AutoResume


def _set_timers(args):
    """Initialize timers."""
    global _GLOBAL_TIMERS
    _ensure_var_is_not_initialized(_GLOBAL_TIMERS, 'timers')
    _GLOBAL_TIMERS = Timers(args.timing_log_level, args.timing_log_option)


def _ensure_var_is_initialized(var, name):
    """Make sure the input variable is not None."""
    assert var is not None, '{} is not initialized.'.format(name)


def _ensure_var_is_not_initialized(var, name):
    """Make sure the input variable is not None."""
    assert var is None, '{} is already initialized.'.format(name)
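

# Typical usage, as a minimal sketch. `parse_args` stands in for however
# the caller builds the Megatron argument namespace; it is not defined in
# this module:
#
#     args = parse_args()
#     set_global_variables(args)
#     args = get_args()
#     tokenizer = get_tokenizer()  # requires args.vocab_file to be set
#     timers = get_timers()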