"...fontawesome-free-5.12.0-desktop/svgs/solid/feather.svg" did not exist on "204be94bfa2c6a9d9260125908cce162f4cf90f6"
global_vars.py 7.34 KB
Newer Older
liangjing's avatar
liangjing committed
1
# Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved.

"""Megatron global variables."""

import os
import sys
import torch

from megatron.core import Timers
from megatron.core.num_microbatches_calculator import init_num_microbatches_calculator
from megatron.training import dist_signal_handler
from megatron.training.tokenizer import build_tokenizer

_GLOBAL_ARGS = None
_GLOBAL_TOKENIZER = None
_GLOBAL_TENSORBOARD_WRITER = None
_GLOBAL_WANDB_WRITER = None
_GLOBAL_ONE_LOGGER = None
_GLOBAL_ADLR_AUTORESUME = None
_GLOBAL_TIMERS = None
_GLOBAL_SIGNAL_HANDLER = None

def get_args():
    """Return arguments."""
    _ensure_var_is_initialized(_GLOBAL_ARGS, 'args')
    return _GLOBAL_ARGS


def get_tokenizer():
    """Return tokenizer."""
    _ensure_var_is_initialized(_GLOBAL_TOKENIZER, 'tokenizer')
    return _GLOBAL_TOKENIZER


def get_tensorboard_writer():
    """Return tensorboard writer. It can be None so no need
    to check if it is initialized."""
    return _GLOBAL_TENSORBOARD_WRITER


def get_wandb_writer():
    """Return tensorboard writer. It can be None so no need
    to check if it is initialized."""
    return _GLOBAL_WANDB_WRITER


def get_one_logger():
    """Return one logger. It can be None so no need
    to check if it is initialized."""
    return _GLOBAL_ONE_LOGGER

def get_adlr_autoresume():
    """ADLR autoresume object. It can be None so no need
    to check if it is initialized."""
    return _GLOBAL_ADLR_AUTORESUME


def get_timers():
    """Return timers."""
    _ensure_var_is_initialized(_GLOBAL_TIMERS, 'timers')
    return _GLOBAL_TIMERS


def get_signal_handler():
    """Return signal handler."""
    _ensure_var_is_initialized(_GLOBAL_SIGNAL_HANDLER, 'signal handler')
    return _GLOBAL_SIGNAL_HANDLER


def _set_signal_handler():
    global _GLOBAL_SIGNAL_HANDLER
    _ensure_var_is_not_initialized(_GLOBAL_SIGNAL_HANDLER, 'signal handler')
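    # Enter the context manager manually so the handler stays installed
    # for the lifetime of the process (no matching __exit__ is called here).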
    _GLOBAL_SIGNAL_HANDLER = dist_signal_handler.DistributedSignalHandler().__enter__()


def set_global_variables(args, build_tokenizer=True):
    """Set args, tokenizer, tensorboard-writer, adlr-autoresume, and timers."""

    assert args is not None

    _ensure_var_is_not_initialized(_GLOBAL_ARGS, 'args')
    set_args(args)

    init_num_microbatches_calculator(
        args.rank,
        args.rampup_batch_size,
        args.global_batch_size,
        args.micro_batch_size,
        args.data_parallel_size,
        args.decrease_batch_size_if_needed,
    )
    if build_tokenizer:
        _ = _build_tokenizer(args)
    _set_tensorboard_writer(args)
    _set_wandb_writer(args)
    _set_one_logger(args)
    _set_adlr_autoresume(args)
    _set_timers(args)

    if args.exit_signal_handler:
        _set_signal_handler()
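
# A minimal usage sketch (assumption: `args` is the namespace produced by
# Megatron's own parser, e.g. megatron.training.arguments.parse_args()):
#
#     args = parse_args()
#     set_global_variables(args)
#     tokenizer = get_tokenizer()
#     timers = get_timers()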


def set_args(args):
    """Set global args."""
    global _GLOBAL_ARGS
    _GLOBAL_ARGS = args


def _build_tokenizer(args):
    """Initialize tokenizer."""
    global _GLOBAL_TOKENIZER
    _ensure_var_is_not_initialized(_GLOBAL_TOKENIZER, 'tokenizer')
    _GLOBAL_TOKENIZER = build_tokenizer(args)
    return _GLOBAL_TOKENIZER


def rebuild_tokenizer(args):
    """Reset and rebuild the global tokenizer."""
    global _GLOBAL_TOKENIZER
    _GLOBAL_TOKENIZER = None
    return _build_tokenizer(args)
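
# A hedged example: a tool that tweaks tokenizer-related args (the mutated
# field below is hypothetical) can rebuild the global tokenizer in place:
#
#     args.tokenizer_model = new_model_path  # hypothetical mutation
#     tokenizer = rebuild_tokenizer(args)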


def _set_tensorboard_writer(args):
    """Set tensorboard writer."""
    global _GLOBAL_TENSORBOARD_WRITER
    _ensure_var_is_not_initialized(_GLOBAL_TENSORBOARD_WRITER,
                                   'tensorboard writer')

    if hasattr(args, 'tensorboard_dir') and \
       args.tensorboard_dir and args.rank == (args.world_size - 1):
        try:
            from torch.utils.tensorboard import SummaryWriter
            print('> setting tensorboard ...')
            _GLOBAL_TENSORBOARD_WRITER = SummaryWriter(
                log_dir=args.tensorboard_dir,
                max_queue=args.tensorboard_queue_size)
        except ModuleNotFoundError:
            print('WARNING: TensorBoard writing requested but is not '
                  'available (are you using PyTorch 1.1.0 or later?), '
                  'no TensorBoard logs will be written.', flush=True)


def _set_wandb_writer(args):
    """Set wandb writer."""
    global _GLOBAL_WANDB_WRITER
    _ensure_var_is_not_initialized(_GLOBAL_WANDB_WRITER,
                                   'wandb writer')
    if getattr(args, 'wandb_project', '') and args.rank == (args.world_size - 1):
        if args.wandb_exp_name == '':
            raise ValueError("Please specify the wandb experiment name!")

        import wandb
        if args.wandb_save_dir:
            save_dir = args.wandb_save_dir
        else:
            # Defaults to the save dir.
            save_dir = os.path.join(args.save, 'wandb')
        wandb_kwargs = {
            'dir': save_dir,
            'name': args.wandb_exp_name,
            'project': args.wandb_project,
            'config': vars(args)}
        os.makedirs(wandb_kwargs['dir'], exist_ok=True)
        wandb.init(**wandb_kwargs)
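        # Note: the wandb module itself is stored as the writer; callers
        # are expected to log through its module-level API (e.g. wandb.log).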
        _GLOBAL_WANDB_WRITER = wandb


def _set_one_logger(args):
    """Set one logger."""
    global _GLOBAL_ONE_LOGGER
    _ensure_var_is_not_initialized(_GLOBAL_ONE_LOGGER, 'one logger')

    if args.enable_one_logger and args.rank == (args.world_size - 1):
        if args.one_logger_async or getattr(args, 'wandb_project', ''):
            one_logger_async = True
        else:
            one_logger_async = False
        try:
            from one_logger import OneLogger
            config = {
                'project': args.one_logger_project,
                'name': args.one_logger_run_name,
                'async': one_logger_async,
            }
            one_logger = OneLogger(config=config)
            _GLOBAL_ONE_LOGGER = one_logger
        except Exception:
            print('WARNING: one_logger package is required to enable e2e metrics '
                  'tracking. Please visit '
                  'https://confluence.nvidia.com/display/MLWFO/Package+Repositories'
                  ' for details on how to install it.')

def _set_adlr_autoresume(args):
    """Initialize ADLR autoresume."""
    global _GLOBAL_ADLR_AUTORESUME
    _ensure_var_is_not_initialized(_GLOBAL_ADLR_AUTORESUME, 'adlr autoresume')

    if args.adlr_autoresume:
        if args.rank == 0:
            print('enabling autoresume ...', flush=True)
        sys.path.append(os.environ.get('SUBMIT_SCRIPTS', '.'))
        try:
            from userlib.auto_resume import AutoResume
        except ImportError:
            print('ADLR autoresume is not available, exiting ...')
            sys.exit()

        _GLOBAL_ADLR_AUTORESUME = AutoResume


def _set_timers(args):
    """Initialize timers."""
    global _GLOBAL_TIMERS
    _ensure_var_is_not_initialized(_GLOBAL_TIMERS, 'timers')
    _GLOBAL_TIMERS = Timers(args.timing_log_level, args.timing_log_option)
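
# A hedged usage sketch of the timers API (the timer name and log level
# below are illustrative assumptions, not fixed by this module):
#
#     timers = get_timers()
#     timers('forward-compute', log_level=1).start()
#     ...  # timed region
#     timers('forward-compute').stop()
#     timers.log(['forward-compute'])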


def _ensure_var_is_initialized(var, name):
    """Make sure the input variable is not None."""
    assert var is not None, '{} is not initialized.'.format(name)


def _ensure_var_is_not_initialized(var, name):
    """Make sure the input variable is not None."""
    assert var is None, '{} is already initialized.'.format(name)

def destroy_global_vars():
    """Reset all global variables to None."""
    global _GLOBAL_ARGS
    _GLOBAL_ARGS = None

    global _GLOBAL_TOKENIZER
    _GLOBAL_TOKENIZER = None

    global _GLOBAL_TENSORBOARD_WRITER
    _GLOBAL_TENSORBOARD_WRITER = None

    global _GLOBAL_WANDB_WRITER
    _GLOBAL_WANDB_WRITER = None

    global _GLOBAL_ONE_LOGGER
    _GLOBAL_ONE_LOGGER = None

    global _GLOBAL_ADLR_AUTORESUME
    _GLOBAL_ADLR_AUTORESUME = None

    global _GLOBAL_TIMERS
    _GLOBAL_TIMERS = None

    global _GLOBAL_SIGNAL_HANDLER
    _GLOBAL_SIGNAL_HANDLER = None