# Copyright (c) 2022-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# See LICENSE for license information.

"""Enums for e2e transformer"""
import torch
import torch.distributed
import transformer_engine_torch as tex


"""
This is a map: torch.dtype -> int
Used for passing dtypes into cuda
extension. Has one to one mapping
with enum in transformer_engine.h
"""
TE_DType = {
cyanguwa's avatar
cyanguwa committed
18
    torch.uint8: tex.DType.kByte,
19
20
    torch.float8_e4m3fn: tex.DType.kFloat8E4M3,
    torch.float8_e5m2: tex.DType.kFloat8E5M2,
Przemek Tredak's avatar
Przemek Tredak committed
21
22
23
24
25
26
    torch.int32: tex.DType.kInt32,
    torch.float32: tex.DType.kFloat32,
    torch.half: tex.DType.kFloat16,
    torch.bfloat16: tex.DType.kBFloat16,
}

# Inverse mapping: transformer-engine DType enum value -> torch.dtype.
# Derived from TE_DType so the two tables cannot drift apart; insertion
# order matches TE_DType's key order.
TE_DType_To_Torch = {te_dtype: torch_dtype for torch_dtype, te_dtype in TE_DType.items()}

# Accepted values for the attention-mask-type option of attention ops.
AttnMaskTypes = (
    "no_mask",
    "padding",
    "causal",
    "padding_causal",
    "causal_bottom_right",
    "padding_causal_bottom_right",
    "arbitrary",
)

# Attention flavours: attending within one sequence vs across two.
AttnTypes = ("self", "cross")

# Accepted bias variants for attention ops.
AttnBiasTypes = ("pre_scale_bias", "post_scale_bias", "no_bias", "alibi")

# Accepted QKV memory layouts. Names describe the tensor layout of
# q/k/v ("s" sequence, "b" batch, "h" heads, "d" head dim, "t" packed
# tokens); a single name means fully packed qkv, two names mean packed
# kv, three names mean separate q/k/v tensors.
QKVLayouts = (
    # sequence-first ("sbhd") family
    "sb3hd",
    "sbh3d",
    "sbhd_sb2hd",
    "sbhd_sbh2d",
    "sbhd_sbhd_sbhd",
    # batch-first ("bshd") family
    "bs3hd",
    "bsh3d",
    "bshd_bs2hd",
    "bshd_bsh2d",
    "bshd_bshd_bshd",
    # token-packed ("thd") family
    "t3hd",
    "th3d",
    "thd_t2hd",
    "thd_th2d",
    "thd_thd_thd",
    # mixed q vs kv layouts
    "sbhd_bshd_bshd",
    "bshd_sbhd_sbhd",
    "thd_bshd_bshd",
    "thd_sbhd_sbhd",
    # paged KV-cache variants
    "paged_kv_bshd_bshd_bshd",
    "paged_kv_bshd_sbhd_sbhd",
    "paged_kv_sbhd_bshd_bshd",
    "paged_kv_sbhd_sbhd_sbhd",
    "paged_kv_thd_bshd_bshd",
    "paged_kv_thd_sbhd_sbhd",
)

# Transformer block roles.
LayerTypes = ("encoder", "decoder")

# Parallel modes for GEMM layers; None means no tensor parallelism.
GemmParallelModes = ("row", "column", None)

dist_group_type = torch.distributed.ProcessGroup

MXFP8_BLOCK_SCALING_SIZE = 32