"hook_breaker_ac10a0.py" did not exist on "394e7a416603d4f9b60e60554ac5194c58f3b359"
rope.py 7.23 KB
Newer Older
xgqdut2016's avatar
xgqdut2016 committed
1
import torch
PanZezhongQY's avatar
PanZezhongQY committed
2
import ctypes
PanZezhong's avatar
PanZezhong committed
3
from ctypes import POINTER, Structure, c_int32, c_uint64, c_void_p
4
from libinfiniop import (
PanZezhongQY's avatar
PanZezhongQY committed
5
6
    infiniopHandle_t,
    infiniopTensorDescriptor_t,
7
8
9
    open_lib,
    to_tensor,
    get_test_devices,
PanZezhongQY's avatar
PanZezhongQY committed
10
    check_error,
11
    rearrange_if_needed,
PanZezhongQY's avatar
PanZezhongQY committed
12
    create_workspace,
13
14
15
    test_operator,
    get_args,
    debug,
xgqdut2016's avatar
xgqdut2016 committed
16
    get_tolerance,
17
    profile_operation,
xgqdut2016's avatar
xgqdut2016 committed
18
    synchronize_device,
PanZezhongQY's avatar
PanZezhongQY committed
19
)
PanZezhong's avatar
PanZezhong committed
20
from enum import Enum, auto
xgqdut2016's avatar
xgqdut2016 committed
21
22
23
24
25

# ==============================================================================
#  Configuration (Internal Use Only)
# ==============================================================================
# These are not meant to be imported from other modules
_TEST_CASES_ = [
    # (shape, x_strides, y_strides); None strides mean a contiguous layout.
    ((1, 32, 128), None, None),
    ((10, 32, 64), None, None),
    # Ascend cannot run this case yet: a last dimension <= 32 is problematic,
    # possibly related to the internal implementation of its core GatherMask
    # interface; 48, 64 and 128 are currently supported.
    ((4, 1, 32), (64, 64, 1), None),
    ((11, 33, 128), None, (8000, 200, 1)),
    ((3, 32, 128), (8000, 200, 1), (7000, 128, 1)),
]

# Data types used for testing
_TENSOR_DTYPES = [torch.float16, torch.bfloat16, torch.float32]
xgqdut2016's avatar
xgqdut2016 committed
39
40
41

# Tolerance map for different data types: looser bounds for lower-precision
# dtypes (bfloat16 gets the loosest tolerance, float32 the tightest).
_TOLERANCE_MAP = {
    torch.float16: {"atol": 1e-3, "rtol": 1e-2},
    torch.bfloat16: {"atol": 5e-3, "rtol": 5e-2},
    torch.float32: {"atol": 1e-4, "rtol": 1e-3},
}
PanZezhongQY's avatar
PanZezhongQY committed
46

PanZezhong's avatar
PanZezhong committed
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63

class Inplace(Enum):
    """Whether the operator writes its result into a fresh tensor or into x."""

    OUT_OF_PLACE = auto()
    INPLACE_X = auto()


# Every in-place mode is exercised for every test case; enum iteration order
# is definition order, i.e. [OUT_OF_PLACE, INPLACE_X].
_INPLACE = list(Inplace)

# Cross product: each (shape, x_strides, y_strides) case x each in-place mode.
_TEST_CASES = [(*case, mode) for case in _TEST_CASES_ for mode in _INPLACE]

64
65
66
67
68
# Runtime options. These are module-level defaults; they are overwritten from
# the parsed CLI arguments in the __main__ block at the bottom of this file.
DEBUG = False
PROFILE = False
NUM_PRERUN = 10
NUM_ITERATIONS = 1000

PanZezhongQY's avatar
PanZezhongQY committed
69
70
71
72
73
74
75
76

class RoPEDescriptor(Structure):
    """ctypes mirror of the C-side RoPE operator descriptor.

    Only the leading ``device`` field is declared; Python code only ever
    passes this structure around by pointer, so any further C fields are
    never accessed from here.
    """

    _fields_ = [("device", c_int32)]


# Handle type passed across the C ABI (pointer to the opaque descriptor).
infiniopRoPEDescriptor_t = POINTER(RoPEDescriptor)


PanZezhong's avatar
PanZezhong committed
77
def rotary_embedding(t, sin, cos, torch_device):
    """PyTorch reference for interleaved rotary positional embedding.

    Treats consecutive pairs (t[..., 2i], t[..., 2i+1]) as the two components
    of a 2-D vector and rotates each pair by the per-position angle encoded
    in ``sin``/``cos``.

    Args:
        t: input of shape [seq_len, n_head, dh] with even dh.
        sin, cos: per-position tables of shape [seq_len, dh // 2].
        torch_device: target device; the literal "cpu" additionally triggers
            a float32 upcast so low-precision dtypes are checked against a
            more accurate reference.

    Returns:
        The rotated tensor, same shape and dtype as ``t``, on ``torch_device``.
    """
    head_dim = t.shape[2]
    out_dtype = t.dtype
    assert head_dim % 2 == 0, "Embedding dimension must be even."

    even = t[..., 0::2]  # [seq_len, n_head, dh // 2]
    odd = t[..., 1::2]  # [seq_len, n_head, dh // 2]
    cos_b = cos.unsqueeze(1)  # broadcast over heads: [seq_len, 1, dh // 2]
    sin_b = sin.unsqueeze(1)  # broadcast over heads: [seq_len, 1, dh // 2]

    # On CPU, do the math in float32 for reference accuracy.
    if torch_device == "cpu":
        even = even.float()
        odd = odd.float()
        cos_b = cos_b.float()
        sin_b = sin_b.float()

    rotated = torch.empty_like(t)
    rotated[..., 0::2] = even * cos_b - odd * sin_b
    rotated[..., 1::2] = even * sin_b + odd * cos_b
    return rotated.to(out_dtype).to(torch_device)
PanZezhongQY's avatar
PanZezhongQY committed
101

102

PanZezhong's avatar
PanZezhong committed
103
104
def sin_cos_table(pos, dim, torch_device, theta, dtype):
    """Build the RoPE sine/cosine lookup tables.

    Args:
        pos: 1-D tensor of position ids.
        dim: embedding (head) dimension; must be even.
        torch_device: device the frequency tensor is created on.
        theta: rotary base (the test driver uses 1e5).
        dtype: dtype of the returned tables.

    Returns:
        (sin_table, cos_table), each of shape [len(pos), dim // 2] in ``dtype``.
    """
    assert dim % 2 == 0, "Embedding dimension must be even."

    # Inverse frequencies theta^(-2i/dim) for i = 0 .. dim/2 - 1.
    inv_freq = (1.0 / (theta ** (torch.arange(0, dim, 2).float() / dim))).to(
        torch_device
    )
    # angle[p, i] = pos[p] * inv_freq[i]
    angles = torch.outer(pos, inv_freq)
    return angles.sin().to(dtype), angles.cos().to(dtype)


def test(
    lib,
    handle,
    torch_device,
    shape,
    x_strides=None,
    y_strides=None,
    inplace=Inplace.OUT_OF_PLACE,
    dtype=torch.float32,
    sync=None,
):
    """Run one RoPE test case against the operator library.

    Builds random inputs, computes a PyTorch reference with
    ``rotary_embedding``, runs the library kernel through the ctypes bindings,
    and asserts the two agree within the dtype-dependent tolerance.

    Args:
        lib: ctypes library handle returned by ``open_lib``.
        handle: device handle (``infiniopHandle_t``) for descriptor creation.
        torch_device: torch device the tensors live on.
        shape: 3-D input shape — presumably (seq_len, n_head, head_dim);
            TODO confirm against the operator documentation.
        x_strides / y_strides: optional explicit strides to exercise
            non-contiguous layouts; None keeps the default contiguous layout.
        inplace: whether y aliases x (Inplace.INPLACE_X) or is a new tensor.
        dtype: tensor dtype under test.
        sync: optional device-synchronization callable (None on CPU).
    """
    if inplace == Inplace.INPLACE_X:
        # In-place runs write into x, so y must share x's layout.
        y_strides = x_strides
    print(
        f"Testing Rotary Positional Embedding on {torch_device} with shape:{shape} x_strides:{x_strides} y_strides:{y_strides} and dtype:{dtype} inplace:{inplace}"
    )

    x = torch.rand(shape, dtype=dtype).to(torch_device)
    x = rearrange_if_needed(x, x_strides)
    if inplace == Inplace.INPLACE_X:
        y = x
    else:
        y = torch.rand(shape, dtype=dtype).to(torch_device)
        y = rearrange_if_needed(y, y_strides)
    theta = 1e5
    # Position ids 0..seq_len-1; the C API takes them as an int32 tensor.
    pos = torch.arange(0, x.shape[0], dtype=torch.int32).to(torch_device)
    sin_table, cos_table = sin_cos_table(pos, x.shape[2], x.device, theta, dtype)

    # Reference result computed in PyTorch.
    ans = rotary_embedding(x, sin_table, cos_table, torch_device)

    descriptor = infiniopRoPEDescriptor_t()
    x_tensor, pos_tensor, sin_table_tensor, cos_table_tensor = [
        to_tensor(tensor, lib, force_unsigned=True)
        for tensor in [x, pos, sin_table, cos_table]
    ]
    if inplace == Inplace.INPLACE_X:
        y_tensor = x_tensor
    else:
        y_tensor = to_tensor(y, lib)

    if sync is not None:
        sync()

    check_error(
        lib.infiniopCreateRoPEDescriptor(
            handle,
            ctypes.byref(descriptor),
            y_tensor.descriptor,
            x_tensor.descriptor,
            pos_tensor.descriptor,
            sin_table_tensor.descriptor,
            cos_table_tensor.descriptor,
        )
    )

    # Invalidate the shape and strides in the descriptor to prevent them from
    # being directly used by the kernel (the operator descriptor created above
    # must already have captured everything it needs).
    for tensor in [y_tensor, x_tensor, pos_tensor, sin_table_tensor, cos_table_tensor]:
        tensor.destroyDesc(lib)

    workspace_size = c_uint64(0)
    check_error(
        lib.infiniopGetRoPEWorkspaceSize(descriptor, ctypes.byref(workspace_size))
    )
    workspace = create_workspace(workspace_size.value, x.device)

    def lib_rope():
        # Single library invocation; also reused below as the profiling body.
        check_error(
            lib.infiniopRoPE(
                descriptor,
                workspace.data_ptr() if workspace is not None else None,
                workspace_size.value,
                y_tensor.data,
                x_tensor.data,
                pos_tensor.data,
                sin_table_tensor.data,
                cos_table_tensor.data,
                None,  # trailing pointer -- presumably the stream; confirm in C header
            )
        )

    lib_rope()

    if sync is not None:
        sync()

    atol, rtol = get_tolerance(_TOLERANCE_MAP, dtype)
    if DEBUG:
        debug(y, ans, atol=atol, rtol=rtol)
    assert torch.allclose(y, ans, atol=atol, rtol=rtol)

    if PROFILE:
        profile_operation(
            "PyTorch",
            lambda: rotary_embedding(x, sin_table, cos_table, torch_device),
            torch_device,
            NUM_PRERUN,
            NUM_ITERATIONS,
        )
        profile_operation(
            "    lib", lambda: lib_rope(), torch_device, NUM_PRERUN, NUM_ITERATIONS
        )

    check_error(lib.infiniopDestroyRoPEDescriptor(descriptor))
PanZezhongQY's avatar
PanZezhongQY committed
215

216

PanZezhongQY's avatar
PanZezhongQY committed
217
218
219
if __name__ == "__main__":
    args = get_args()
    lib = open_lib()

    # --- C ABI declarations -------------------------------------------------
    # Each argtypes list mirrors the corresponding call site in test().
    # Fix: the original declared only 4 tensor descriptors here, but test()
    # passes 5 (y, x, pos, sin table, cos table); ctypes silently accepted the
    # extra argument as variadic, bypassing type conversion/checking.
    lib.infiniopCreateRoPEDescriptor.restype = c_int32
    lib.infiniopCreateRoPEDescriptor.argtypes = [
        infiniopHandle_t,
        POINTER(infiniopRoPEDescriptor_t),
        infiniopTensorDescriptor_t,  # y
        infiniopTensorDescriptor_t,  # x
        infiniopTensorDescriptor_t,  # pos
        infiniopTensorDescriptor_t,  # sin table
        infiniopTensorDescriptor_t,  # cos table
    ]

    lib.infiniopGetRoPEWorkspaceSize.restype = c_int32
    lib.infiniopGetRoPEWorkspaceSize.argtypes = [
        infiniopRoPEDescriptor_t,
        POINTER(c_uint64),
    ]

    # Fix: test() calls infiniopRoPE with 6 pointer arguments after the
    # workspace size (y, x, pos, sin, cos, trailing NULL -- presumably the
    # stream; confirm against the C header). The original list was one
    # c_void_p short.
    lib.infiniopRoPE.restype = c_int32
    lib.infiniopRoPE.argtypes = [
        infiniopRoPEDescriptor_t,
        c_void_p,  # workspace
        c_uint64,  # workspace size
        c_void_p,  # y data
        c_void_p,  # x data
        c_void_p,  # pos data
        c_void_p,  # sin table data
        c_void_p,  # cos table data
        c_void_p,  # stream (passed as None in this test)
    ]

    lib.infiniopDestroyRoPEDescriptor.restype = c_int32
    lib.infiniopDestroyRoPEDescriptor.argtypes = [
        infiniopRoPEDescriptor_t,
    ]

    # Configure testing options
    DEBUG = args.debug
    PROFILE = args.profile
    NUM_PRERUN = args.num_prerun
    NUM_ITERATIONS = args.num_iterations

    # Execute tests
    for device in get_test_devices(args):
        test_operator(lib, device, test, _TEST_CASES, _TENSOR_DTYPES)

    print("\033[92mTest passed!\033[0m")