import itertools
import unittest

import sgl_kernel
import torch
import torch.nn.functional as F
from utils import GeluAndMul, SiluAndMul, precision

from sglang.test.test_utils import CustomTestCase

torch.manual_seed(1234)


class TestActivation(CustomTestCase):
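    """Compare sgl_kernel fused CPU activation ops against PyTorch references.

    Each case sweeps (M, N, dtype) input shapes and checks the fused kernel
    output against the matching reference implementation from utils, using
    dtype-dependent tolerances from the precision table.
    """
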
    M = [128, 129, 257]
    N = [22016, 22018]
    dtype = [torch.float16, torch.bfloat16]

    def _silu_and_mul_test(self, m, n, dtype):
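        # Fused CPU kernel vs. the pure-PyTorch SiluAndMul reference from utils;
        # both are assumed to split x in half along the last dim and compute
        # silu(x1) * x2 (gated SiLU).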
        x = torch.randn([m, n], dtype=dtype)

        out = torch.ops.sgl_kernel.silu_and_mul_cpu(x)
        ref_out = SiluAndMul(x)

        atol = rtol = precision[ref_out.dtype]
        torch.testing.assert_close(ref_out, out, atol=atol, rtol=rtol)

    def _gelu_and_mul_test(self, m, n, dtype):
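        # Same structure as the SiLU test, but the reference gate is the exact
        # (erf-based) GELU: GeluAndMul with approximate="none".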
        x = torch.randn([m, n], dtype=dtype)

        out = torch.ops.sgl_kernel.gelu_and_mul_cpu(x)
        ref_out = GeluAndMul(x, approximate="none")

        atol = rtol = precision[ref_out.dtype]
        torch.testing.assert_close(ref_out, out, atol=atol, rtol=rtol)

    def _gelu_tanh_and_mul_test(self, m, n, dtype):
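        # tanh-approximate GELU variant of the same gated-activation check.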
        x = torch.randn([m, n], dtype=dtype)

        out = torch.ops.sgl_kernel.gelu_tanh_and_mul_cpu(x)
        ref_out = GeluAndMul(x, approximate="tanh")

        atol = rtol = precision[ref_out.dtype]
        torch.testing.assert_close(ref_out, out, atol=atol, rtol=rtol)

    def test_activation(self):
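        # Sweep every (m, n, dtype) combination; subTest keeps a failure
        # attributable to the specific shape/dtype that produced it.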
        for params in itertools.product(self.M, self.N, self.dtype):
            with self.subTest(m=params[0], n=params[1], dtype=params[2]):
                self._silu_and_mul_test(*params)
                self._gelu_and_mul_test(*params)
                self._gelu_tanh_and_mul_test(*params)


if __name__ == "__main__":
    unittest.main()