"""Custom activation functions."""
import torch
import torch.nn as nn

from vllm import activation_ops

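# Registry of supported activation functions, keyed by lowercase name.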
_ACTIVATION_REGISTRY = {
    "gelu": nn.GELU(),
    # NOTE: The following GELU functions may introduce small rounding errors.
    "gelu_new": nn.GELU(approximate="tanh"),
    "gelu_fast": nn.GELU(approximate="tanh"),
    "gelu_pytorch_tanh": nn.GELU(approximate="tanh"),
    "relu": nn.ReLU(),
}


def get_act_fn(act_fn: str) -> nn.Module:
    """Get an activation function by name."""
    act_fn = act_fn.lower()
    if act_fn in _ACTIVATION_REGISTRY:
        return _ACTIVATION_REGISTRY[act_fn]
    raise ValueError(f"Activation function {act_fn!r} is not supported.")
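
# Illustrative usage sketch (`x` is a hypothetical floating-point tensor,
# not defined in this module):
#
#     act = get_act_fn("gelu_new")
#     y = act(x)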


class SiluAndMul(nn.Module):
    """An activation function for SwiGLU.

    The function computes x -> silu(x[:, :d]) * x[:, d:] where
    d = x.shape[1] // 2.

    Shapes:
        x: (num_tokens, 2 * d)
        return: (num_tokens, d)
    """

    def forward(self, x: torch.Tensor) -> torch.Tensor:
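        # Allocate the output buffer and launch the fused CUDA kernel,
        # which computes silu(x[:, :d]) * x[:, d:] in a single pass.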
        num_tokens = x.shape[0]
        d = x.shape[1] // 2
        out = torch.empty(num_tokens, d, dtype=x.dtype, device=x.device)
        activation_ops.silu_and_mul(out, x)
        return out
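
# Example: SiluAndMul in a SwiGLU-style MLP (illustrative sketch;
# `gate_up_proj` and `hidden_states` are hypothetical names, not part
# of this module):
#
#     gate_up = gate_up_proj(hidden_states)  # (num_tokens, 2 * d)
#     out = SiluAndMul()(gate_up)            # (num_tokens, d)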