"""Custom normalization layers."""
import torch
import torch.nn as nn

from cacheflow import layernorm_ops


class RMSNorm(nn.Module):
    """Root mean square normalization.

    Computes x -> w * x / sqrt(E[x^2] + eps) where w is the learned weight.
    Refer to https://arxiv.org/abs/1910.07467
    """

    def __init__(
        self,
        hidden_size: int,
        eps: float = 1e-6,
    ) -> None:
        super().__init__()
        # Learned per-channel scale w, initialized to ones (identity scaling).
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Allocate the output buffer; the fused CUDA kernel writes into it.
        out = torch.empty_like(x)
        # The op normalizes over the last (hidden) dimension and applies the
        # learned weight in a single pass.
        layernorm_ops.rms_norm(
            out,
            x,
            self.weight.data,
            self.variance_epsilon,
        )
        return out
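

# A minimal pure-PyTorch sketch of what the fused `layernorm_ops.rms_norm`
# kernel above computes: x -> w * x / sqrt(E[x^2] + eps), with E[x^2] taken
# over the last (hidden) dimension. `rms_norm_ref` is a hypothetical helper
# for documentation and testing, not part of the cacheflow API; the float32
# accumulation is an assumption made for numerical stability.
def rms_norm_ref(
    x: torch.Tensor,
    weight: torch.Tensor,
    eps: float = 1e-6,
) -> torch.Tensor:
    # E[x^2] over the hidden dimension, accumulated in float32.
    variance = x.float().pow(2).mean(dim=-1, keepdim=True)
    # Normalize by the root mean square, then apply the learned scale.
    out = x.float() * torch.rsqrt(variance + eps)
    return (weight * out).to(x.dtype)
# Usage sketch (shapes are illustrative; the fused kernel expects the hidden
# dimension last and runs on CUDA):
#   norm = RMSNorm(hidden_size=4096).half().cuda()
#   x = torch.randn(num_tokens, 4096, dtype=torch.half, device="cuda")
#   y = norm(x)  # should closely match rms_norm_ref(x, norm.weight)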