Layer Normalization

LayerNormalization

Bases: Module

Source code in src/transformer/modules/layer_norm.py
import torch
from torch import nn


class LayerNormalization(nn.Module):

    def __init__(self, eps: float = 1e-5):
        """Layer normalization

        Args:
            eps: a small epsilon for numerical stability
        """
        super().__init__()
        self.eps = eps
        self.alpha = nn.Parameter(torch.ones(1))  # learnable multiplicative gain
        self.bias = nn.Parameter(torch.zeros(1))  # learnable additive bias

    def forward(self, x):
        # Normalize each position over its last (feature) dimension;
        # eps keeps the division stable when std is close to zero.
        mean = x.mean(dim=-1, keepdim=True)
        std = x.std(dim=-1, keepdim=True)
        return self.alpha * (x - mean) / (std + self.eps) + self.bias

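A minimal usage sketch (the import path below is an assumption based on the src/transformer/modules/layer_norm.py location noted above): each position of the input is normalized over its last dimension, so at initialization (alpha = 1, bias = 0) the per-position mean is near 0 and the standard deviation near 1.

import torch

from transformer.modules.layer_norm import LayerNormalization  # assumed import path

ln = LayerNormalization()
x = torch.randn(2, 5, 16)  # (batch, sequence, features); shapes are illustrative
out = ln(x)

print(out.shape)                     # torch.Size([2, 5, 16])
print(out.mean(dim=-1).abs().max())  # close to 0
print(out.std(dim=-1).mean())        # close to 1
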
__init__(eps=1e-05)

Layer normalization

Parameters:

Name  Type   Description                               Default
eps   float  a small epsilon for numerical stability  1e-05
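
A minimal sketch of what eps buys (same assumed import path as above): a constant input has zero standard deviation, so without eps the forward pass would divide by zero; with eps the denominator stays at std + eps and the output is finite.

import torch

from transformer.modules.layer_norm import LayerNormalization  # assumed import path

ln = LayerNormalization()
x = torch.full((1, 4), 3.0)  # constant row: std is exactly 0
out = ln(x)

# (x - mean) is 0, so the output reduces to bias, i.e. all zeros at initialization.
assert torch.isfinite(out).all()
print(out)  # all zeros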