"third_party/vscode:/vscode.git/clone" did not exist on "ee631d6bb4d4e1a4a6273c14607f570118e08bca"
Unverified commit db079567, authored by Yih-Dar and committed by GitHub
Browse files

Fix missing eps arg for LayerNorm in ElectraGeneratorPredictions (#15332)



* fix missing eps

* Same fix for ConvBertGeneratorPredictions

* Same fix for AlbertMLMHead
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent 297602c7
......@@ -856,7 +856,7 @@ class AlbertMLMHead(nn.Module):
def __init__(self, config):
super().__init__()
self.LayerNorm = nn.LayerNorm(config.embedding_size)
self.LayerNorm = nn.LayerNorm(config.embedding_size, eps=config.layer_norm_eps)
self.bias = nn.Parameter(torch.zeros(config.vocab_size))
self.dense = nn.Linear(config.hidden_size, config.embedding_size)
self.decoder = nn.Linear(config.embedding_size, config.vocab_size)
......
......@@ -865,7 +865,7 @@ class ConvBertGeneratorPredictions(nn.Module):
def __init__(self, config):
super().__init__()
self.LayerNorm = nn.LayerNorm(config.embedding_size)
self.LayerNorm = nn.LayerNorm(config.embedding_size, eps=config.layer_norm_eps)
self.dense = nn.Linear(config.hidden_size, config.embedding_size)
def forward(self, generator_hidden_states):
......
......@@ -647,7 +647,7 @@ class ElectraGeneratorPredictions(nn.Module):
def __init__(self, config):
super().__init__()
self.LayerNorm = nn.LayerNorm(config.embedding_size)
self.LayerNorm = nn.LayerNorm(config.embedding_size, eps=config.layer_norm_eps)
self.dense = nn.Linear(config.hidden_size, config.embedding_size)
def forward(self, generator_hidden_states):
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment