"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "4f24058c58ed9fcde0d9e5629e66c5500f67c7c8"
Unverified commit db079567, authored by Yih-Dar, committed by GitHub

Fix missing eps arg for LayerNorm in ElectraGeneratorPredictions (#15332)



* fix missing eps

* Same fix for ConvBertGeneratorPredictions

* Same fix for AlbertMLMHead
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent 297602c7
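
The change below passes config.layer_norm_eps when constructing the LayerNorm of each generator/MLM prediction head. Without that argument, nn.LayerNorm falls back to PyTorch's default eps of 1e-5, whereas these model configs typically set layer_norm_eps to 1e-12, so the layers silently used a different epsilon than the one configured. A minimal sketch of the before/after construction (embedding_size and the eps value are illustrative, not taken from a specific checkpoint):

from torch import nn

# Illustrative values only: config.embedding_size and config.layer_norm_eps
# vary by checkpoint; 1e-12 is the usual default in these configs.
embedding_size = 128
layer_norm_eps = 1e-12

ln_before = nn.LayerNorm(embedding_size)                     # bug: eps defaults to 1e-5
ln_after = nn.LayerNorm(embedding_size, eps=layer_norm_eps)  # fix: use the configured eps

print(ln_before.eps, ln_after.eps)  # 1e-05 vs 1e-12
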
@@ -856,7 +856,7 @@ class AlbertMLMHead(nn.Module):
     def __init__(self, config):
         super().__init__()
 
-        self.LayerNorm = nn.LayerNorm(config.embedding_size)
+        self.LayerNorm = nn.LayerNorm(config.embedding_size, eps=config.layer_norm_eps)
         self.bias = nn.Parameter(torch.zeros(config.vocab_size))
         self.dense = nn.Linear(config.hidden_size, config.embedding_size)
         self.decoder = nn.Linear(config.embedding_size, config.vocab_size)
@@ -865,7 +865,7 @@ class ConvBertGeneratorPredictions(nn.Module):
     def __init__(self, config):
         super().__init__()
 
-        self.LayerNorm = nn.LayerNorm(config.embedding_size)
+        self.LayerNorm = nn.LayerNorm(config.embedding_size, eps=config.layer_norm_eps)
         self.dense = nn.Linear(config.hidden_size, config.embedding_size)
 
     def forward(self, generator_hidden_states):
@@ -647,7 +647,7 @@ class ElectraGeneratorPredictions(nn.Module):
     def __init__(self, config):
         super().__init__()
 
-        self.LayerNorm = nn.LayerNorm(config.embedding_size)
+        self.LayerNorm = nn.LayerNorm(config.embedding_size, eps=config.layer_norm_eps)
         self.dense = nn.Linear(config.hidden_size, config.embedding_size)
 
     def forward(self, generator_hidden_states):
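
As a quick sanity check (not part of the commit), two LayerNorms that differ only in eps produce slightly different outputs, which is enough to break exact parity with a reference implementation; a small sketch with illustrative shapes:

import torch
from torch import nn

# Compare a LayerNorm built with the PyTorch default eps (1e-5) against one
# using the eps value typically found in these configs (1e-12).
torch.manual_seed(0)
x = torch.randn(2, 4, 128)

out_default = nn.LayerNorm(128, eps=1e-5)(x)
out_config = nn.LayerNorm(128, eps=1e-12)(x)

print((out_default - out_config).abs().max())  # small but nonzero difference
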