Unverified Commit 0067da9f authored by Gustaf Ahdritz's avatar Gustaf Ahdritz Committed by GitHub
Browse files

Remove tiny epsilon from default optimizer

parent c392b21c
......@@ -89,7 +89,7 @@ class OpenFoldWrapper(pl.LightningModule):
def configure_optimizers(self,
learning_rate: float = 1e-3,
-        eps: float = 1e-8
+        eps: float = 1e-5,
) -> torch.optim.Adam:
# Ignored as long as a DeepSpeed optimizer is configured
return torch.optim.Adam(
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment