Commit 86a1f756 authored by Gustaf Ahdritz's avatar Gustaf Ahdritz
Browse files

Fix default in LR scheduler

parent 3dcc01a7
@@ -17,7 +17,7 @@ class AlphaFoldLRScheduler(torch.optim.lr_scheduler._LRScheduler):
         base_lr: float = 0.,
         max_lr: float = 0.001,
         warmup_no_steps: int = 1000,
-        start_decay_after_n_steps: int = 10000,
+        start_decay_after_n_steps: int = 50000,
         decay_every_n_steps: int = 50000,
         decay_factor: float = 0.95,
     ):
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment