         self._warned_for_t_total_at_progress = -1 if schedule == "warmup_linear" else float("inf")  # warning is not active with other schedules (since it doesn't break them)

     def get_lr(self):
         lr = []
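
The -1 / float("inf") pair above is a warn-once sentinel: -1 arms the check (any progress value exceeds it), while float("inf") can never be exceeded, which disables the warning for the other schedules. A minimal standalone sketch of the idiom (the class and method names here are illustrative assumptions, not the repository's API):

import math

class WarnOnceSentinel:
    # Sketch of the sentinel idiom from the diff above; names are hypothetical.
    def __init__(self, schedule):
        # -1 arms the warning; math.inf disables it for other schedules.
        self._warned_at = -1 if schedule == "warmup_linear" else math.inf

    def maybe_warn(self, progress):
        # Fires only past t_total (progress > 1) and only once progress
        # has moved beyond the last point we already warned at.
        if progress > 1.0 and progress > self._warned_at:
            print("Training beyond t_total (progress=%.2f)" % progress)
            self._warned_at = progress
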
...
@@ -100,6 +98,8 @@ class OpenAIAdam(Optimizer):
         if closure is not None:
             loss = closure()

+        warned_for_t_total = False
         for group in self.param_groups:
             for p in group['params']:
                 if p.grad is None:
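
The added warned_for_t_total flag is local to the step() call: the loops above visit every parameter group and every parameter, so without a guard the t_total warning could fire once per group rather than once per optimizer step. A rough sketch of that guard (the loop body is an assumption, since the hunk that actually consumes the flag is elided below):

def step_sketch(param_groups, progress):
    # Hypothetical reconstruction; only the flag pattern is taken from the diff.
    warned_for_t_total = False  # reset at the start of every step() call
    for group in param_groups:
        if progress > 1.0 and not warned_for_t_total:
            print("Training beyond specified 't_total' steps with schedule '%s'"
                  % group['schedule'])
            warned_for_t_total = True  # warn at most once per step, not per group
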
...
@@ -140,11 +140,11 @@ class OpenAIAdam(Optimizer):
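
For context on the comment in the first hunk ("warning is not active with other schedules (since it doesn't break them)"): past t_total the warmup_linear multiplier goes negative, silently flipping the sign of the learning rate, whereas a constant-after-warmup schedule stays bounded. A small demonstration, with warmup_constant written here as an assumed stand-in for the non-breaking schedules:

def warmup_linear(progress, warmup=0.002):
    # Linear warmup to 1.0, then linear decay that crosses ZERO at
    # progress == 1.0 -- beyond t_total the multiplier is negative.
    if progress < warmup:
        return progress / warmup
    return 1.0 - progress

def warmup_constant(progress, warmup=0.002):
    # Warmup, then a constant multiplier: harmless beyond t_total.
    if progress < warmup:
        return progress / warmup
    return 1.0

for step, t_total in [(500, 1000), (1000, 1000), (1500, 1000)]:
    p = step / t_total
    print(step, warmup_linear(p), warmup_constant(p))
# step 1500 -> progress 1.5: warmup_linear gives -0.5 (a sign-flipped LR),
# while warmup_constant still gives 1.0.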