Commit ceb95303 authored by Gustaf Ahdritz's avatar Gustaf Ahdritz
Browse files

Remove a call to a function that no longer exists

parent 82e02065
......@@ -443,7 +443,6 @@ class AlphaFold(nn.Module):
is_final_iter = cycle_no == (num_iters - 1)
with torch.set_grad_enabled(is_grad_enabled and is_final_iter):
if is_final_iter:
self._enable_activation_checkpointing()
# Sidestep AMP bug (PyTorch issue #65766)
if torch.is_autocast_enabled():
torch.clear_autocast_cache()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment