"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "47a551d17b6ed2eaf03301f049006d559fca5cf3"
Unverified commit 0ce5236d authored by Karim Foda, committed by GitHub

Fix gradient checkpointing bug in ESM (#21980)

parent de496ef0
@@ -583,6 +583,13 @@ class EsmEncoder(nn.Module):
         output_hidden_states=False,
         return_dict=True,
     ):
+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
+                    "`use_cache=False`..."
+                )
+                use_cache = False
         all_hidden_states = () if output_hidden_states else None
         all_self_attentions = () if output_attentions else None
         all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None
@@ -596,12 +603,6 @@ class EsmEncoder(nn.Module):
             past_key_value = past_key_values[i] if past_key_values is not None else None

             if self.gradient_checkpointing and self.training:
-                if use_cache:
-                    logger.warning_once(
-                        "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
-                        "`use_cache=False`..."
-                    )
-                    use_cache = False

                 def create_custom_forward(module):
                     def custom_forward(*inputs):
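For context: the change hoists the `use_cache` compatibility check out of the per-layer loop, so the warning fires and the flag is reset once at the top of `EsmEncoder.forward` instead of being re-evaluated on every layer iteration. Below is a minimal, hypothetical sketch of the same pattern on a toy module (`ToyEncoder` and all of its names are illustrative, not part of the ESM code), including the `create_custom_forward` closure that the diff's surrounding code passes to `torch.utils.checkpoint`.

# Hypothetical sketch of the pattern this commit applies; ToyEncoder is
# illustrative only and is not part of the ESM code.
import torch
import torch.nn as nn
import torch.utils.checkpoint


class ToyEncoder(nn.Module):
    def __init__(self, num_layers=4, hidden=16):
        super().__init__()
        self.layer = nn.ModuleList(nn.Linear(hidden, hidden) for _ in range(num_layers))
        self.gradient_checkpointing = True

    def forward(self, hidden_states, use_cache=None):
        # The fix: check `use_cache` once, before the loop, instead of
        # repeating the check (and the warning) on every layer iteration.
        if self.gradient_checkpointing and self.training:
            if use_cache:
                print(
                    "`use_cache=True` is incompatible with gradient checkpointing. "
                    "Setting `use_cache=False`..."
                )
                use_cache = False

        for layer_module in self.layer:
            if self.gradient_checkpointing and self.training:
                # Same closure pattern as in the diff's context lines: wrap the
                # layer so torch.utils.checkpoint can replay its forward pass
                # during backward.
                def create_custom_forward(module):
                    def custom_forward(*inputs):
                        return module(*inputs)

                    return custom_forward

                hidden_states = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(layer_module), hidden_states
                )
            else:
                hidden_states = layer_module(hidden_states)
        return hidden_states


model = ToyEncoder().train()
out = model(torch.randn(2, 16, requires_grad=True), use_cache=True)
out.sum().backward()  # layer activations are recomputed here, trading compute for memory

Under gradient checkpointing, intermediate activations are discarded in the forward pass and recomputed during backward, which is why caching past key/values cannot be supported at the same time and the flag must be forced to False.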