"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "3a134f7c67c0eb4ac6887050964c9b6285651df8"
Unverified commit de496ef0 authored by Karim Foda, committed by GitHub

Fix gradient checkpointing bug in Codegen (#21979)

parent 4a545d18
@@ -539,6 +539,14 @@ class CodeGenModel(CodeGenPreTrainedModel):
 
         output_shape = input_shape + (hidden_states.size(-1),)
 
+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
+                    "`use_cache=False`..."
+                )
+                use_cache = False
+
         presents = () if use_cache else None
         all_self_attentions = () if output_attentions else None
         all_hidden_states = () if output_hidden_states else None
@@ -547,12 +555,6 @@ class CodeGenModel(CodeGenPreTrainedModel):
                 all_hidden_states = all_hidden_states + (hidden_states,)
 
             if self.gradient_checkpointing and self.training:
-                if use_cache:
-                    logger.warning_once(
-                        "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
-                        "`use_cache=False`..."
-                    )
-                    use_cache = False
 
                 def create_custom_forward(module):
                     def custom_forward(*inputs):
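For readers skimming the diff: the point of the move is ordering. `use_cache` must be forced to `False` before `presents = () if use_cache else None` runs, otherwise the model allocates a cache tuple that the checkpointed layer loop never fills. The snippet below is a minimal standalone sketch of that pattern; the names are illustrative only, not the actual transformers code.

```python
# Minimal sketch of the ordering this commit enforces; illustrative names only.
import logging

logger = logging.getLogger(__name__)


def forward_sketch(gradient_checkpointing: bool, training: bool, use_cache: bool):
    # Gradient checkpointing re-runs each block during the backward pass, so
    # returning cached key/value states is not supported: clear the flag first.
    if gradient_checkpointing and training:
        if use_cache:
            logger.warning(
                "`use_cache=True` is incompatible with gradient checkpointing. "
                "Setting `use_cache=False`..."
            )
            use_cache = False

    # Because the flag was cleared above, this is None rather than an empty
    # tuple that the (checkpointed) layer loop would never populate.
    presents = () if use_cache else None
    return presents


print(forward_sketch(gradient_checkpointing=True, training=True, use_cache=True))  # -> None
```

Doing the check once before the loop also avoids emitting the warning and re-evaluating the condition on every layer iteration.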