"...git@developer.sourcefind.cn:chenpangpang/open-webui.git" did not exist on "2ce6535a2d276b74005c8056741bdb5b3e663940"
Unverified commit 4c14c1f4, authored by Karim Foda, committed by GitHub

Fix gradient checkpointing bug in Trajectory Transformer (#22125)

parent d0876a09
@@ -533,6 +533,13 @@ class TrajectoryTransformerModel(TrajectoryTransformerPreTrainedModel):
         hidden_states = self.drop(token_embeddings + position_embeddings)

+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
+                )
+                use_cache = False
+
         presents = () if use_cache else None
         all_self_attentions = () if output_attentions else None
         all_hidden_states = () if output_hidden_states else None

@@ -542,11 +549,6 @@ class TrajectoryTransformerModel(TrajectoryTransformerPreTrainedModel):
                 all_hidden_states = all_hidden_states + (hidden_states,)

             if self.gradient_checkpointing and self.training:
-                if use_cache:
-                    logger.warning_once(
-                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
-                    )
-                    use_cache = False

                 def create_custom_forward(module):
                     def custom_forward(*inputs):
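The change moves the `use_cache` compatibility check out of the per-layer loop to the top of the forward pass, before `presents` is initialized. In the old order, `presents` was set to an empty tuple while `use_cache` was still True, and gradient checkpointing only flipped `use_cache` to False once the loop was entered, so the model returned an empty tuple instead of None for the cache (and re-ran the check on every layer). Below is a minimal runnable sketch of the corrected pattern; ToyBlock, toy_forward, and the plain print call are illustrative stand-ins, not the actual Transformers code.

import torch
from torch.utils.checkpoint import checkpoint


class ToyBlock(torch.nn.Module):
    """Stand-in for a transformer block that can return a cache entry."""

    def __init__(self, dim=8):
        super().__init__()
        self.linear = torch.nn.Linear(dim, dim)

    def forward(self, hidden_states, use_cache=False):
        hidden_states = self.linear(hidden_states).relu()
        # A real attention block would return its key/value cache here.
        present = hidden_states if use_cache else None
        return hidden_states, present


def toy_forward(blocks, hidden_states, use_cache=True, gradient_checkpointing=True):
    # The fix: resolve the use_cache conflict *before* initializing presents,
    # so callers get presents=None rather than a misleading empty tuple.
    if gradient_checkpointing and use_cache:
        print("`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...")
        use_cache = False

    presents = () if use_cache else None
    for block in blocks:
        if gradient_checkpointing:
            # Same closure trick as the diff: checkpoint re-runs the block's
            # forward during backward instead of storing its activations.
            def create_custom_forward(module):
                def custom_forward(*inputs):
                    return module(*inputs, use_cache=use_cache)

                return custom_forward

            hidden_states, present = checkpoint(
                create_custom_forward(block), hidden_states, use_reentrant=False
            )
        else:
            hidden_states, present = block(hidden_states, use_cache=use_cache)
        if use_cache:
            presents = presents + (present,)
    return hidden_states, presents


blocks = torch.nn.ModuleList([ToyBlock(), ToyBlock()])
x = torch.randn(1, 4, 8, requires_grad=True)
out, presents = toy_forward(blocks, x)
assert presents is None  # with the old ordering this would have been ()
out.sum().backward()  # gradients still flow through the checkpointed blocks

With the old ordering (check inside the loop), the final assert would fail: presents would come back as an empty tuple even though no layer ever produced a cache entry.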