chenpangpang / transformers
"...linfusion_pytorch.git" did not exist on "dc3b93c93f74ce816aa1a893d91549ec8fd9e585"
Unverified commit 4a545d18, authored Mar 06, 2023 by Karim Foda, committed by GitHub on Mar 06, 2023
Parent: 451263b8

Fix gradient checkpointing bug in BlipText (#21978)

Make Format
Showing 1 changed file with 6 additions and 5 deletions:

src/transformers/models/blip/modeling_blip_text.py (+6, -5)
src/transformers/models/blip/modeling_blip_text.py

@@ -393,6 +393,12 @@ class BlipTextEncoder(nn.Module):
         output_hidden_states=False,
         return_dict=True,
     ):
+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warn(
+                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
+                )
+                use_cache = False
         all_hidden_states = () if output_hidden_states else None
         all_self_attentions = () if output_attentions else None
         all_cross_attentions = () if output_attentions and self.config.is_decoder else None
@@ -408,11 +414,6 @@ class BlipTextEncoder(nn.Module):
             past_key_value = past_key_values[i] if past_key_values is not None else None

             if self.gradient_checkpointing and self.training:
-                if use_cache:
-                    logger.warn(
-                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
-                    )
-                    use_cache = False

                 def create_custom_forward(module):
                     def custom_forward(*inputs):
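As a minimal sketch of the behavior this commit changes (not code from the repository: TinyEncoder, its Linear layer stack, and the print stand-in for logger.warn are all hypothetical), the pattern is to resolve the use_cache/gradient-checkpointing conflict once, before the layer loop, so the warning fires a single time and use_cache is consistently disabled for the whole forward pass:

import torch
from torch.utils.checkpoint import checkpoint


class TinyEncoder(torch.nn.Module):
    # Hypothetical stand-in for BlipTextEncoder: a stack of layers run with
    # gradient checkpointing, mirroring the control flow the commit fixes.
    def __init__(self, num_layers=4, dim=8):
        super().__init__()
        self.layers = torch.nn.ModuleList(torch.nn.Linear(dim, dim) for _ in range(num_layers))
        self.gradient_checkpointing = True

    def forward(self, hidden_states, use_cache=True):
        # The commit's change: check the use_cache conflict once, up front,
        # instead of re-checking (and re-warning) inside every loop iteration.
        if self.gradient_checkpointing and self.training and use_cache:
            print("`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...")
            use_cache = False
        for layer in self.layers:
            if self.gradient_checkpointing and self.training:
                # Recompute this layer's activations during backward to save memory.
                hidden_states = checkpoint(layer, hidden_states, use_reentrant=False)
            else:
                hidden_states = layer(hidden_states)
        return hidden_states


model = TinyEncoder().train()
out = model(torch.randn(2, 8))  # the warning now fires once, not once per layer
out.sum().backward()

In the real BlipTextEncoder, use_cache controls past key/value caching, which the placeholder Linear layers here do not implement; the sketch only mirrors the hoisted check, not the caching itself.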