Unverified commit c1db6a3b, authored Mar 13, 2023 by Karim Foda, committed by GitHub on Mar 13, 2023.

Fix gradient checkpointing bug in xmod (#22129)

Parent: 6652e7da
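
The diff below hoists the `use_cache` guard out of the per-layer loop and runs it once at the top of `XmodEncoder.forward`, so caching is disabled before the loop starts; the removed lines in the second hunk performed the same check inside the loop, after per-forward setup had already read the original flag. As a minimal, self-contained sketch of the pattern (ToyEncoder and its internals are illustrative assumptions, not the actual Xmod classes, which take attention masks and more):

# Sketch of the hoisted guard; ToyEncoder is a hypothetical stand-in for
# XmodEncoder, simplified to linear layers with no attention arguments.
import torch
import torch.utils.checkpoint
from torch import nn

class ToyEncoder(nn.Module):
    def __init__(self, num_layers=2, hidden=8):
        super().__init__()
        self.layer = nn.ModuleList(nn.Linear(hidden, hidden) for _ in range(num_layers))
        self.gradient_checkpointing = True

    def forward(self, hidden_states, use_cache=False):
        # The fix: check once, before the loop, so every later cache
        # decision sees the corrected flag.
        if self.gradient_checkpointing and self.training:
            if use_cache:
                print("`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...")
                use_cache = False

        next_cache = () if use_cache else None  # initialized from the corrected flag
        for layer_module in self.layer:
            if self.gradient_checkpointing and self.training:

                def create_custom_forward(module):
                    def custom_forward(*inputs):
                        return module(*inputs)

                    return custom_forward

                # activations are recomputed during backward instead of stored
                hidden_states = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(layer_module), hidden_states
                )
            else:
                hidden_states = layer_module(hidden_states)
            if use_cache:
                next_cache = next_cache + (hidden_states,)
        return hidden_states, next_cache

enc = ToyEncoder().train()
out, cache = enc(torch.randn(2, 8), use_cache=True)  # warns once; cache stays None

With the pre-fix placement, the warning (and the flag flip) only happened once execution reached the checkpointing branch inside the loop, rather than before any loop state was set up.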
Showing 1 changed file with 6 additions and 5 deletions.

src/transformers/models/xmod/modeling_xmod.py (+6, -5)
@@ -552,6 +552,12 @@ class XmodEncoder(nn.Module):
         output_hidden_states: Optional[bool] = False,
         return_dict: Optional[bool] = True,
     ) -> Union[Tuple[torch.Tensor], BaseModelOutputWithPastAndCrossAttentions]:
+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
+                )
+                use_cache = False
         all_hidden_states = () if output_hidden_states else None
         all_self_attentions = () if output_attentions else None
         all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None
@@ -565,11 +571,6 @@ class XmodEncoder(nn.Module):
             past_key_value = past_key_values[i] if past_key_values is not None else None

             if self.gradient_checkpointing and self.training:
-                if use_cache:
-                    logger.warning_once(
-                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
-                    )
-                    use_cache = False

                 def create_custom_forward(module):
                     def custom_forward(*inputs):
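
For context on the `create_custom_forward` lines that close the second hunk: this is the usual wrapper that adapts a layer to `torch.utils.checkpoint.checkpoint`, which stores no intermediate activations and reruns the layer's forward during backward. A minimal sketch under that assumption (a single linear layer; the real call in modeling_xmod.py passes attention masks, head masks, and past key/values):

import torch
import torch.utils.checkpoint
from torch import nn

def run_layer_with_checkpointing(layer_module, hidden_states):
    # checkpoint() hands its function plain positional tensors, so the
    # closure adapts the module call to a *inputs signature
    def create_custom_forward(module):
        def custom_forward(*inputs):
            return module(*inputs)

        return custom_forward

    # activations inside layer_module are recomputed during backward
    return torch.utils.checkpoint.checkpoint(create_custom_forward(layer_module), hidden_states)

layer = nn.Linear(16, 16)
x = torch.randn(2, 16, requires_grad=True)
out = run_layer_with_checkpointing(layer, x)
out.sum().backward()  # triggers the recomputation

Because backward reruns the layer, side state produced on the first pass (such as cached key/values) is not reliable under checkpointing, which is why the commit forces `use_cache=False` up front.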