chenpangpang / transformers / Commits

Unverified commit 72e9ca75, authored Mar 01, 2023 by saswatmeher, committed by GitHub on Mar 01, 2023
Fix gradient checkpointing bug Bart (#21866)
Co-authored-by: saswatmeher <saswatmeher@cse.iitb.ac.in>

parent 5e6cd51b
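The change itself is small: the `use_cache` / gradient-checkpointing compatibility check moves out of the per-layer decoder loop and runs once before the layers, so `use_cache` is downgraded a single time instead of being re-checked on every iteration. A minimal sketch of the resulting control flow, using a hypothetical `TinyDecoder` stand-in rather than the real `BartDecoder`:

import torch
from torch import nn
from torch.utils.checkpoint import checkpoint

class TinyDecoder(nn.Module):
    # Hypothetical stand-in for BartDecoder: only the control flow this
    # commit fixes is modeled here.
    def __init__(self, num_layers=2, hidden=8):
        super().__init__()
        self.layers = nn.ModuleList(nn.Linear(hidden, hidden) for _ in range(num_layers))
        self.gradient_checkpointing = True

    def forward(self, hidden_states, use_cache=True):
        # The fixed placement: check once, before the loop.
        if self.gradient_checkpointing and self.training:
            if use_cache:
                print("`use_cache=True` is incompatible with gradient checkpointing. "
                      "Setting `use_cache=False`...")  # fires once, not per layer
                use_cache = False
        for layer in self.layers:
            if self.gradient_checkpointing and self.training:
                # Recompute this layer's activations during backward to save memory.
                hidden_states = checkpoint(layer, hidden_states)
            else:
                hidden_states = layer(hidden_states)
        return hidden_states

model = TinyDecoder().train()
out = model(torch.randn(2, 8, requires_grad=True))
out.sum().backward()  # the warning above printed exactly once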
Showing 2 changed files with 14 additions and 10 deletions (+14, -10)
src/transformers/models/bart/modeling_bart.py (+7, -5)
src/transformers/models/plbart/modeling_plbart.py (+7, -5)
src/transformers/models/bart/modeling_bart.py

...
@@ -1058,6 +1058,13 @@ class BartDecoder(BartPretrainedModel):
         hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)

+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
+                )
+                use_cache = False
+
         # decoder layers
         all_hidden_states = () if output_hidden_states else None
         all_self_attns = () if output_attentions else None
...
@@ -1084,11 +1091,6 @@ class BartDecoder(BartPretrainedModel):
             past_key_value = past_key_values[idx] if past_key_values is not None else None

             if self.gradient_checkpointing and self.training:
-                if use_cache:
-                    logger.warning_once(
-                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
-                    )
-                    use_cache = False

                 def create_custom_forward(module):
                     def custom_forward(*inputs):
...
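For context on the unchanged `create_custom_forward` closure visible in the hunk above: `torch.utils.checkpoint.checkpoint` only passes positional tensor arguments through to the wrapped callable, so flags like `output_attentions` and `use_cache` are captured by closure instead. A self-contained sketch of that pattern, with a hypothetical `DummyLayer` standing in for the real `BartDecoderLayer`:

import torch
from torch import nn
from torch.utils.checkpoint import checkpoint

output_attentions = False
use_cache = False  # already forced off before the loop by this commit

class DummyLayer(nn.Module):
    # Stand-in for BartDecoderLayer; just doubles its input.
    def forward(self, hidden_states, output_attentions, use_cache):
        return hidden_states * 2

def create_custom_forward(module):
    def custom_forward(*inputs):
        # Tensors arrive positionally; the flags come from the closure.
        return module(*inputs, output_attentions, use_cache)
    return custom_forward

layer = DummyLayer()
x = torch.randn(2, 4, requires_grad=True)
out = checkpoint(create_custom_forward(layer), x)
out.sum().backward()  # DummyLayer.forward is re-run during this backward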
src/transformers/models/plbart/modeling_plbart.py

...
@@ -1021,6 +1021,13 @@ class PLBartDecoder(PLBartPreTrainedModel):
         hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)

+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
+                )
+                use_cache = False
+
         # decoder layers
         all_hidden_states = () if output_hidden_states else None
         all_self_attns = () if output_attentions else None
...
@@ -1047,11 +1054,6 @@ class PLBartDecoder(PLBartPreTrainedModel):
             past_key_value = past_key_values[idx] if past_key_values is not None else None

             if self.gradient_checkpointing and self.training:
-                if use_cache:
-                    logger.warning_once(
-                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
-                    )
-                    use_cache = False

                 def create_custom_forward(module):
                     def custom_forward(*inputs):
...
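The PLBart hunks mirror the Bart ones line for line. One hedged way to exercise the fixed path, assuming the standard `gradient_checkpointing_enable()` API on `PreTrainedModel` and using `facebook/bart-base` purely as an illustrative checkpoint:

from transformers import BartForConditionalGeneration

model = BartForConditionalGeneration.from_pretrained("facebook/bart-base")
model.gradient_checkpointing_enable()  # sets gradient_checkpointing on supporting modules
model.train()
# With checkpointing enabled, BartDecoder.forward now downgrades use_cache
# once, before the layer loop, instead of warning inside every layer.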