chenpangpang/transformers · Commits · f12c74f5
"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "0ba94aceb6e1ab448e0acc896764a4496759cb14"
Commit f12c74f5 (unverified)
Authored Mar 04, 2023 by Karim Foda; committed by GitHub on Mar 04, 2023
Fix gradient checkpointing bug in Pegasus (#21944)
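This commit hoists the use_cache/gradient-checkpointing compatibility check out of the per-layer decoder loop in both Pegasus and Pegasus-X. Before the change, each decoder layer re-ran the check on every loop iteration; after it, `use_cache` is cleared once, before the first layer runs, as the two diffs below show.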
Parent: f932ee61
Showing 2 changed files with 14 additions and 10 deletions:
src/transformers/models/pegasus/modeling_pegasus.py (+7, -5)
src/transformers/models/pegasus_x/modeling_pegasus_x.py (+7, -5)
src/transformers/models/pegasus/modeling_pegasus.py @ f12c74f5
@@ -1044,6 +1044,13 @@ class PegasusDecoder(PegasusPreTrainedModel):
 
         hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
 
+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
+                )
+                use_cache = False
+
         # decoder layers
         all_hidden_states = () if output_hidden_states else None
         all_self_attns = () if output_attentions else None
@@ -1069,11 +1076,6 @@ class PegasusDecoder(PegasusPreTrainedModel):
             past_key_value = past_key_values[idx] if past_key_values is not None else None
 
             if self.gradient_checkpointing and self.training:
-                if use_cache:
-                    logger.warning_once(
-                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
-                    )
-                    use_cache = False
 
                 def create_custom_forward(module):
                     def custom_forward(*inputs):
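For readers unfamiliar with this pattern, here is a minimal, self-contained sketch of the control flow the hunks above fix. It is not the transformers source: ToyDecoderLayer and run_layers are hypothetical stand-ins for PegasusDecoderLayer and the decoder loop. It shows why `use_cache` has to be turned off once, before the layer loop, when gradient checkpointing is active: checkpoint() discards and recomputes intermediate activations, which does not mix with accumulating a key/value cache per layer.

import torch
import torch.nn as nn
from torch.utils.checkpoint import checkpoint


class ToyDecoderLayer(nn.Module):  # hypothetical stand-in for PegasusDecoderLayer
    def __init__(self, dim=16):
        super().__init__()
        self.proj = nn.Linear(dim, dim)

    def forward(self, hidden_states):
        return torch.tanh(self.proj(hidden_states))


def run_layers(layers, hidden_states, gradient_checkpointing, training, use_cache):
    # The fixed code clears the flag once, before any layer runs, so no layer
    # ever tries to build a KV cache whose tensors checkpointing would discard.
    if gradient_checkpointing and training and use_cache:
        use_cache = False  # mirrors the hoisted check in this commit

    for layer in layers:
        if gradient_checkpointing and training:
            # create_custom_forward in the diff wraps the layer the same way:
            # checkpoint() re-runs this closure during backward, trading
            # recomputation for activation memory.
            def create_custom_forward(module):
                def custom_forward(*inputs):
                    return module(*inputs)
                return custom_forward

            hidden_states = checkpoint(create_custom_forward(layer), hidden_states)
        else:
            hidden_states = layer(hidden_states)
    return hidden_states


layers = nn.ModuleList([ToyDecoderLayer() for _ in range(2)])
x = torch.randn(1, 4, 16, requires_grad=True)
out = run_layers(layers, x, gradient_checkpointing=True, training=True, use_cache=True)
out.sum().backward()  # gradients flow through the recomputed forward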
src/transformers/models/pegasus_x/modeling_pegasus_x.py @ f12c74f5
@@ -1293,6 +1293,13 @@ class PegasusXDecoder(PegasusXPreTrainedModel):
 
         hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
 
+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
+                )
+                use_cache = False
+
         # decoder layers
         all_hidden_states = () if output_hidden_states else None
         all_self_attns = () if output_attentions else None
@@ -1310,11 +1317,6 @@ class PegasusXDecoder(PegasusXPreTrainedModel):
             past_key_value = past_key_values[idx] if past_key_values is not None else None
 
             if self.gradient_checkpointing and self.training:
-                if use_cache:
-                    logger.warning_once(
-                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
-                    )
-                    use_cache = False
 
                 def create_custom_forward(module):
                     def custom_forward(*inputs):
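As a usage note, the path exercised by this change is the standard gradient-checkpointing training flow. A minimal sketch follows; the checkpoint name google/pegasus-xsum is a real public checkpoint, but the input text and labels are illustrative, and tokenizing targets directly (rather than via a target-tokenizer context) is a simplification.

import torch
from transformers import PegasusForConditionalGeneration, PegasusTokenizer

tokenizer = PegasusTokenizer.from_pretrained("google/pegasus-xsum")
model = PegasusForConditionalGeneration.from_pretrained("google/pegasus-xsum")

model.gradient_checkpointing_enable()  # activates the code path this commit fixes
model.train()

batch = tokenizer(["Some long article text ..."], return_tensors="pt")
labels = tokenizer(["A short summary."], return_tensors="pt").input_ids

# With the fix, use_cache is downgraded once (with a single warning_once)
# before the decoder layer loop instead of inside it.
outputs = model(**batch, labels=labels, use_cache=True)
outputs.loss.backward()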