Commit 71688a88 (unverified)
Fix TF T5 only encoder model with booleans (#8925)
Authored Dec 04, 2020 by Lysandre Debut; committed via GitHub on Dec 04, 2020
Parent: dcd3046f
Showing 1 changed file with 6 additions and 19 deletions.

src/transformers/models/t5/modeling_tf_t5.py (+6, -19)
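What the change does, reading the diff: `TFT5ForConditionalGeneration.call` and `TFT5EncoderModel.call` stop re-deriving the `output_attentions` / `output_hidden_states` / `return_dict` booleans from the raw call arguments and instead use the values that `input_processing` has already normalized into the `inputs` dict. Passing `config=self.config` into `input_processing` is what lets it apply the config defaults itself, so the per-method fallback boilerplate can be deleted.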
@@ -1336,14 +1336,6 @@ class TFT5ForConditionalGeneration(TFT5PreTrainedModel, TFCausalLanguageModeling
             output = (logits,) + decoder_outputs[1:] + inputs["encoder_outputs"]
             return ((loss,) + output) if loss is not None else output
 
-        # Putting this before breaks tf compilation.
-        output_attentions = (
-            output_attentions if inputs["output_attentions"] is not None else self.config.output_attentions
-        )
-        output_hidden_states = (
-            output_hidden_states if inputs["output_hidden_states"] is not None else self.config.output_hidden_states
-        )
-
         # This is long and annoying but if we introduce return_dict at the TFT5MainLayer level (like in PyTorch)
         # TF refuses to compile anymore.
         if not cast_bool_to_primitive(inputs["use_cache"], self.config.use_cache):
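The surviving context line still routes `use_cache` through `cast_bool_to_primitive`, which the TF T5 code uses to turn a flag that may arrive as `None`, a Python bool, or a scalar tensor into a plain Python bool before branching on it. A minimal sketch of that behavior, assuming the real helper in `transformers.modeling_tf_utils` works along these lines (the fallback-to-default second argument matches how it is called above):

import tensorflow as tf

# Hedged re-implementation sketch, not the transformers source: resolve a
# possibly-missing, possibly-tensor boolean flag to a plain Python bool so
# that `if not flag:` is decided at trace time rather than inside the graph.
def cast_bool_sketch(value, default):
    if value is None:
        return bool(default)           # fall back to the config default
    if isinstance(value, tf.Tensor):
        return bool(value.numpy())     # scalar boolean tensor (eager mode)
    return bool(value)

print(cast_bool_sketch(None, True))                # True  (default wins)
print(cast_bool_sketch(tf.constant(False), True))  # False (explicit tensor wins)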
@@ -1481,6 +1473,7 @@ class TFT5EncoderModel(TFT5PreTrainedModel):
         """
         inputs = input_processing(
             func=self.call,
+            config=self.config,
             input_ids=input_ids,
             attention_mask=attention_mask,
             head_mask=head_mask,
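The single addition here, `config=self.config`, is what allows `input_processing` to fill unspecified booleans from the model config, making the hand-rolled fallbacks deleted in the next hunk redundant. A sketch of that resolution rule, with a hypothetical `resolve_flag` standing in for what `input_processing` does per flag:

# Hypothetical helper illustrating the None-means-use-config-default rule;
# the real logic lives inside transformers.modeling_tf_utils.input_processing.
def resolve_flag(value, config_default):
    return config_default if value is None else value

class DummyConfig:
    output_attentions = False

print(resolve_flag(None, DummyConfig.output_attentions))  # False (config default)
print(resolve_flag(True, DummyConfig.output_attentions))  # True  (caller's value)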
@@ -1492,12 +1485,6 @@ class TFT5EncoderModel(TFT5PreTrainedModel):
             kwargs_call=kwargs,
         )
-
-        output_attentions = inputs["output_attentions"] if output_attentions else self.config.output_attentions
-        output_hidden_states = (
-            inputs["output_hidden_states"] if output_hidden_states else self.config.output_hidden_states
-        )
-        return_dict = return_dict if inputs["return_dict"] is not None else self.config.return_dict
 
         encoder_outputs = self.encoder(
             input_ids,
             attention_mask=inputs["attention_mask"],
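The deleted lines above also contain the boolean bug the commit title points at: they gate on the truthiness of the raw `output_attentions` / `output_hidden_states` arguments, so an explicit `False` from the caller falls through to the config value. A small isolated reproduction of that pattern (variable names are illustrative only):

config_default = True    # e.g. self.config.output_attentions
raw_argument = False     # caller explicitly disabled attentions
resolved_input = False   # what input_processing put in inputs["output_attentions"]

# Old, deleted pattern: truthiness test on the raw argument loses explicit False.
old_value = resolved_input if raw_argument else config_default
print(old_value)        # True -- the caller's False is silently overridden

# New pattern: trust the value input_processing already resolved.
print(resolved_input)   # False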
@@ -1507,17 +1494,17 @@ class TFT5EncoderModel(TFT5PreTrainedModel):
             head_mask=head_mask,
             past_key_values=None,
             use_cache=False,
-            output_attentions=output_attentions,
-            output_hidden_states=output_hidden_states,
+            output_attentions=inputs["output_attentions"],
+            output_hidden_states=inputs["output_hidden_states"],
             training=inputs["training"],
         )
 
-        if not return_dict:
+        if not inputs["return_dict"]:
             return encoder_outputs
 
-        if not cast_bool_to_primitive(output_hidden_states, self.config.output_hidden_states):
+        if not cast_bool_to_primitive(inputs["output_hidden_states"], self.config.output_hidden_states):
             encoder_outputs = encoder_outputs[:1] + (None,) + encoder_outputs[1:]
 
-        if not cast_bool_to_primitive(output_attentions, self.config.output_attentions):
+        if not cast_bool_to_primitive(inputs["output_attentions"], self.config.output_attentions):
             encoder_outputs = encoder_outputs + (None,)
         return TFBaseModelOutput(
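For reference, a hedged usage sketch of the call path this commit fixes: an encoder-only T5 invoked with explicit boolean flags (the `t5-small` checkpoint is just an example; any T5 checkpoint with TF weights would do):

from transformers import T5Tokenizer, TFT5EncoderModel

tokenizer = T5Tokenizer.from_pretrained("t5-small")
model = TFT5EncoderModel.from_pretrained("t5-small")

enc = tokenizer("Studies have shown that owning a dog is good for you", return_tensors="tf")
outputs = model(
    enc.input_ids,
    attention_mask=enc.attention_mask,
    output_attentions=False,     # explicit booleans hit the fixed branches above
    output_hidden_states=False,
    return_dict=True,
)
print(outputs.last_hidden_state.shape)  # (1, sequence_length, d_model)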