chenpangpang/transformers, commit a163c9ca
".github/vscode:/vscode.git/clone" did not exist on "add415124f8c6814763125a0d98411bf61222e30"
Unverified commit a163c9ca, authored May 26, 2020 by ZhuBaohe; committed by GitHub on May 26, 2020.
[T5] Fix Cross Attention position bias (#4499)
* fix
* fix1
Parent: 1d690289
Showing 2 changed files with 2 additions and 2 deletions (+2, -2):
src/transformers/modeling_t5.py (+1, -1)
src/transformers/modeling_tf_t5.py (+1, -1)
src/transformers/modeling_t5.py

@@ -745,7 +745,7 @@ class T5Stack(T5PreTrainedModel):
                 # layer_outputs = hidden-states, key-value-states (self-attention weights), (self-attention position bias), (cross-attention weights), (cross-attention position bias)
                 position_bias = layer_outputs[3 if self.output_attentions else 2]
                 if self.is_decoder and encoder_hidden_states is not None:
-                    encoder_decoder_position_bias = layer_outputs[4 if self.output_attentions else 3]
+                    encoder_decoder_position_bias = layer_outputs[5 if self.output_attentions else 3]
             # append next layer key value states
             present_key_value_states = present_key_value_states + (present_key_value_state,)
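The substance of the fix: per the tuple layout spelled out in the in-code comment, when output_attentions is enabled the decoder layer's outputs also carry the present key-value states and both attention-weight tensors, so the cross-attention position bias sits at index 5; index 4 is the cross-attention weights, which the old code was mistakenly reusing as the bias. A minimal sketch of that layout, using hypothetical placeholder values (the variable names below are illustrative, not from the patch):

# Hypothetical stand-ins for the tensors in a decoder layer's outputs;
# only the tuple positions matter for this fix.
hidden_states = "hidden-states"
present_key_value_state = "key-value-states"
self_attn_weights = "self-attention weights"
self_attn_position_bias = "self-attention position bias"
cross_attn_weights = "cross-attention weights"
cross_attn_position_bias = "cross-attention position bias"

# Layout when output_attentions=True, following the in-code comment.
# With output_attentions=False the two weights entries are absent,
# shifting the biases down to indices 2 and 3.
layer_outputs = (
    hidden_states,              # index 0
    present_key_value_state,    # index 1
    self_attn_weights,          # index 2
    self_attn_position_bias,    # index 3
    cross_attn_weights,         # index 4
    cross_attn_position_bias,   # index 5
)

output_attentions = True
position_bias = layer_outputs[3 if output_attentions else 2]
# Before the fix, index 4 grabbed the cross-attention *weights*;
# index 5 is the cross-attention *position bias* the code actually wants.
encoder_decoder_position_bias = layer_outputs[5 if output_attentions else 3]
assert encoder_decoder_position_bias == "cross-attention position bias"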
src/transformers/modeling_tf_t5.py

@@ -682,7 +682,7 @@ class TFT5MainLayer(tf.keras.layers.Layer):
                 # layer_outputs = hidden-states, (self-attention weights), (self-attention position bias), (cross-attention weights), (cross-attention position bias)
                 position_bias = layer_outputs[3 if self.output_attentions else 2]
                 if self.is_decoder and encoder_hidden_states is not None:
-                    encoder_decoder_position_bias = layer_outputs[4 if self.output_attentions else 3]
+                    encoder_decoder_position_bias = layer_outputs[5 if self.output_attentions else 3]
             # append next layer key value states
             present_key_value_states = present_key_value_states + (present_key_value_state,)
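Note that the TF hunk's comment omits the key-value-states entry, but the surrounding context (present_key_value_states is extended with present_key_value_state just below) suggests TFT5MainLayer's layer_outputs carries the same extra entry, which would be why the identical 4 to 5 index correction applies in both backends.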