ModelZoo / ResNet50_tensorflow

Commit d8d53ba8
Authored May 05, 2022 by Scott Zhu
Committed by A. Unique TensorFlower on May 05, 2022

Prepare for upcoming keras initializer change.

PiperOrigin-RevId: 446878547
Parent: 2600f792
Changes: 1 changed file with 5 additions and 3 deletions
--- a/official/legacy/transformer/attention_layer.py
+++ b/official/legacy/transformer/attention_layer.py
@@ -17,6 +17,8 @@ import math
 import tensorflow as tf
 
+from official.modeling import tf_utils
+
 
 class Attention(tf.keras.layers.Layer):
   """Multi-headed attention layer."""
@@ -53,19 +55,19 @@ class Attention(tf.keras.layers.Layer):
     self.query_dense_layer = tf.keras.layers.experimental.EinsumDense(
         "BTE,ENH->BTNH",
         output_shape=(None, self.num_heads, size_per_head),
-        kernel_initializer=attention_initializer,
+        kernel_initializer=tf_utils.clone_initializer(attention_initializer),
         bias_axes=None,
         name="query")
     self.key_dense_layer = tf.keras.layers.experimental.EinsumDense(
         "BTE,ENH->BTNH",
         output_shape=(None, self.num_heads, size_per_head),
-        kernel_initializer=attention_initializer,
+        kernel_initializer=tf_utils.clone_initializer(attention_initializer),
         bias_axes=None,
         name="key")
     self.value_dense_layer = tf.keras.layers.experimental.EinsumDense(
         "BTE,ENH->BTNH",
         output_shape=(None, self.num_heads, size_per_head),
-        kernel_initializer=attention_initializer,
+        kernel_initializer=tf_utils.clone_initializer(attention_initializer),
         bias_axes=None,
         name="value")
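The change itself is small: the three EinsumDense projections previously all received the same attention_initializer object, and the commit wraps each use in tf_utils.clone_initializer(...) so every layer gets its own initializer instance. This matters once an initializer carries per-instance state such as a seed, because sharing one object would tie the query, key, and value initializations together. The sketch below illustrates the idea only; the body of clone_initializer here is an assumption for illustration (rebuilding the initializer from its Keras config), not the Model Garden source, and GlorotUniform(seed=42) is just a hypothetical example initializer.

# Minimal sketch (assumption, not the Model Garden source): rebuild a Keras
# initializer from its config so each layer receives its own instance instead
# of sharing one object across the query/key/value projections.
import tensorflow as tf


def clone_initializer(initializer):
  # Keras initializer objects can be re-created from their serialized config;
  # strings like "glorot_uniform" and plain callables are passed through as-is.
  if isinstance(initializer, tf.keras.initializers.Initializer):
    return initializer.__class__.from_config(initializer.get_config())
  return initializer


# Usage mirroring the diff above: each projection gets a fresh copy.
attention_initializer = tf.keras.initializers.GlorotUniform(seed=42)
query_init = clone_initializer(attention_initializer)
key_init = clone_initializer(attention_initializer)
assert query_init is not attention_initializer  # distinct instances, same config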