Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
ModelZoo
ResNet50_tensorflow
Commits
fbfa69c8
Commit
fbfa69c8
authored
Sep 14, 2020
by
Zhenyu Tan
Committed by
A. Unique TensorFlower
Sep 14, 2020
Browse files
Replace keras_nlp.module with keras_nlp.layers.module
PiperOrigin-RevId: 331620370
parent
4445edb3
Changes
3
Show whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
4 additions
and
4 deletions
+4
-4
official/nlp/keras_nlp/__init__.py
official/nlp/keras_nlp/__init__.py
+1
-1
official/nlp/modeling/layers/transformer.py
official/nlp/modeling/layers/transformer.py
+1
-1
official/nlp/modeling/networks/albert_transformer_encoder.py
official/nlp/modeling/networks/albert_transformer_encoder.py
+2
-2
No files found.
official/nlp/keras_nlp/__init__.py
View file @
fbfa69c8
...
@@ -14,4 +14,4 @@
...
@@ -14,4 +14,4 @@
 # ==============================================================================
 """Keras-NLP package definition."""
 # pylint: disable=wildcard-import
-from official.nlp.keras_nlp.layers import *
+from official.nlp.keras_nlp import layers
official/nlp/modeling/layers/transformer.py
View file @
fbfa69c8
...
@@ -25,7 +25,7 @@ from official.nlp.modeling.layers.util import tf_function_if_eager
...
@@ -25,7 +25,7 @@ from official.nlp.modeling.layers.util import tf_function_if_eager
 @tf.keras.utils.register_keras_serializable(package="Text")
-class Transformer(keras_nlp.TransformerEncoderBlock):
+class Transformer(keras_nlp.layers.TransformerEncoderBlock):
   """Transformer layer.

   This layer implements the Transformer from "Attention Is All You Need".
...
...
official/nlp/modeling/networks/albert_transformer_encoder.py
View file @
fbfa69c8
...
@@ -114,7 +114,7 @@ class AlbertTransformerEncoder(tf.keras.Model):
...
@@ -114,7 +114,7 @@ class AlbertTransformerEncoder(tf.keras.Model):
     word_embeddings = self._embedding_layer(word_ids)
     # Always uses dynamic slicing for simplicity.
-    self._position_embedding_layer = keras_nlp.PositionEmbedding(
+    self._position_embedding_layer = keras_nlp.layers.PositionEmbedding(
         initializer=initializer,
         max_length=max_sequence_length,
         name='position_embedding')
...
@@ -150,7 +150,7 @@ class AlbertTransformerEncoder(tf.keras.Model):
...
@@ -150,7 +150,7 @@ class AlbertTransformerEncoder(tf.keras.Model):
     data = embeddings
     attention_mask = layers.SelfAttentionMask()([data, mask])
-    shared_layer = keras_nlp.TransformerEncoderBlock(
+    shared_layer = keras_nlp.layers.TransformerEncoderBlock(
         num_attention_heads=num_attention_heads,
         inner_dim=intermediate_size,
         inner_activation=activation,
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment