"...git@developer.sourcefind.cn:OpenDAS/mmdetection3d.git" did not exist on "c81426a3a0b8c3cd08124fa4a26eb8b8e483b163"
Commit 83c4844e authored by Zhenyu Tan, committed by A. Unique TensorFlower

Replace keras_nlp.module with keras_nlp.layers.module

PiperOrigin-RevId: 331620370
parent aa7749c5
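
For context, this commit moves the re-exported blocks under the `layers` submodule, so downstream code accesses them as `keras_nlp.layers.X` instead of `keras_nlp.X`. A minimal before/after sketch of the new access pattern (the constructor arguments mirror those in the diff below; the specific values are illustrative, not taken from this commit):

    # Before this commit: blocks were re-exported at the package root.
    #   from official.nlp import keras_nlp
    #   block = keras_nlp.TransformerEncoderBlock(...)

    # After this commit: blocks live under keras_nlp.layers.
    from official.nlp import keras_nlp

    block = keras_nlp.layers.TransformerEncoderBlock(
        num_attention_heads=8,   # illustrative values, not from this commit
        inner_dim=2048,
        inner_activation='relu')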
@@ -14,4 +14,4 @@
# ==============================================================================
"""Keras-NLP package definition."""
# pylint: disable=wildcard-import
-from official.nlp.keras_nlp.layers import *
+from official.nlp.keras_nlp import layers
@@ -25,7 +25,7 @@ from official.nlp.modeling.layers.util import tf_function_if_eager
@tf.keras.utils.register_keras_serializable(package="Text")
-class Transformer(keras_nlp.TransformerEncoderBlock):
+class Transformer(keras_nlp.layers.TransformerEncoderBlock):
"""Transformer layer.
This layer implements the Transformer from "Attention Is All You Need".
@@ -114,7 +114,7 @@ class AlbertTransformerEncoder(tf.keras.Model):
word_embeddings = self._embedding_layer(word_ids)
# Always uses dynamic slicing for simplicity.
-    self._position_embedding_layer = keras_nlp.PositionEmbedding(
+    self._position_embedding_layer = keras_nlp.layers.PositionEmbedding(
initializer=initializer,
max_length=max_sequence_length,
name='position_embedding')
@@ -150,7 +150,7 @@ class AlbertTransformerEncoder(tf.keras.Model):
data = embeddings
attention_mask = layers.SelfAttentionMask()([data, mask])
-    shared_layer = keras_nlp.TransformerEncoderBlock(
+    shared_layer = keras_nlp.layers.TransformerEncoderBlock(
num_attention_heads=num_attention_heads,
inner_dim=intermediate_size,
inner_activation=activation,