Commit c303344e authored by Frederick Liu, committed by A. Unique TensorFlower

Internal change

PiperOrigin-RevId: 401913091
parent 96aff5f1
...
@@ -18,7 +18,7 @@
 import tensorflow as tf
 from official.modeling import activations
-from official.nlp import keras_nlp
+from official.nlp import modeling
 from official.nlp.modeling import layers
 from official.nlp.projects.bigbird import recompute_grad
 from official.nlp.projects.bigbird import recomputing_dropout
...
@@ -136,7 +136,7 @@ class BigBirdEncoder(tf.keras.Model):
     if embedding_width is None:
       embedding_width = hidden_size
-    self._embedding_layer = keras_nlp.layers.OnDeviceEmbedding(
+    self._embedding_layer = modeling.layers.OnDeviceEmbedding(
         vocab_size=vocab_size,
         embedding_width=embedding_width,
         initializer=initializer,
...
@@ -144,12 +144,12 @@ class BigBirdEncoder(tf.keras.Model):
     word_embeddings = self._embedding_layer(word_ids)
     # Always uses dynamic slicing for simplicity.
-    self._position_embedding_layer = keras_nlp.layers.PositionEmbedding(
+    self._position_embedding_layer = modeling.layers.PositionEmbedding(
         initializer=initializer,
         max_length=max_position_embeddings,
         name='position_embedding')
     position_embeddings = self._position_embedding_layer(word_embeddings)
-    self._type_embedding_layer = keras_nlp.layers.OnDeviceEmbedding(
+    self._type_embedding_layer = modeling.layers.OnDeviceEmbedding(
        vocab_size=type_vocab_size,
        embedding_width=embedding_width,
        initializer=initializer,
...
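For context, a minimal sketch of how the renamed embedding layers are constructed and called after this change. Only the `from official.nlp import modeling` import path and the keyword arguments shown in the diff come from the source; the hyperparameter values, the initializer, the toy inputs, and the final BERT-style sum are illustrative assumptions, since the truncated hunks do not show the rest of the encoder wiring.

import tensorflow as tf
from official.nlp import modeling

# Hypothetical hyperparameters, chosen only for illustration.
vocab_size = 30522
embedding_width = 768
max_position_embeddings = 512
type_vocab_size = 2
initializer = tf.keras.initializers.TruncatedNormal(stddev=0.02)

embedding_layer = modeling.layers.OnDeviceEmbedding(
    vocab_size=vocab_size,
    embedding_width=embedding_width,
    initializer=initializer)
position_embedding_layer = modeling.layers.PositionEmbedding(
    initializer=initializer,
    max_length=max_position_embeddings,
    name='position_embedding')
type_embedding_layer = modeling.layers.OnDeviceEmbedding(
    vocab_size=type_vocab_size,
    embedding_width=embedding_width,
    initializer=initializer)

word_ids = tf.constant([[101, 2023, 2003, 102]])  # toy token ids
type_ids = tf.zeros_like(word_ids)                # single segment
word_embeddings = embedding_layer(word_ids)       # [1, 4, 768]
position_embeddings = position_embedding_layer(word_embeddings)
type_embeddings = type_embedding_layer(type_ids)

# Assumed BERT-style combination of the three embedding streams.
embeddings = word_embeddings + position_embeddings + type_embeddings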
...
@@ -25,7 +25,7 @@
 from official.modeling import optimization
 from official.modeling import tf_utils
 from official.modeling.fast_training.progressive import policies
 from official.modeling.hyperparams import base_config
-from official.nlp import keras_nlp
+from official.nlp import modeling
 from official.nlp.configs import bert
 from official.nlp.configs import encoders
 from official.nlp.data import data_loader_factory
...
@@ -96,7 +96,7 @@ def build_sub_encoder(encoder, target_layer_id):
   input_ids = encoder.inputs[0]
   input_mask = encoder.inputs[1]
   type_ids = encoder.inputs[2]
-  attention_mask = keras_nlp.layers.SelfAttentionMask()(
+  attention_mask = modeling.layers.SelfAttentionMask()(
       inputs=input_ids, to_mask=input_mask)
   embedding_output = encoder.embedding_layer(input_ids, type_ids)
...
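Similarly, a self-contained sketch of the SelfAttentionMask call site under the new import path. The layer call mirrors the one in build_sub_encoder above; the toy input tensors stand in for the encoder's real Keras inputs and are made up for illustration.

import tensorflow as tf
from official.nlp import modeling

# Toy batch of two length-4 sequences; the second has one padded position.
input_ids = tf.constant([[5, 9, 2, 7], [5, 9, 2, 0]])
input_mask = tf.constant([[1, 1, 1, 1], [1, 1, 1, 0]])

# Broadcasts the per-token padding mask into a [batch, from_seq, to_seq]
# attention mask, the same call pattern as in the diff above.
attention_mask = modeling.layers.SelfAttentionMask()(
    inputs=input_ids, to_mask=input_mask)
print(attention_mask.shape)  # (2, 4, 4)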