"examples/vscode:/vscode.git/clone" did not exist on "7e92c5bc736a5e65c71b3017a0b1a8b8648d3b7e"
Commit 01116d8e authored by Zhenyu Tan, committed by A. Unique TensorFlower

Replace Transformer for albert_transformer_encoder

PiperOrigin-RevId: 328975553
parent cc7495e4
@@ -22,6 +22,7 @@ from __future__ import print_function
 import tensorflow as tf
 from official.modeling import activations
+from official.nlp import keras_nlp
 from official.nlp.modeling import layers
@@ -152,12 +153,12 @@ class AlbertTransformerEncoder(tf.keras.Model):
     data = embeddings
     attention_mask = layers.SelfAttentionMask()([data, mask])
-    shared_layer = layers.Transformer(
+    shared_layer = keras_nlp.TransformerEncoderBlock(
         num_attention_heads=num_attention_heads,
-        intermediate_size=intermediate_size,
-        intermediate_activation=activation,
-        dropout_rate=dropout_rate,
-        attention_dropout_rate=attention_dropout_rate,
+        inner_dim=intermediate_size,
+        inner_activation=activation,
+        output_dropout=dropout_rate,
+        attention_dropout=attention_dropout_rate,
         kernel_initializer=initializer,
         name='transformer')
     for _ in range(num_layers):
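For context, here is a minimal sketch (not part of the commit) of how a single keras_nlp.TransformerEncoderBlock can be shared across layers, ALBERT-style, using the argument names shown in the diff. The hyperparameter values are illustrative, and it assumes SelfAttentionMask and TransformerEncoderBlock accept the [tensor, mask] list inputs used in this file at the time of the change.

```python
import tensorflow as tf
from official.nlp import keras_nlp
from official.nlp.modeling import layers

# Illustrative shapes and hyperparameters, not taken from the commit.
batch, seq_len, hidden, heads, inner = 2, 8, 64, 4, 256

# Dummy tensors standing in for the embedding output and input mask.
data = tf.random.uniform((batch, seq_len, hidden))
mask = tf.ones((batch, seq_len), dtype=tf.int32)

# Build the attention mask the same way the encoder does.
attention_mask = layers.SelfAttentionMask()([data, mask])

# One block instance, reused for every layer (cross-layer weight sharing).
shared_layer = keras_nlp.TransformerEncoderBlock(
    num_attention_heads=heads,
    inner_dim=inner,
    inner_activation='relu',
    output_dropout=0.1,
    attention_dropout=0.1,
    name='transformer')

for _ in range(3):  # the same weights are applied at every layer
  data = shared_layer([data, attention_mask])
print(data.shape)  # (2, 8, 64)
```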