Commit 54e406c0 authored by A. Unique TensorFlower

Fix minor comment typos in BERT and MobileBERT APIs.

PiperOrigin-RevId: 360529962
parent 1630eccd
@@ -60,7 +60,7 @@ class BertEncoder(tf.keras.Model):
     initializer: The initialzer to use for all weights in this encoder.
     output_range: The sequence output range, [0, output_range), by slicing the
       target sequence of the last transformer layer. `None` means the entire
-      target sequence will attend to the source sequence, which yeilds the full
+      target sequence will attend to the source sequence, which yields the full
       output.
     embedding_width: The width of the word embeddings. If the embedding width is
       not equal to hidden size, embedding parameters will be factorized into two
......
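As context for the `output_range` docstring above: a minimal usage sketch, assuming the `BertEncoder` constructor from `official.nlp.modeling.networks` and made-up hyperparameters; the exact input and output formats vary across Model Garden versions, so treat the call below as illustrative rather than the library's definitive API.

import tensorflow as tf
from official.nlp.modeling import networks

# Hypothetical hyperparameters for illustration only; not taken from this commit.
encoder = networks.BertEncoder(
    vocab_size=30522,
    num_layers=4,
    output_range=1)  # compute only position 0 of the last layer's target sequence

word_ids = tf.ones((2, 128), dtype=tf.int32)
mask = tf.ones((2, 128), dtype=tf.int32)
type_ids = tf.zeros((2, 128), dtype=tf.int32)

outputs = encoder([word_ids, mask, type_ids])
# The output structure (list vs. dict) depends on the library version; with
# output_range=1 the last-layer sequence output covers 1 position, not 128.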
@@ -22,7 +22,7 @@ from official.nlp.modeling.networks import mobile_bert_encoder
 def generate_fake_input(batch_size=1, seq_len=5, vocab_size=10000, seed=0):
-  """Generate consisitant fake integer input sequences."""
+  """Generate consistent fake integer input sequences."""
   np.random.seed(seed)
   fake_input = []
   for _ in range(batch_size):
......
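The test helper above is cut off by the hunk boundary. A self-contained sketch of what the full helper plausibly looks like, reconstructed only from its docstring and the lines shown; the loop body and return value are assumptions, not the file's actual code.

import numpy as np

def generate_fake_input(batch_size=1, seq_len=5, vocab_size=10000, seed=0):
  """Generate consistent fake integer input sequences."""
  np.random.seed(seed)
  fake_input = []
  for _ in range(batch_size):
    # Assumed: one random token-id sequence per example, reproducible via the seed.
    fake_input.append(np.random.randint(0, vocab_size, size=(seq_len,)))
  return np.asarray(fake_input, dtype=np.int32)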
@@ -65,7 +65,7 @@ class BertEncoder(keras_nlp.encoders.BertEncoder):
       keyed by `encoder_outputs`.
     output_range: The sequence output range, [0, output_range), by slicing the
       target sequence of the last transformer layer. `None` means the entire
-      target sequence will attend to the source sequence, which yeilds the full
+      target sequence will attend to the source sequence, which yields the full
       output.
     embedding_width: The width of the word embeddings. If the embedding width is
       not equal to hidden size, embedding parameters will be factorized into two
......
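Both `embedding_width` docstrings refer to factorizing the embedding parameters into two matrices when the embedding width differs from the hidden size. A standalone sketch of that factorization idea with made-up dimensions, not the encoder's actual implementation:

import tensorflow as tf

vocab_size, embedding_width, hidden_size = 30522, 128, 768

# Factorized embedding: a (vocab_size x embedding_width) lookup table followed by
# an (embedding_width x hidden_size) projection, instead of a single
# (vocab_size x hidden_size) embedding matrix.
embedding = tf.keras.layers.Embedding(vocab_size, embedding_width)
projection = tf.keras.layers.Dense(hidden_size, use_bias=False)

token_ids = tf.constant([[1, 2, 3]])
hidden_states = projection(embedding(token_ids))  # shape (1, 3, 768)

This reduces the embedding parameter count from vocab_size * hidden_size to vocab_size * embedding_width + embedding_width * hidden_size.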
@@ -21,7 +21,7 @@ from official.nlp.modeling.networks import mobile_bert_encoder
 def generate_fake_input(batch_size=1, seq_len=5, vocab_size=10000, seed=0):
-  """Generate consisitant fake integer input sequences."""
+  """Generate consistent fake integer input sequences."""
   np.random.seed(seed)
   fake_input = []
   for _ in range(batch_size):
......