Unverified Commit 3cac867f authored by Sam Shleifer's avatar Sam Shleifer Committed by GitHub
Browse files

t5 model should make decoder_attention_mask (#6800)

parent 20f77864
......@@ -372,6 +372,5 @@ class T5Tokenizer(PreTrainedTokenizer):
**kwargs,
)
model_inputs["labels"] = labels_and_decoder_mask["input_ids"]
model_inputs["decoder_attention_mask"] = labels_and_decoder_mask["attention_mask"]
self.prefix_tokens = []
return model_inputs
......@@ -162,14 +162,12 @@ class T5TokenizationTest(TokenizerTesterMixin, unittest.TestCase):
src_text, tgt_texts=tgt_text, max_target_length=32, padding="max_length", return_tensors=FRAMEWORK
)
self.assertEqual(32, batch["labels"].shape[1])
self.assertEqual(32, batch["decoder_attention_mask"].shape[1])
# test None max_target_length
batch = tokenizer.prepare_seq2seq_batch(
src_text, tgt_texts=tgt_text, max_length=32, padding="max_length", return_tensors=FRAMEWORK
)
self.assertEqual(32, batch["labels"].shape[1])
self.assertEqual(32, batch["decoder_attention_mask"].shape[1])
def test_outputs_not_longer_than_maxlen(self):
tokenizer = self.t5_base_tokenizer
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment