Commit a5751f75 authored by patrickvonplaten's avatar patrickvonplaten Committed by Patrick von Platen
Browse files

fix bug with attention_mask as optional input argument

parent 629aac92
...@@ -313,7 +313,7 @@ class BartHeadTests(unittest.TestCase): ...@@ -313,7 +313,7 @@ class BartHeadTests(unittest.TestCase):
config, input_ids, batch_size = self._get_config_and_data(output_past=True) config, input_ids, batch_size = self._get_config_and_data(output_past=True)
attention_mask = input_ids.ne(1) attention_mask = input_ids.ne(1)
lm_model = BartForConditionalGeneration(config).eval().to(torch_device).half() lm_model = BartForConditionalGeneration(config).eval().to(torch_device).half()
lm_model.generate(input_ids, attention_mask) lm_model.generate(input_ids, attention_mask=attention_mask)
def test_prepare_bart_decoder_inputs(self): def test_prepare_bart_decoder_inputs(self):
config, *_ = self._get_config_and_data(output_past=False) config, *_ = self._get_config_and_data(output_past=False)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment