# NOTE(review): this region was corrupted merge/diff residue — it contained
# stray unified-diff hunk headers ("@@ -426,8 +426,5 @@ ..."), bare "..."
# context markers inside a list literal, duplicated list openers/closers, and
# both the pre-change (torch.Tensor(...).long()) and post-change
# (torch.tensor(..., dtype=..., device=...)) forms of the same assignment.
# Reconstructed below into the two intended variants of the same fixture.

# --- TensorFlow variant of the fixture ---
input_ids = tf.convert_to_tensor([[1, 14, 2232, 26, 1]], dtype=tf.int32)  # the dog is cute
expected_output_ids = [
    1,
    14,
    2232,
    26,
    1,
    567,
    26,
    32,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
]  # The dog is nothing is it!!!!!!!!!!!! TODO (PVP): this sentence (and others I tried) does not make much sense, there seems to be a problem with xlm language generation.

# --- PyTorch variant of the same fixture ---
# The deprecated `torch.Tensor([[...]]).long()` construction from the residue
# is dropped in favor of the newer line that was also present:
# `torch.tensor(..., dtype=torch.long, device=torch_device)`.
input_ids = torch.tensor([[1, 14, 2232, 26, 1]], dtype=torch.long, device=torch_device)  # The dog is cute
# NOTE(review): the torch expected-ids list in the residue was garbled
# (duplicated head, "..." ellipsis, diff headers); it is assumed identical to
# the TensorFlow list above, which matches the visible head (1, 14, ...) and
# tail (149 x 4) fragments — TODO confirm against the original test file.
expected_output_ids = [
    1,
    14,
    2232,
    26,
    1,
    567,
    26,
    32,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
    149,
]  # The dog is nothing is it!!!!!!!!!!!! TODO (PVP): this sentence (and others I tried) does not make much sense, there seems to be a problem with xlm language generation.