Unverified Commit 0940e9b2 authored by Henrik Holm, committed by GitHub

Add 'with torch.no_grad()' to integration test forward pass (#14820)

parent b37cf7de
@@ -587,6 +587,7 @@ class BertModelIntegrationTest(unittest.TestCase):
         model = BertModel.from_pretrained("bert-base-uncased")
         input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
         attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
-        output = model(input_ids, attention_mask=attention_mask)[0]
+        with torch.no_grad():
+            output = model(input_ids, attention_mask=attention_mask)[0]
         expected_shape = torch.Size((1, 11, 768))
         self.assertEqual(output.shape, expected_shape)
@@ -599,6 +600,7 @@ class BertModelIntegrationTest(unittest.TestCase):
         model = BertModel.from_pretrained("zhiheng-huang/bert-base-uncased-embedding-relative-key")
         input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
         attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
-        output = model(input_ids, attention_mask=attention_mask)[0]
+        with torch.no_grad():
+            output = model(input_ids, attention_mask=attention_mask)[0]
         expected_shape = torch.Size((1, 11, 768))
         self.assertEqual(output.shape, expected_shape)
@@ -613,6 +615,7 @@ class BertModelIntegrationTest(unittest.TestCase):
         model = BertModel.from_pretrained("zhiheng-huang/bert-base-uncased-embedding-relative-key-query")
         input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
         attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
-        output = model(input_ids, attention_mask=attention_mask)[0]
+        with torch.no_grad():
+            output = model(input_ids, attention_mask=attention_mask)[0]
         expected_shape = torch.Size((1, 11, 768))
         self.assertEqual(output.shape, expected_shape)
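For reference, the pattern these hunks introduce looks like the minimal sketch below. The checkpoint name, inputs, and shape check are taken from the first hunk; the test method name is illustrative and not from the diff. Wrapping the forward pass in torch.no_grad() disables gradient tracking, so the inference-only integration test does not build an autograd graph and uses less memory.

```python
import unittest

import torch
from transformers import BertModel


class BertModelIntegrationTest(unittest.TestCase):
    def test_inference_no_head(self):  # illustrative name, not taken from the diff
        model = BertModel.from_pretrained("bert-base-uncased")
        input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
        attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])

        # Inference-only check: no gradients are needed, so skip building
        # the autograd graph for the forward pass.
        with torch.no_grad():
            output = model(input_ids, attention_mask=attention_mask)[0]

        expected_shape = torch.Size((1, 11, 768))
        self.assertEqual(output.shape, expected_shape)
```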