Unverified Commit 97f3beed authored by Jake Tae's avatar Jake Tae Committed by GitHub
Browse files

Add `with torch.no_grad()` to DistilBERT integration test forward pass (#14979)

* refactor: wrap forward pass around no_grad context

* Update tests/test_modeling_distilbert.py

* fix: rm `no_grad` from non-integration tests

* chore: rm whitespace change
parent 021f2ea9
...@@ -284,7 +284,8 @@ class DistilBertModelIntergrationTest(unittest.TestCase): ...@@ -284,7 +284,8 @@ class DistilBertModelIntergrationTest(unittest.TestCase):
model = DistilBertModel.from_pretrained("distilbert-base-uncased") model = DistilBertModel.from_pretrained("distilbert-base-uncased")
input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]]) input_ids = torch.tensor([[0, 345, 232, 328, 740, 140, 1695, 69, 6078, 1588, 2]])
attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]) attention_mask = torch.tensor([[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
output = model(input_ids, attention_mask=attention_mask)[0] with torch.no_grad():
output = model(input_ids, attention_mask=attention_mask)[0]
expected_shape = torch.Size((1, 11, 768)) expected_shape = torch.Size((1, 11, 768))
self.assertEqual(output.shape, expected_shape) self.assertEqual(output.shape, expected_shape)
expected_slice = torch.tensor( expected_slice = torch.tensor(
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment