Unverified Commit 5ab87cd4 authored by Matt Churgin, committed by GitHub

wrapped forward passes in torch.no_grad() (#15037)

parent 5a06118b
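
For context, torch.no_grad() is a PyTorch context manager that disables gradient tracking for the enclosed forward passes, so these slow integration tests no longer build an autograd graph they never use and hold less memory. A minimal sketch of the pattern, using a hypothetical tiny Linear model as a stand-in for the pretrained RoBERTa checkpoints in the diff below:

# Sketch of the pattern this commit applies (not the test file itself).
# The Linear model is a placeholder so the snippet runs without downloading weights.
import torch

model = torch.nn.Linear(4, 2)
model.eval()

inputs = torch.randn(1, 4)

with torch.no_grad():
    output = model(inputs)

# Tensors produced under no_grad carry no gradient history.
assert output.requires_grad is False
assert output.shape == torch.Size((1, 2))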
@@ -485,6 +485,7 @@ class RobertaModelIntegrationTest(TestCasePlus):
         model = RobertaForMaskedLM.from_pretrained("roberta-base")
         input_ids = torch.tensor([[0, 31414, 232, 328, 740, 1140, 12695, 69, 46078, 1588, 2]])
-        output = model(input_ids)[0]
+        with torch.no_grad():
+            output = model(input_ids)[0]
         expected_shape = torch.Size((1, 11, 50265))
         self.assertEqual(output.shape, expected_shape)
@@ -504,6 +505,7 @@ class RobertaModelIntegrationTest(TestCasePlus):
         model = RobertaModel.from_pretrained("roberta-base")
         input_ids = torch.tensor([[0, 31414, 232, 328, 740, 1140, 12695, 69, 46078, 1588, 2]])
-        output = model(input_ids)[0]
+        with torch.no_grad():
+            output = model(input_ids)[0]
         # compare the actual values for a slice.
         expected_slice = torch.tensor(
@@ -521,6 +523,7 @@ class RobertaModelIntegrationTest(TestCasePlus):
         model = RobertaForSequenceClassification.from_pretrained("roberta-large-mnli")
         input_ids = torch.tensor([[0, 31414, 232, 328, 740, 1140, 12695, 69, 46078, 1588, 2]])
-        output = model(input_ids)[0]
+        with torch.no_grad():
+            output = model(input_ids)[0]
         expected_shape = torch.Size((1, 3))
         self.assertEqual(output.shape, expected_shape)