Unverified Commit d984b103 authored by Tavin Turner, committed by GitHub

Add 'with torch.no_grad()' to BEiT integration test forward passes (#14961)

* Add 'with torch.no_grad()' to BEiT integration test forward pass

* Fix inconsistent use of tabs and spaces in indentation
parent 09f9d072
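
For context, the change applies a standard PyTorch idiom: wrapping an inference-only forward pass in `torch.no_grad()` so no autograd graph is built, which cuts memory use during integration tests. Below is a minimal, self-contained sketch of the pattern; the tiny `nn.Linear` model and random input are illustrative stand-ins, not the actual BEiT checkpoint or test inputs.

```python
import torch
import torch.nn as nn

# Stand-in for the model under test (the real tests load a BEiT checkpoint).
model = nn.Linear(4, 2)
model.eval()

# Stand-in for the preprocessed image tensor produced by the feature extractor.
pixel_values = torch.randn(1, 4)

# forward pass: torch.no_grad() disables gradient tracking for everything
# computed inside the block, so no autograd graph is stored.
with torch.no_grad():
    outputs = model(pixel_values)

# Outputs computed under no_grad carry no gradient history.
assert not outputs.requires_grad
print(outputs.shape)
```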
@@ -435,6 +435,7 @@ class BeitModelIntegrationTest(unittest.TestCase):
         bool_masked_pos = torch.ones((1, 196), dtype=torch.bool).to(torch_device)

         # forward pass
-        outputs = model(pixel_values=pixel_values, bool_masked_pos=bool_masked_pos)
+        with torch.no_grad():
+            outputs = model(pixel_values=pixel_values, bool_masked_pos=bool_masked_pos)
         logits = outputs.logits
@@ -457,6 +458,7 @@ class BeitModelIntegrationTest(unittest.TestCase):
         inputs = feature_extractor(images=image, return_tensors="pt").to(torch_device)

         # forward pass
-        outputs = model(**inputs)
+        with torch.no_grad():
+            outputs = model(**inputs)
         logits = outputs.logits
@@ -482,6 +484,7 @@ class BeitModelIntegrationTest(unittest.TestCase):
         inputs = feature_extractor(images=image, return_tensors="pt").to(torch_device)

         # forward pass
-        outputs = model(**inputs)
+        with torch.no_grad():
+            outputs = model(**inputs)
         logits = outputs.logits
@@ -508,6 +511,7 @@ class BeitModelIntegrationTest(unittest.TestCase):
         inputs = feature_extractor(images=image, return_tensors="pt").to(torch_device)

         # forward pass
-        outputs = model(**inputs)
+        with torch.no_grad():
+            outputs = model(**inputs)
         logits = outputs.logits