Unverified Commit c037b2e3 authored by Arthur's avatar Arthur Committed by GitHub
Browse files

skip flaky hub tests (#26594)

skip flaky
parent ca7912d1
@@ -1096,6 +1096,7 @@ class ModelPushToHubTester(unittest.TestCase):
        except HTTPError:
            pass

    @unittest.skip("This test is flaky")
    def test_push_to_hub(self):
        config = BertConfig(
            vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37
@@ -1118,6 +1119,7 @@ class ModelPushToHubTester(unittest.TestCase):
        for p1, p2 in zip(model.parameters(), new_model.parameters()):
            self.assertTrue(torch.equal(p1, p2))

    @unittest.skip("This test is flaky")
    def test_push_to_hub_in_organization(self):
        config = BertConfig(
            vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment