Commit 7e98e211 authored by Aymeric Augustin's avatar Aymeric Augustin
Browse files

Remove unittest.main() in test modules.

This construct isn't used anymore these days.

Running python tests/test_foo.py puts the tests/ directory on
PYTHONPATH, which isn't representative of how we run tests.

Use python -m unittest tests/test_foo.py instead.
parent 6be7cdda
......@@ -99,7 +99,3 @@ class TFAutoModelTest(unittest.TestCase):
logging.basicConfig(level=logging.INFO)
model = TFAutoModelWithLMHead.from_pretrained(SMALL_MODEL_IDENTIFIER)
self.assertIsInstance(model, TFBertForMaskedLM)
if __name__ == "__main__":
unittest.main()
......@@ -14,8 +14,6 @@
# limitations under the License.
from __future__ import absolute_import, division, print_function
import unittest
from transformers import BertConfig, is_tf_available
from .test_configuration_common import ConfigTester
......@@ -315,7 +313,3 @@ class TFBertModelTest(TFCommonTestCases.TFCommonModelTester):
for model_name in ["bert-base-uncased"]:
model = TFBertModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -378,7 +378,3 @@ def ids_tensor(shape, vocab_size, rng=None, name=None, dtype=None):
output = tf.constant(values, shape=shape, dtype=dtype if dtype is not None else tf.int32)
return output
if __name__ == "__main__":
unittest.main()
......@@ -14,8 +14,6 @@
# limitations under the License.
from __future__ import absolute_import, division, print_function
import unittest
from transformers import CTRLConfig, is_tf_available
from .test_configuration_common import ConfigTester
......@@ -201,7 +199,3 @@ class TFCTRLModelTest(TFCommonTestCases.TFCommonModelTester):
for model_name in list(TF_CTRL_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
model = TFCTRLModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -14,8 +14,6 @@
# limitations under the License.
from __future__ import absolute_import, division, print_function
import unittest
from transformers import DistilBertConfig, is_tf_available
from .test_configuration_common import ConfigTester
......@@ -221,7 +219,3 @@ class TFDistilBertModelTest(TFCommonTestCases.TFCommonModelTester):
# for model_name in list(DISTILBERT_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
# model = DistilBertModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
# self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -14,8 +14,6 @@
# limitations under the License.
from __future__ import absolute_import, division, print_function
import unittest
from transformers import GPT2Config, is_tf_available
from .test_configuration_common import ConfigTester
......@@ -234,7 +232,3 @@ class TFGPT2ModelTest(TFCommonTestCases.TFCommonModelTester):
for model_name in list(TF_GPT2_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
model = TFGPT2Model.from_pretrained(model_name, cache_dir=CACHE_DIR)
self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -14,8 +14,6 @@
# limitations under the License.
from __future__ import absolute_import, division, print_function
import unittest
from transformers import OpenAIGPTConfig, is_tf_available
from .test_configuration_common import ConfigTester
......@@ -235,7 +233,3 @@ class TFOpenAIGPTModelTest(TFCommonTestCases.TFCommonModelTester):
for model_name in list(TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
model = TFOpenAIGPTModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -244,7 +244,3 @@ class TFRobertaModelIntegrationTest(unittest.TestCase):
self.assertEqual(list(output.numpy().shape), expected_shape)
expected_tensor = tf.constant([[-0.9469, 0.3913, 0.5118]])
self.assertTrue(numpy.allclose(output.numpy(), expected_tensor.numpy(), atol=1e-3))
if __name__ == "__main__":
unittest.main()
......@@ -14,8 +14,6 @@
# limitations under the License.
from __future__ import absolute_import, division, print_function
import unittest
from transformers import T5Config, is_tf_available
from .test_configuration_common import ConfigTester
......@@ -165,7 +163,3 @@ class TFT5ModelTest(TFCommonTestCases.TFCommonModelTester):
for model_name in ["t5-small"]:
model = TFT5Model.from_pretrained(model_name, cache_dir=CACHE_DIR)
self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -15,7 +15,6 @@
from __future__ import absolute_import, division, print_function
import random
import unittest
from transformers import TransfoXLConfig, is_tf_available
......@@ -207,7 +206,3 @@ class TFTransfoXLModelTest(TFCommonTestCases.TFCommonModelTester):
for model_name in list(TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
model = TFTransfoXLModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -14,8 +14,6 @@
# limitations under the License.
from __future__ import absolute_import, division, print_function
import unittest
from transformers import is_tf_available
from .test_configuration_common import ConfigTester
......@@ -306,7 +304,3 @@ class TFXLMModelTest(TFCommonTestCases.TFCommonModelTester):
for model_name in list(TF_XLM_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
model = TFXLMModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -15,7 +15,6 @@
from __future__ import absolute_import, division, print_function
import random
import unittest
from transformers import XLNetConfig, is_tf_available
......@@ -401,7 +400,3 @@ class TFXLNetModelTest(TFCommonTestCases.TFCommonModelTester):
for model_name in list(TF_XLNET_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
model = TFXLNetModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -15,7 +15,6 @@
from __future__ import absolute_import, division, print_function
import random
import unittest
from transformers import is_torch_available
......@@ -208,7 +207,3 @@ class TransfoXLModelTest(CommonTestCases.CommonModelTester):
for model_name in list(TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
model = TransfoXLModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -14,8 +14,6 @@
# limitations under the License.
from __future__ import absolute_import, division, print_function
import unittest
from transformers import is_torch_available
from .test_configuration_common import ConfigTester
......@@ -390,7 +388,3 @@ class XLMModelTest(CommonTestCases.CommonModelTester):
for model_name in list(XLM_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
model = XLMModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -15,7 +15,6 @@
from __future__ import absolute_import, division, print_function
import random
import unittest
from transformers import is_torch_available
......@@ -499,7 +498,3 @@ class XLNetModelTest(CommonTestCases.CommonModelTester):
for model_name in list(XLNET_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
model = XLNetModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
self.assertIsNotNone(model)
if __name__ == "__main__":
unittest.main()
......@@ -150,7 +150,3 @@ class ScheduleInitTest(unittest.TestCase):
)
lrs_2 = unwrap_and_save_reload_schedule(scheduler, self.num_steps)
self.assertListEqual([l[0] for l in lrs], [l[0] for l in lrs_2])
if __name__ == "__main__":
unittest.main()
......@@ -83,7 +83,3 @@ class OptimizationFTest(unittest.TestCase):
self.assertEqual(accumulator.step, 0)
self.assertListAlmostEqual(accumulator._gradients[0].values[0].value().numpy().tolist(), [0.0, 0.0], tol=1e-2)
self.assertListAlmostEqual(accumulator._gradients[0].values[1].value().numpy().tolist(), [0.0, 0.0], tol=1e-2)
if __name__ == "__main__":
unittest.main()
......@@ -205,7 +205,3 @@ class MultiColumnInputTestCase(unittest.TestCase):
for tokenizer, model, config in TF_QA_FINETUNED_MODELS:
nlp = pipeline(task="question-answering", model=model, config=config, tokenizer=tokenizer)
self._test_multicolumn_pipeline(nlp, valid_samples, invalid_samples, mandatory_output_keys)
if __name__ == "__main__":
unittest.main()
......@@ -15,7 +15,6 @@
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import unittest
from transformers.tokenization_albert import AlbertTokenizer
......@@ -78,7 +77,3 @@ class AlbertTokenizationTest(CommonTestCases.CommonTokenizerTester):
assert encoded_pair == [tokenizer.cls_token_id] + text + [tokenizer.sep_token_id] + text_2 + [
tokenizer.sep_token_id
]
if __name__ == "__main__":
unittest.main()
......@@ -49,7 +49,3 @@ class AutoTokenizerTest(unittest.TestCase):
tokenizer = AutoTokenizer.from_pretrained(SMALL_MODEL_IDENTIFIER)
self.assertIsInstance(tokenizer, BertTokenizer)
self.assertEqual(len(tokenizer), 12)
if __name__ == "__main__":
unittest.main()
Markdown is supported
Attach a file by dragging & dropping, selecting, or pasting it.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.