Commit f54dc3f4 authored by Julien Chaumond's avatar Julien Chaumond
Browse files

[ci] Load pretrained models into the default (long-lived) cache

There's an inconsistency right now where:
- we load some models into CACHE_DIR
- and some models in the default cache
- and often, into both for the same model

When running the RUN_SLOW tests, this takes a lot of disk space, time, and bandwidth.

I'd rather always use the default cache
parent 6b410bed
......@@ -21,7 +21,7 @@ from transformers import TransfoXLConfig, is_tf_available
from .test_configuration_common import ConfigTester
from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor
from .utils import CACHE_DIR, require_tf, slow
from .utils import require_tf, slow
if is_tf_available():
......@@ -210,7 +210,7 @@ class TFTransfoXLModelTest(TFModelTesterMixin, unittest.TestCase):
@slow
def test_model_from_pretrained(self):
    """Smoke-test that the first pretrained TF Transfo-XL checkpoint loads.

    Uses the default (long-lived) cache rather than a per-run cache_dir,
    so RUN_SLOW CI jobs don't download each model twice.
    """
    for model_name in list(TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
        # Default cache on purpose: CACHE_DIR was removed from this module's imports.
        model = TFTransfoXLModel.from_pretrained(model_name)
        self.assertIsNotNone(model)
......
......@@ -20,7 +20,7 @@ from transformers import is_tf_available
from .test_configuration_common import ConfigTester
from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor
from .utils import CACHE_DIR, require_tf, slow
from .utils import require_tf, slow
if is_tf_available():
......@@ -309,7 +309,7 @@ class TFXLMModelTest(TFModelTesterMixin, unittest.TestCase):
@slow
def test_model_from_pretrained(self):
    """Smoke-test that the first pretrained TF XLM checkpoint loads.

    Uses the default (long-lived) cache rather than a per-run cache_dir,
    so RUN_SLOW CI jobs don't download each model twice.
    """
    for model_name in list(TF_XLM_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
        # Default cache on purpose: CACHE_DIR was removed from this module's imports.
        model = TFXLMModel.from_pretrained(model_name)
        self.assertIsNotNone(model)
......
......@@ -21,7 +21,7 @@ from transformers import XLNetConfig, is_tf_available
from .test_configuration_common import ConfigTester
from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor
from .utils import CACHE_DIR, require_tf, slow
from .utils import require_tf, slow
if is_tf_available():
......@@ -411,7 +411,7 @@ class TFXLNetModelTest(TFModelTesterMixin, unittest.TestCase):
@slow
def test_model_from_pretrained(self):
    """Smoke-test that the first pretrained TF XLNet checkpoint loads.

    Uses the default (long-lived) cache rather than a per-run cache_dir,
    so RUN_SLOW CI jobs don't download each model twice.
    """
    for model_name in list(TF_XLNET_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
        # Default cache on purpose: CACHE_DIR was removed from this module's imports.
        model = TFXLNetModel.from_pretrained(model_name)
        self.assertIsNotNone(model)
......
......@@ -21,7 +21,7 @@ from transformers import is_torch_available
from .test_configuration_common import ConfigTester
from .test_modeling_common import ModelTesterMixin, ids_tensor
from .utils import CACHE_DIR, require_torch, slow, torch_device
from .utils import require_torch, slow, torch_device
if is_torch_available():
......@@ -210,7 +210,7 @@ class TransfoXLModelTest(ModelTesterMixin, unittest.TestCase):
@slow
def test_model_from_pretrained(self):
    """Smoke-test that the first pretrained PyTorch Transfo-XL checkpoint loads.

    Uses the default (long-lived) cache rather than a per-run cache_dir,
    so RUN_SLOW CI jobs don't download each model twice.
    """
    for model_name in list(TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
        # Default cache on purpose: CACHE_DIR was removed from this module's imports.
        model = TransfoXLModel.from_pretrained(model_name)
        self.assertIsNotNone(model)
......
......@@ -20,7 +20,7 @@ from transformers import is_torch_available
from .test_configuration_common import ConfigTester
from .test_modeling_common import ModelTesterMixin, ids_tensor
from .utils import CACHE_DIR, require_torch, slow, torch_device
from .utils import require_torch, slow, torch_device
if is_torch_available():
......@@ -426,7 +426,7 @@ class XLMModelTest(ModelTesterMixin, unittest.TestCase):
@slow
def test_model_from_pretrained(self):
    """Smoke-test that the first pretrained PyTorch XLM checkpoint loads.

    Uses the default (long-lived) cache rather than a per-run cache_dir,
    so RUN_SLOW CI jobs don't download each model twice.
    """
    for model_name in list(XLM_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
        # Default cache on purpose: CACHE_DIR was removed from this module's imports.
        model = XLMModel.from_pretrained(model_name)
        self.assertIsNotNone(model)
......
......@@ -21,7 +21,7 @@ from transformers import is_torch_available
from .test_configuration_common import ConfigTester
from .test_modeling_common import ModelTesterMixin, ids_tensor
from .utils import CACHE_DIR, require_torch, slow, torch_device
from .utils import require_torch, slow, torch_device
if is_torch_available():
......@@ -509,7 +509,7 @@ class XLNetModelTest(ModelTesterMixin, unittest.TestCase):
@slow
def test_model_from_pretrained(self):
    """Smoke-test that the first pretrained PyTorch XLNet checkpoint loads.

    Uses the default (long-lived) cache rather than a per-run cache_dir,
    so RUN_SLOW CI jobs don't download each model twice.
    """
    for model_name in list(XLNET_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
        # Default cache on purpose: CACHE_DIR was removed from this module's imports.
        model = XLNetModel.from_pretrained(model_name)
        self.assertIsNotNone(model)
......
import os
import tempfile
import unittest
from distutils.util import strtobool
from transformers.file_utils import _tf_available, _torch_available
# Per-run temp-dir cache; after this commit the slow from_pretrained tests use
# the default cache instead, but the constant is kept for any remaining callers.
CACHE_DIR = os.path.join(tempfile.gettempdir(), "transformers_test")
# Identifiers of tiny dummy models — presumably hub model ids; verify against
# the tests that consume them. NOTE(review): "UNKWOWN" is a typo, but it is a
# public name other test modules import, so renaming would touch callers.
SMALL_MODEL_IDENTIFIER = "julien-c/bert-xsmall-dummy"
DUMMY_UNKWOWN_IDENTIFIER = "julien-c/dummy-unknown"
# Used to test Auto{Config, Model, Tokenizer} model_type detection.
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment