Unverified commit 19420fd9 authored by Yih-Dar, committed by GitHub

Move test model folders (#17034)



* move test model folders (TODO: fix imports and others)

* fix (potentially partially) imports (in model test modules)

* fix (potentially partially) imports (in tokenization test modules)

* fix (potentially partially) imports (in feature extraction test modules)

* fix import utils.test_modeling_tf_core

* fix path ../fixtures/

* fix imports about generation.test_generation_flax_utils

* fix more imports

* fix fixture path

* fix get_test_dir

* update module_to_test_file

* fix get_tests_dir from wrong transformers.utils

* update config.yml (CircleCI)

* fix style

* remove missing imports

* update new model script

* update check_repo

* update SPECIAL_MODULE_TO_TEST_MAP

* fix style

* add __init__

* update self-scheduled

* fix add_new_model scripts

* check one way to get location back

* python setup.py build install

* fix import in test auto

* update self-scheduled.yml

* update slack notification script

* Add comments about artifact names

* fix for yolos
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent cd9274d0
@@ -22,8 +22,8 @@ from transformers import YolosConfig
 from transformers.testing_utils import require_torch, require_vision, slow, torch_device
 from transformers.utils import cached_property, is_torch_available, is_vision_available
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin, floats_tensor
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin, floats_tensor
 if is_torch_available():

@@ -20,8 +20,8 @@ import unittest
 from transformers import YosoConfig, is_torch_available
 from transformers.testing_utils import require_torch, slow, torch_device
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
 if is_torch_available():

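Why the extra dot in these imports: the model test modules moved one directory level deeper (from tests/<model>/ to tests/models/<model>/), so reaching the shared testers in tests/ needs one more parent level. Below is a minimal, self-contained sketch with throwaway files and illustrative names (not transformers code) that reproduces the new layout and shows `...` climbing back up to the tests package.

# Sketch only: build the post-move layout in a temp dir and resolve a `...` import.
import importlib
import os
import sys
import tempfile

root = tempfile.mkdtemp()
for pkg in ("tests", "tests/models", "tests/models/yolos"):
    os.makedirs(os.path.join(root, pkg), exist_ok=True)
    open(os.path.join(root, pkg, "__init__.py"), "w").close()

# Stand-in for the shared tester module at tests/test_modeling_common.py.
with open(os.path.join(root, "tests", "test_modeling_common.py"), "w") as f:
    f.write("MARKER = 'shared tester'\n")

# Model test module two packages below tests/: three dots go up to `tests`.
with open(os.path.join(root, "tests", "models", "yolos", "test_modeling_yolos.py"), "w") as f:
    f.write("from ...test_modeling_common import MARKER\n")

sys.path.insert(0, root)
mod = importlib.import_module("tests.models.yolos.test_modeling_yolos")
print(mod.MARKER)  # -> shared tester
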
@@ -25,7 +25,7 @@ from pathlib import Path
 from huggingface_hub import Repository, delete_repo, login
 from requests.exceptions import HTTPError
 from transformers import AutoFeatureExtractor, Wav2Vec2FeatureExtractor
-from transformers.testing_utils import PASS, USER, is_staging_test
+from transformers.testing_utils import PASS, USER, get_tests_dir, is_staging_test
 from transformers.utils import is_torch_available, is_vision_available
@@ -42,7 +42,7 @@ if is_vision_available():
     from PIL import Image
-SAMPLE_FEATURE_EXTRACTION_CONFIG_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures")
+SAMPLE_FEATURE_EXTRACTION_CONFIG_DIR = get_tests_dir("fixtures")
 def prepare_image_inputs(feature_extract_tester, equal_resolution=False, numpify=False, torchify=False):

@@ -13,8 +13,8 @@ class FakeOriginalTokenizer:
 class ConvertSlowTokenizerTest(unittest.TestCase):
     def test_spm_converter_bytefallback_warning(self):
-        spm_model_file_without_bytefallback = f"{get_tests_dir()}/fixtures/test_sentencepiece.model"
-        spm_model_file_with_bytefallback = f"{get_tests_dir()}/fixtures/test_sentencepiece_with_bytefallback.model"
+        spm_model_file_without_bytefallback = get_tests_dir("fixtures/test_sentencepiece.model")
+        spm_model_file_with_bytefallback = get_tests_dir("fixtures/test_sentencepiece_with_bytefallback.model")
         original_tokenizer_without_bytefallback = FakeOriginalTokenizer(vocab_file=spm_model_file_without_bytefallback)

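These two hunks switch fixture lookups to `get_tests_dir` from transformers.testing_utils, which resolves paths against the tests/ directory instead of the calling module's `__file__`, so the result no longer depends on how deeply a test module is nested. A small usage sketch, assuming a transformers source checkout is importable so its tests/ folder exists on disk:

# Usage sketch of get_tests_dir(append_path); paths shown in comments are illustrative.
from transformers.testing_utils import get_tests_dir

fixtures_dir = get_tests_dir("fixtures")                        # <checkout>/tests/fixtures
spm_model = get_tests_dir("fixtures/test_sentencepiece.model")  # <checkout>/tests/fixtures/test_sentencepiece.model
print(fixtures_dir)
print(spm_model)
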
@@ -97,21 +97,21 @@ IGNORE_NON_TESTED = PRIVATE_MODELS.copy() + [
 # Update this list with test files that don't have a tester with a `all_model_classes` variable and which don't
 # trigger the common tests.
 TEST_FILES_WITH_NO_COMMON_TESTS = [
-    "decision_transformer/test_modeling_decision_transformer.py",
-    "camembert/test_modeling_camembert.py",
-    "mt5/test_modeling_flax_mt5.py",
-    "mbart/test_modeling_mbart.py",
-    "mt5/test_modeling_mt5.py",
-    "pegasus/test_modeling_pegasus.py",
-    "camembert/test_modeling_tf_camembert.py",
-    "mt5/test_modeling_tf_mt5.py",
-    "xlm_roberta/test_modeling_tf_xlm_roberta.py",
-    "xlm_roberta/test_modeling_flax_xlm_roberta.py",
-    "xlm_prophetnet/test_modeling_xlm_prophetnet.py",
-    "xlm_roberta/test_modeling_xlm_roberta.py",
-    "vision_text_dual_encoder/test_modeling_vision_text_dual_encoder.py",
-    "vision_text_dual_encoder/test_modeling_flax_vision_text_dual_encoder.py",
-    "decision_transformer/test_modeling_decision_transformer.py",
+    "models/decision_transformer/test_modeling_decision_transformer.py",
+    "models/camembert/test_modeling_camembert.py",
+    "models/mt5/test_modeling_flax_mt5.py",
+    "models/mbart/test_modeling_mbart.py",
+    "models/mt5/test_modeling_mt5.py",
+    "models/pegasus/test_modeling_pegasus.py",
+    "models/camembert/test_modeling_tf_camembert.py",
+    "models/mt5/test_modeling_tf_mt5.py",
+    "models/xlm_roberta/test_modeling_tf_xlm_roberta.py",
+    "models/xlm_roberta/test_modeling_flax_xlm_roberta.py",
+    "models/xlm_prophetnet/test_modeling_xlm_prophetnet.py",
+    "models/xlm_roberta/test_modeling_xlm_roberta.py",
+    "models/vision_text_dual_encoder/test_modeling_vision_text_dual_encoder.py",
+    "models/vision_text_dual_encoder/test_modeling_flax_vision_text_dual_encoder.py",
+    "models/decision_transformer/test_modeling_decision_transformer.py",
 ]
 # Update this list for models that are not in any of the auto MODEL_XXX_MAPPING. Being in this list is an exception and

@@ -308,7 +308,12 @@ def check_models_are_in_init():
 # If some test_modeling files should be ignored when checking models are all tested, they should be added in the
 # nested list _ignore_files of this function.
 def get_model_test_files():
-    """Get the model test files."""
+    """Get the model test files.
+
+    The returned files should NOT contain the `tests` (i.e. `PATH_TO_TESTS` defined in this script). They will be
+    considered as paths relative to `tests`. A caller has to use `os.path.join(PATH_TO_TESTS, ...)` to access the files.
+    """
+
     _ignore_files = [
         "test_modeling_common",
         "test_modeling_encoder_decoder",
@@ -319,20 +324,23 @@ def get_model_test_files():
         "test_modeling_tf_encoder_decoder",
     ]
     test_files = []
-    for file_or_dir in os.listdir(PATH_TO_TESTS):
-        path = os.path.join(PATH_TO_TESTS, file_or_dir)
-        if os.path.isdir(path):
-            filenames = [os.path.join(file_or_dir, file) for file in os.listdir(path)]
-        else:
-            filenames = [file_or_dir]
-
-        for filename in filenames:
-            if (
-                os.path.isfile(os.path.join(PATH_TO_TESTS, filename))
-                and "test_modeling" in filename
-                and not os.path.splitext(filename)[0] in _ignore_files
-            ):
-                test_files.append(filename)
+    # Check both `PATH_TO_TESTS` and `PATH_TO_TESTS/models`
+    model_test_root = os.path.join(PATH_TO_TESTS, "models")
+    model_test_dirs = []
+    for x in os.listdir(model_test_root):
+        x = os.path.join(model_test_root, x)
+        if os.path.isdir(x):
+            model_test_dirs.append(x)
+
+    for target_dir in [PATH_TO_TESTS] + model_test_dirs:
+        for file_or_dir in os.listdir(target_dir):
+            path = os.path.join(target_dir, file_or_dir)
+            if os.path.isfile(path):
+                filename = os.path.split(path)[-1]
+                if "test_modeling" in filename and not os.path.splitext(filename)[0] in _ignore_files:
+                    file = os.path.join(*path.split(os.sep)[1:])
+                    test_files.append(file)

     return test_files

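Per the new docstring, `get_model_test_files` now returns paths relative to the tests/ folder, and callers re-prefix them. An illustrative calling pattern (not code from this commit), assuming it runs inside utils/check_repo.py where `PATH_TO_TESTS` points at the tests/ folder and `get_model_test_files` is defined:

# Hypothetical caller: returned entries like "models/bert/test_modeling_bert.py"
# must be joined with PATH_TO_TESTS before being opened.
import os

for rel_path in get_model_test_files():                 # e.g. "models/bert/test_modeling_bert.py"
    full_path = os.path.join(PATH_TO_TESTS, rel_path)   # -> "tests/models/bert/test_modeling_bert.py"
    assert os.path.isfile(full_path)
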
@@ -561,6 +561,8 @@ if __name__ == "__main__":
     arguments = sys.argv[1:][0]
     try:
         models = ast.literal_eval(arguments)
+        # Need to change from elements like `models/bert` to `models_bert` (the ones used as artifact names).
+        models = [x.replace("models/", "models_") for x in models]
     except SyntaxError:
         Message.error_out()
         raise ValueError("Errored out.")

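A standalone illustration of the added list comprehension in the Slack notification script: the folder-style entries coming from the test matrix are flattened so they line up with artifact names such as `models_bert` (the example values below are illustrative).

# Flatten "models/<name>" entries into the underscore form used for artifact names.
models = ["models/bert", "models/gpt2", "models/yolos"]
models = [x.replace("models/", "models_") for x in models]
print(models)  # ['models_bert', 'models_gpt2', 'models_yolos']
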
@@ -275,18 +275,21 @@ SPECIAL_MODULE_TO_TEST_MAP = {
     "modeling_tf_utils.py": ["test_modeling_tf_common.py", "utils/test_modeling_tf_core.py"],
     "modeling_utils.py": ["test_modeling_common.py", "utils/test_offline.py"],
     "models/auto/modeling_auto.py": [
-        "auto/test_modeling_auto.py",
-        "auto/test_modeling_tf_pytorch.py",
-        "bort/test_modeling_bort.py",
-        "dit/test_modeling_dit.py",
+        "models/auto/test_modeling_auto.py",
+        "models/auto/test_modeling_tf_pytorch.py",
+        "models/bort/test_modeling_bort.py",
+        "models/dit/test_modeling_dit.py",
     ],
-    "models/auto/modeling_flax_auto.py": "auto/test_modeling_flax_auto.py",
+    "models/auto/modeling_flax_auto.py": "models/auto/test_modeling_flax_auto.py",
     "models/auto/modeling_tf_auto.py": [
-        "auto/test_modeling_tf_auto.py",
-        "auto/test_modeling_tf_pytorch.py",
-        "bort/test_modeling_tf_bort.py",
+        "models/auto/test_modeling_tf_auto.py",
+        "models/auto/test_modeling_tf_pytorch.py",
+        "models/bort/test_modeling_tf_bort.py",
+    ],
+    "models/gpt2/modeling_gpt2.py": [
+        "models/gpt2/test_modeling_gpt2.py",
+        "models/megatron_gpt2/test_modeling_megatron_gpt2.py",
     ],
-    "models/gpt2/modeling_gpt2.py": ["gpt2/test_modeling_gpt2.py", "megatron_gpt2/test_modeling_megatron_gpt2.py"],
     "optimization.py": "optimization/test_optimization.py",
     "optimization_tf.py": "optimization/test_optimization_tf.py",
     "pipelines/base.py": "pipelines/test_pipelines_*.py",

@@ -350,7 +353,7 @@ def module_to_test_file(module_fname):
     elif len(splits) > 0 and splits[0] == "utils":
         default_test_file = f"tests/utils/test_utils_{module_name}"
     elif len(splits) > 4 and splits[2] == "models":
-        default_test_file = f"tests/{splits[3]}/test_{module_name}"
+        default_test_file = f"tests/models/{splits[3]}/test_{module_name}"
     elif len(splits) > 2 and splits[2].startswith("generation"):
         default_test_file = f"tests/generation/test_{module_name}"
     elif len(splits) > 2 and splits[2].startswith("trainer"):

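To make the `splits` indexing in this branch concrete, here is a simplified, standalone re-creation of just the models branch of `module_to_test_file` (a sketch, not the full tests_fetcher.py logic; the real code splits on os.path.sep and handles many more cases):

# Hypothetical helper mirroring only the branch changed above.
def default_test_file_for(module_fname: str) -> str:
    splits = module_fname.split("/")   # e.g. ["src", "transformers", "models", "bert", "modeling_bert.py"]
    module_name = splits[-1]           # "modeling_bert.py"
    if len(splits) > 4 and splits[2] == "models":
        # A module under src/transformers/models/<model>/ now maps to tests/models/<model>/.
        return f"tests/models/{splits[3]}/test_{module_name}"
    raise ValueError("only the models/ branch is sketched here")


print(default_test_file_for("src/transformers/models/bert/modeling_bert.py"))
# -> tests/models/bert/test_modeling_bert.py
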