"tests/models/ctrl/test_modeling_ctrl.py" did not exist on "b390a5672aea995e65d031b3759274d92188e553"
Unverified Commit 19420fd9 authored by Yih-Dar, committed by GitHub

Move test model folders (#17034)



* move test model folders (TODO: fix imports and others)

* fix (potentially partially) imports (in model test modules)

* fix (potentially partially) imports (in tokenization test modules)

* fix (potentially partially) imports (in feature extraction test modules)

* fix import utils.test_modeling_tf_core

* fix path ../fixtures/

* fix imports about generation.test_generation_flax_utils

* fix more imports

* fix fixture path

* fix get_test_dir

* update module_to_test_file

* fix get_tests_dir from wrong transformers.utils

* update config.yml (CircleCI)

* fix style

* remove missing imports

* update new model script

* update check_repo

* update SPECIAL_MODULE_TO_TEST_MAP

* fix style

* add __init__

* update self-scheduled

* fix add_new_model scripts

* check one way to get location back

* python setup.py build install

* fix import in test auto

* update self-scheduled.yml

* update slack notification script

* Add comments about artifact names

* fix for yolos
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent cd9274d0
@@ -916,7 +916,7 @@ jobs:
path: ~/transformers/test_preparation.txt
- run: |
if [ -f test_list.txt ]; then
python -m pytest -n 1 tests/*layoutlmv2* --dist=loadfile -s --make-reports=tests_layoutlmv2 --durations=100
python -m pytest -n 1 tests/models/*layoutlmv2* --dist=loadfile -s --make-reports=tests_layoutlmv2 --durations=100
fi
- store_artifacts:
path: ~/transformers/tests_output.txt
......
@@ -43,13 +43,14 @@ jobs:
working-directory: /transformers
run: |
rm -rf tests/__pycache__
rm -rf tests/models/__pycache__
rm -rf reports
- id: set-matrix
name: Identify models to test
working-directory: /transformers/tests
run: |
echo "::set-output name=matrix::$(python3 -c 'import os; x = list(filter(os.path.isdir, os.listdir(os.getcwd()))); x.sort(); print(x)')"
echo "::set-output name=matrix::$(python3 -c 'import os; tests = os.getcwd(); model_tests = os.listdir(os.path.join(tests, "models")); d1 = sorted(list(filter(os.path.isdir, os.listdir(tests)))); d2 = sorted(list(filter(os.path.isdir, [f"models/{x}" for x in model_tests]))); d1.remove("models"); d = d2 + d1; print(d)')"
- name: NVIDIA-SMI
run: |
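The new one-liner in the "Identify models to test" step builds the job matrix from the per-model folders under tests/models/ plus the remaining top-level test folders. Unpacked, it does roughly the following (a sketch assuming the current working directory is the repository's tests/ folder, as set by `working-directory: /transformers/tests`):

# Expanded form of the matrix one-liner above (same logic, spread over several lines).
import os

tests = os.getcwd()                                       # /transformers/tests
model_tests = os.listdir(os.path.join(tests, "models"))   # "albert", "bert", ...

# Top-level test folders; relative names work because the cwd is tests/.
d1 = sorted(filter(os.path.isdir, os.listdir(tests)))

# Per-model folders, prefixed so entries read "models/albert", "models/bert", ...
d2 = sorted(filter(os.path.isdir, [f"models/{x}" for x in model_tests]))

# The parent "models" folder is covered by its per-model entries, so drop it.
d1.remove("models")

# Model folders first, then the remaining top-level folders.
print(d2 + d1)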
@@ -76,7 +77,16 @@ jobs:
needs: setup
steps:
- name: Echo folder ${{ matrix.folders }}
run: echo "${{ matrix.folders }}"
shell: bash
# For folders like `models/bert`, set an env. var. (`matrix_folders`) to `models_bert`, which will be used to
# set the artifact folder names (because the character `/` is not allowed).
run: |
echo "${{ matrix.folders }}"
matrix_folders=${{ matrix.folders }}
echo "$matrix_folders"
matrix_folders=${matrix_folders/'models/'/'models_'}
echo "$matrix_folders"
echo "matrix_folders=$matrix_folders" >> $GITHUB_ENV
- name: Update clone
working-directory: /transformers
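In the step above, the bash expansion `${matrix_folders/'models/'/'models_'}` replaces the first occurrence of `models/` with `models_`, so a matrix entry such as `models/bert` becomes `models_bert` (the `/` character is not allowed in artifact names). A minimal Python equivalent, for illustration only:

# Same rewrite as the bash parameter expansion in the workflow step above.
def artifact_folder_name(matrix_folder: str) -> str:
    return matrix_folder.replace("models/", "models_", 1)

assert artifact_folder_name("models/bert") == "models_bert"
assert artifact_folder_name("deepspeed") == "deepspeed"  # non-model folders pass through unchanged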
@@ -95,7 +105,7 @@ jobs:
if: ${{ always() }}
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.machines }}_run_all_tests_gpu_${{ matrix.folders }}_test_reports
name: ${{ matrix.machines }}_run_all_tests_gpu_${{ env.matrix_folders }}_test_reports
path: /transformers/reports/${{ matrix.machines }}_tests_gpu_${{ matrix.folders }}
run_examples_gpu:
@@ -255,6 +265,8 @@ jobs:
CI_SLACK_CHANNEL_ID: ${{ secrets.CI_SLACK_CHANNEL_ID }}
CI_SLACK_CHANNEL_ID_DAILY: ${{ secrets.CI_SLACK_CHANNEL_ID_DAILY }}
CI_SLACK_CHANNEL_DUMMY_TESTS: ${{ secrets.CI_SLACK_CHANNEL_DUMMY_TESTS }}
# We pass `needs.setup.outputs.matrix` as the argument. A processing in `notification_service.py` to change
# `models/bert` to `models_bert` is required, as the artifact names use `_` instead of `/`.
run: |
pip install slack_sdk
python utils/notification_service.py "${{ needs.setup.outputs.matrix }}"
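As the comment above notes, `needs.setup.outputs.matrix` still contains entries like `models/bert`, while the uploaded artifacts are named with `models_bert`, so the Slack notification script has to normalize folder names before matching reports to artifacts. A hypothetical sketch of that normalization (not the actual `utils/notification_service.py` code):

# Hypothetical sketch: map matrix folder names onto the artifact naming scheme above.
import ast
import sys

def to_artifact_form(folder: str) -> str:
    return folder.replace("models/", "models_", 1)  # "models/bert" -> "models_bert"

if __name__ == "__main__":
    # The setup job prints the matrix as a Python-style list, e.g. "['models/bert', 'deepspeed']".
    folders = ast.literal_eval(sys.argv[1])
    print(sorted(to_artifact_form(f) for f in folders))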
@@ -102,10 +102,10 @@ class AddNewModelCommand(BaseTransformersCLICommand):
model_dir = f"{path_to_transformer_root}/src/transformers/models/{lowercase_model_name}"
os.makedirs(model_dir, exist_ok=True)
os.makedirs(f"{path_to_transformer_root}/tests/{lowercase_model_name}", exist_ok=True)
os.makedirs(f"{path_to_transformer_root}/tests/models/{lowercase_model_name}", exist_ok=True)
# Tests require submodules as they have parent imports
with open(f"{path_to_transformer_root}/tests/{lowercase_model_name}/__init__.py", "w"):
with open(f"{path_to_transformer_root}/tests/models/{lowercase_model_name}/__init__.py", "w"):
pass
shutil.move(
@@ -136,7 +136,7 @@ class AddNewModelCommand(BaseTransformersCLICommand):
shutil.move(
f"{directory}/test_modeling_{lowercase_model_name}.py",
f"{path_to_transformer_root}/tests/{lowercase_model_name}/test_modeling_{lowercase_model_name}.py",
f"{path_to_transformer_root}/tests/models/{lowercase_model_name}/test_modeling_{lowercase_model_name}.py",
)
else:
os.remove(f"{directory}/modeling_{lowercase_model_name}.py")
@@ -153,7 +153,7 @@ class AddNewModelCommand(BaseTransformersCLICommand):
shutil.move(
f"{directory}/test_modeling_tf_{lowercase_model_name}.py",
f"{path_to_transformer_root}/tests/{lowercase_model_name}/test_modeling_tf_{lowercase_model_name}.py",
f"{path_to_transformer_root}/tests/models/{lowercase_model_name}/test_modeling_tf_{lowercase_model_name}.py",
)
else:
os.remove(f"{directory}/modeling_tf_{lowercase_model_name}.py")
@@ -170,7 +170,7 @@ class AddNewModelCommand(BaseTransformersCLICommand):
shutil.move(
f"{directory}/test_modeling_flax_{lowercase_model_name}.py",
f"{path_to_transformer_root}/tests/{lowercase_model_name}/test_modeling_flax_{lowercase_model_name}.py",
f"{path_to_transformer_root}/tests/models/{lowercase_model_name}/test_modeling_flax_{lowercase_model_name}.py",
)
else:
os.remove(f"{directory}/modeling_flax_{lowercase_model_name}.py")
......
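With the test folders nested one level deeper, a generated test module's shared-helper imports go up three packages instead of two, which is why the command creates an `__init__.py` in `tests/models/<model>/` ("Tests require submodules as they have parent imports"). Taking an existing model as an example of how the imports resolve:

# Inside a model test such as tests/models/bert/test_modeling_bert.py, three leading
# dots walk up bert -> models -> tests, where the shared test mixins live:
from ...test_configuration_common import ConfigTester               # tests/test_configuration_common.py
from ...test_modeling_common import ModelTesterMixin, ids_tensor    # tests/test_modeling_common.py
from ...generation.test_generation_utils import GenerationTesterMixin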
@@ -554,7 +554,7 @@ def get_model_files(model_type: str, frameworks: Optional[List[str]] = None) ->
]
test_files = filter_framework_files(test_files, frameworks=frameworks)
# Add the test directory
test_files = [REPO_PATH / "tests" / module_name / f for f in test_files]
test_files = [REPO_PATH / "tests" / "models" / module_name / f for f in test_files]
# Filter by existing files
test_files = [f for f in test_files if f.exists()]
@@ -1227,7 +1227,7 @@ def create_new_model_like(
disabled_fx_test = False
tests_folder = REPO_PATH / "tests" / new_model_patterns.model_lower_cased
tests_folder = REPO_PATH / "tests" / "models" / new_model_patterns.model_lower_cased
os.makedirs(tests_folder, exist_ok=True)
with open(tests_folder / "__init__.py", "w"):
pass
......
@@ -26,25 +26,25 @@ from transformers import {{cookiecutter.camelcase_modelname}}TokenizerFast
{% endif -%}
{% if cookiecutter.has_fast_class == "True" and cookiecutter.slow_tokenizer_use_sentencepiece == "True" -%}
from transformers.testing_utils import require_sentencepiece, require_tokenizers
from ..test_tokenization_common import TokenizerTesterMixin
from ...test_tokenization_common import TokenizerTesterMixin
@require_sentencepiece
@require_tokenizers
{% elif cookiecutter.slow_tokenizer_use_sentencepiece == "True" -%}
from transformers.testing_utils import require_sentencepiece
from ..test_tokenization_common import TokenizerTesterMixin
from ...test_tokenization_common import TokenizerTesterMixin
@require_sentencepiece
{% elif cookiecutter.has_fast_class == "True" -%}
from transformers.testing_utils import require_tokenizers
from ..test_tokenization_common import TokenizerTesterMixin
from ...test_tokenization_common import TokenizerTesterMixin
@require_tokenizers
{% else -%}
from ..test_tokenization_common import TokenizerTesterMixin
from ...test_tokenization_common import TokenizerTesterMixin
{% endif -%}
......
@@ -20,8 +20,8 @@ import unittest
from transformers import is_flax_available, {{cookiecutter.camelcase_modelname}}Config
from transformers.testing_utils import require_flax, slow
from ..test_configuration_common import ConfigTester
from ..test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor
from ...test_configuration_common import ConfigTester
from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor
if is_flax_available():
import numpy as np
@@ -345,8 +345,8 @@ from transformers import (
)
from transformers.testing_utils import require_sentencepiece, require_flax, require_tokenizers, slow
from ..test_configuration_common import ConfigTester
from ..test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor
from ...test_configuration_common import ConfigTester
from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor
if is_flax_available():
......
@@ -20,8 +20,8 @@ import unittest
from transformers import is_tf_available, {{cookiecutter.camelcase_modelname}}Config
from transformers.testing_utils import require_tf, slow
from ..test_configuration_common import ConfigTester
from ..test_modeling_tf_common import TFModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
from ...test_configuration_common import ConfigTester
from ...test_modeling_tf_common import TFModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
if is_tf_available():
@@ -711,8 +711,8 @@ from transformers import (
)
from transformers.testing_utils import require_sentencepiece, require_tf, require_tokenizers, slow
from ..test_configuration_common import ConfigTester
from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor
from ...test_configuration_common import ConfigTester
from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor
if is_tf_available():
......
@@ -18,13 +18,13 @@
{% if cookiecutter.is_encoder_decoder_model == "False" -%}
import unittest
from ..test_modeling_common import floats_tensor
from ...test_modeling_common import floats_tensor
from transformers import is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device
from transformers import {{cookiecutter.camelcase_modelname}}Config
from ..test_configuration_common import ConfigTester
from ..test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
if is_torch_available():
@@ -489,9 +489,9 @@ from transformers import is_torch_available
from transformers.utils import cached_property
from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
from ..test_configuration_common import ConfigTester
from ..generation.test_generation_utils import GenerationTesterMixin
from ..test_modeling_common import ModelTesterMixin, ids_tensor
from ...test_configuration_common import ConfigTester
from ...generation.test_generation_utils import GenerationTesterMixin
from ...test_modeling_common import ModelTesterMixin, ids_tensor
if is_torch_available():
......
@@ -24,6 +24,7 @@ from transformers.testing_utils import (
TestCasePlus,
execute_subprocess_async,
get_gpu_count,
get_tests_dir,
require_deepspeed,
require_torch_gpu,
slow,
@@ -70,8 +71,8 @@ ELECTRA_TINY = "hf-internal-testing/tiny-electra"
XLNET_TINY = "sshleifer/tiny-xlnet-base-cased"
BERT_TINY = "hf-internal-testing/tiny-bert"
FIXTURE_DIRECTORY = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures")
ROOT_DIRECTORY = os.path.join(dirname(dirname(dirname(os.path.abspath(__file__)))))
FIXTURE_DIRECTORY = get_tests_dir("fixtures")
ROOT_DIRECTORY = os.path.join(dirname(get_tests_dir()))
# TODO: to add:
# albert
......
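`get_tests_dir()` from `transformers.testing_utils` resolves the repository's `tests/` directory from the calling test module and, when given a relative path such as "fixtures", joins it on; that way fixture paths keep working no matter how deeply the test file is nested after the move. A simplified sketch of the idea (illustrative, not the library's implementation):

# Rough idea behind a helper like get_tests_dir(append_path=None): start from the
# caller's file and walk up until the tests/ root is found.
import inspect
import os

def get_tests_dir_sketch(append_path=None):
    caller_file = inspect.stack()[1].filename
    tests_dir = os.path.dirname(os.path.abspath(caller_file))
    # Walk upward: tests/models/albert -> tests/models -> tests (stop at the filesystem root as a safeguard).
    while not tests_dir.endswith(os.sep + "tests") and os.path.dirname(tests_dir) != tests_dir:
        tests_dir = os.path.dirname(tests_dir)
    return os.path.join(tests_dir, append_path) if append_path else tests_dir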
@@ -20,8 +20,8 @@ from transformers import AlbertConfig, is_torch_available
from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, slow, torch_device
from ..test_configuration_common import ConfigTester
from ..test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
if is_torch_available():
......
@@ -19,7 +19,7 @@ import numpy as np
from transformers import AlbertConfig, is_flax_available
from transformers.testing_utils import require_flax, slow
from ..test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor, random_attention_mask
from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor, random_attention_mask
if is_flax_available():
......
@@ -20,8 +20,8 @@ from transformers import AlbertConfig, is_tf_available
from transformers.models.auto import get_values
from transformers.testing_utils import require_tf, slow
from ..test_configuration_common import ConfigTester
from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask
from ...test_configuration_common import ConfigTester
from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask
if is_tf_available():
......
@@ -13,17 +13,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
from os.path import dirname
from transformers import AlbertTokenizer, AlbertTokenizerFast
from transformers.testing_utils import require_sentencepiece, require_tokenizers, slow
from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow
from ..test_tokenization_common import TokenizerTesterMixin
from ...test_tokenization_common import TokenizerTesterMixin
SAMPLE_VOCAB = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/spiece.model")
SAMPLE_VOCAB = get_tests_dir("fixtures/spiece.model")
@require_sentencepiece
......
@@ -14,7 +14,6 @@
# limitations under the License.
import importlib
import os
import sys
import tempfile
import unittest
@@ -24,15 +23,15 @@ import transformers.models.auto
from transformers.models.auto.configuration_auto import CONFIG_MAPPING, AutoConfig
from transformers.models.bert.configuration_bert import BertConfig
from transformers.models.roberta.configuration_roberta import RobertaConfig
from transformers.testing_utils import DUMMY_UNKNOWN_IDENTIFIER
from transformers.testing_utils import DUMMY_UNKNOWN_IDENTIFIER, get_tests_dir
sys.path.append(str(Path(__file__).parent.parent.parent / "utils"))
sys.path.append(str(Path(__file__).parent.parent.parent.parent / "utils"))
from test_module.custom_configuration import CustomConfig # noqa E402
SAMPLE_ROBERTA_CONFIG = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../fixtures/dummy-config.json")
SAMPLE_ROBERTA_CONFIG = get_tests_dir("fixtures/dummy-config.json")
class AutoConfigTest(unittest.TestCase):
......
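Since this test module moved from `tests/auto/` to `tests/models/auto/`, reaching the repo-level `utils/` folder (which holds the `test_module` helpers) now takes one more `.parent` hop, hence the extra `.parent` in the `sys.path.append` line above. Illustrated with placeholder paths:

# Why the extra .parent is needed after the move (placeholder "/repo" root, for illustration).
from pathlib import Path

old_file = Path("/repo/tests/auto/test_configuration_auto.py")
new_file = Path("/repo/tests/models/auto/test_configuration_auto.py")

assert old_file.parents[2] / "utils" == Path("/repo/utils")  # old: Path(__file__).parent.parent.parent
assert new_file.parents[3] / "utils" == Path("/repo/utils")  # new: one additional .parent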
@@ -14,7 +14,6 @@
# limitations under the License.
import json
import os
import sys
import tempfile
import unittest
@@ -28,20 +27,18 @@ from transformers import (
Wav2Vec2Config,
Wav2Vec2FeatureExtractor,
)
from transformers.testing_utils import DUMMY_UNKNOWN_IDENTIFIER
from transformers.testing_utils import DUMMY_UNKNOWN_IDENTIFIER, get_tests_dir
sys.path.append(str(Path(__file__).parent.parent.parent / "utils"))
sys.path.append(str(Path(__file__).parent.parent.parent.parent / "utils"))
from test_module.custom_configuration import CustomConfig # noqa E402
from test_module.custom_feature_extraction import CustomFeatureExtractor # noqa E402
SAMPLE_FEATURE_EXTRACTION_CONFIG_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../fixtures")
SAMPLE_FEATURE_EXTRACTION_CONFIG = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "../fixtures/dummy_feature_extractor_config.json"
)
SAMPLE_CONFIG = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../fixtures/dummy-config.json")
SAMPLE_FEATURE_EXTRACTION_CONFIG_DIR = get_tests_dir("fixtures")
SAMPLE_FEATURE_EXTRACTION_CONFIG = get_tests_dir("fixtures/dummy_feature_extractor_config.json")
SAMPLE_CONFIG = get_tests_dir("fixtures/dummy-config.json")
class AutoFeatureExtractorTest(unittest.TestCase):
......
@@ -32,7 +32,7 @@ from transformers.testing_utils import (
from ..bert.test_modeling_bert import BertModelTester
sys.path.append(str(Path(__file__).parent.parent.parent / "utils"))
sys.path.append(str(Path(__file__).parent.parent.parent.parent / "utils"))
from test_module.custom_configuration import CustomConfig # noqa E402
......