Unverified Commit 29c10a41 authored by Lysandre Debut, committed by GitHub

[Test refactor 1/5] Per-folder tests reorganization (#15725)



* Per-folder tests reorganization
Co-authored-by: sgugger <sylvain.gugger@gmail.com>
Co-authored-by: Stas Bekman <stas@stason.org>
parent fecb08c2
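
The diff below is repetitive on purpose: every moved test module gains one package level, so its single-dot relative imports of the shared helpers become double-dot imports. The sketch that follows is illustrative only; the folder and module names (tests_demo/, somemodel/, test_modeling_somemodel.py) are made up for this example and are not taken from the commit's file list. It just demonstrates, in a self-contained way, why one extra leading dot is needed once a test module sits one directory deeper than the shared test_*_common.py helpers.

# Self-contained sketch: build a throwaway package that mirrors the assumed
# layout (shared helper at the package root, model tests one level down) and
# show that the helper is reachable from there via a two-dot relative import.
import importlib
import os
import sys
import tempfile

root = tempfile.mkdtemp()
pkg = os.path.join(root, "tests_demo")          # hypothetical package name
os.makedirs(os.path.join(pkg, "somemodel"))
open(os.path.join(pkg, "__init__.py"), "w").close()
open(os.path.join(pkg, "somemodel", "__init__.py"), "w").close()

# Shared helper stays at the package root, like test_modeling_common.py.
with open(os.path.join(pkg, "test_modeling_common.py"), "w") as f:
    f.write("MARKER = 'shared helper at the tests root'\n")

# Per-model test module lives one level deeper, so '..' walks back up.
with open(os.path.join(pkg, "somemodel", "test_modeling_somemodel.py"), "w") as f:
    f.write("from ..test_modeling_common import MARKER\n")

sys.path.insert(0, root)
mod = importlib.import_module("tests_demo.somemodel.test_modeling_somemodel")
print(mod.MARKER)  # -> shared helper at the tests root

The same mechanics explain every `.` to `..` change in the hunks that follow.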
@@ -21,7 +21,7 @@ import numpy as np
 from transformers.file_utils import is_pytesseract_available, is_torch_available
 from transformers.testing_utils import require_pytesseract, require_torch
-from .test_feature_extraction_common import FeatureExtractionSavingTestMixin, prepare_image_inputs
+from ..test_feature_extraction_common import FeatureExtractionSavingTestMixin, prepare_image_inputs
 if is_torch_available():
......
@@ -23,8 +23,8 @@ import unittest
 from transformers.file_utils import is_detectron2_available, is_torch_available
 from transformers.testing_utils import require_detectron2, require_torch, slow, torch_device
-from .test_configuration_common import ConfigTester
-from .test_modeling_common import ModelTesterMixin, _config_zero_init, ids_tensor, random_attention_mask
+from ..test_configuration_common import ConfigTester
+from ..test_modeling_common import ModelTesterMixin, _config_zero_init, ids_tensor, random_attention_mask
 if is_torch_available():
......
@@ -40,7 +40,7 @@ from transformers.testing_utils import (
     slow,
 )
-from .test_tokenization_common import (
+from ..test_tokenization_common import (
     SMALL_TRAINING_CORPUS,
     TokenizerTesterMixin,
     filter_non_english,
......
@@ -17,6 +17,7 @@ import os
 import shutil
 import tempfile
 import unittest
+from os.path import dirname
 from typing import List
 from transformers import PreTrainedTokenizer, PreTrainedTokenizerBase, PreTrainedTokenizerFast
@@ -37,7 +38,7 @@ if is_pytesseract_available():
     from transformers import LayoutLMv2FeatureExtractor, LayoutXLMProcessor
-SAMPLE_SP = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model")
+SAMPLE_SP = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_sentencepiece.model")
 @require_pytesseract
......
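
The SAMPLE_SP change above applies the same logic to on-disk fixtures: with the module one folder deeper, a single dirname of __file__ no longer lands in the directory that contains fixtures/, so a second dirname is added. A small sketch of the path arithmetic, using a hypothetical file location (the real file path is not shown in this view):

import os
from os.path import dirname

# Hypothetical location of a moved test module (illustrative only).
test_file = "/repo/tests/somemodel/test_processing_somemodel.py"

old_base = os.path.dirname(os.path.abspath(test_file))
print(old_base)  # /repo/tests/somemodel  (no fixtures/ directory here)

new_base = dirname(dirname(os.path.abspath(test_file)))
print(os.path.join(new_base, "fixtures/test_sentencepiece.model"))
# -> /repo/tests/fixtures/test_sentencepiece.model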
@@ -32,7 +32,7 @@ from transformers.testing_utils import (
     slow,
 )
-from .test_tokenization_common import (
+from ..test_tokenization_common import (
     SMALL_TRAINING_CORPUS,
     TokenizerTesterMixin,
     filter_non_english,
@@ -40,7 +40,7 @@ from .test_tokenization_common import (
 )
-SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model")
+SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../fixtures/test_sentencepiece.model")
 @require_sentencepiece
......
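
This hunk reaches the shared fixtures differently: it keeps the single os.path.dirname and prefixes the filename with "../" instead. Assuming the same layout as in the previous sketch, both spellings normalize to the same file; a quick hedged check (paths are made up for illustration):

import os
from os.path import dirname

here = "/repo/tests/somemodel"  # hypothetical folder of the moved module

via_dotdot = os.path.join(here, "../fixtures/test_sentencepiece.model")
via_dirname = os.path.join(dirname(here), "fixtures/test_sentencepiece.model")

# After normalization both point at the same file under the tests root.
assert os.path.normpath(via_dotdot) == os.path.normpath(via_dirname)
print(os.path.normpath(via_dotdot))  # /repo/tests/fixtures/test_sentencepiece.model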
@@ -24,9 +24,9 @@ from transformers.file_utils import cached_property
 from transformers.models.auto import get_values
 from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
-from .test_configuration_common import ConfigTester
-from .test_generation_utils import GenerationTesterMixin
-from .test_modeling_common import ModelTesterMixin, ids_tensor
+from ..generation.test_generation_utils import GenerationTesterMixin
+from ..test_configuration_common import ConfigTester
+from ..test_modeling_common import ModelTesterMixin, ids_tensor
 if is_torch_available():
......
@@ -19,8 +19,8 @@ import unittest
 from transformers import LEDConfig, is_tf_available
 from transformers.testing_utils import is_pt_tf_cross_test, require_tf, slow
-from .test_configuration_common import ConfigTester
-from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor
+from ..test_configuration_common import ConfigTester
+from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor
 if is_tf_available():
......
@@ -19,8 +19,8 @@ import unittest
 from transformers import LongformerConfig, is_torch_available
 from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
-from .test_configuration_common import ConfigTester
-from .test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
+from ..test_configuration_common import ConfigTester
+from ..test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
 if is_torch_available():
......
@@ -19,8 +19,8 @@ import unittest
 from transformers import is_tf_available
 from transformers.testing_utils import require_sentencepiece, require_tf, require_tokenizers, slow
-from .test_configuration_common import ConfigTester
-from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor
+from ..test_configuration_common import ConfigTester
+from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor
 if is_tf_available():
......
@@ -18,8 +18,8 @@ import unittest
 from transformers import LukeConfig, is_torch_available
 from transformers.testing_utils import require_torch, slow, torch_device
-from .test_configuration_common import ConfigTester
-from .test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
+from ..test_configuration_common import ConfigTester
+from ..test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
 if is_torch_available():
......
@@ -15,17 +15,18 @@
 import os
 import unittest
+from os.path import dirname
 from typing import Tuple
 from transformers import AddedToken, LukeTokenizer
 from transformers.testing_utils import require_torch, slow
-from .test_tokenization_common import TokenizerTesterMixin
+from ..test_tokenization_common import TokenizerTesterMixin
-SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/vocab.json")
-SAMPLE_MERGE_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/merges.txt")
-SAMPLE_ENTITY_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_entity_vocab.json")
+SAMPLE_VOCAB = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/vocab.json")
+SAMPLE_MERGE_FILE = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/merges.txt")
+SAMPLE_ENTITY_VOCAB = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_entity_vocab.json")
 class LukeTokenizerTest(TokenizerTesterMixin, unittest.TestCase):
......
@@ -26,8 +26,8 @@ from transformers import LxmertConfig, is_tf_available, is_torch_available
 from transformers.models.auto import get_values
 from transformers.testing_utils import is_pt_tf_cross_test, require_torch, slow, torch_device
-from .test_configuration_common import ConfigTester
-from .test_modeling_common import ModelTesterMixin, ids_tensor
+from ..test_configuration_common import ConfigTester
+from ..test_modeling_common import ModelTesterMixin, ids_tensor
 if is_torch_available():
@@ -48,8 +48,6 @@ if is_tf_available():
 class LxmertModelTester:
-    """You can also import this e.g from .test_modeling_bart import BartModelTester"""
     def __init__(
         self,
         parent,
......
@@ -22,8 +22,8 @@ import numpy as np
 from transformers import LxmertConfig, is_tf_available
 from transformers.testing_utils import require_tf, slow
-from .test_configuration_common import ConfigTester
-from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor
+from ..test_configuration_common import ConfigTester
+from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor
 if is_tf_available():
......
@@ -21,7 +21,7 @@ from transformers import LxmertTokenizer, LxmertTokenizerFast
 from transformers.models.bert.tokenization_bert import VOCAB_FILES_NAMES
 from transformers.testing_utils import require_tokenizers
-from .test_tokenization_common import TokenizerTesterMixin
+from ..test_tokenization_common import TokenizerTesterMixin
 @require_tokenizers
......