Unverified Commit 29c10a41 authored by Lysandre Debut's avatar Lysandre Debut Committed by GitHub
Browse files

[Test refactor 1/5] Per-folder tests reorganization (#15725)



* Per-folder tests reorganization
Co-authored-by: sgugger <sylvain.gugger@gmail.com>
Co-authored-by: Stas Bekman <stas@stason.org>
parent fecb08c2
...@@ -19,8 +19,8 @@ import unittest ...@@ -19,8 +19,8 @@ import unittest
from transformers import is_tf_available from transformers import is_tf_available
from transformers.testing_utils import require_tf, slow from transformers.testing_utils import require_tf, slow
from .test_configuration_common import ConfigTester from ..test_configuration_common import ConfigTester
from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor
if is_tf_available(): if is_tf_available():
......
...@@ -18,9 +18,9 @@ import unittest ...@@ -18,9 +18,9 @@ import unittest
from transformers import XLMConfig, is_torch_available from transformers import XLMConfig, is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device from transformers.testing_utils import require_torch, slow, torch_device
from .test_configuration_common import ConfigTester from ..generation.test_generation_utils import GenerationTesterMixin
from .test_generation_utils import GenerationTesterMixin from ..test_configuration_common import ConfigTester
from .test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask from ..test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
if is_torch_available(): if is_torch_available():
......
...@@ -21,7 +21,7 @@ import unittest ...@@ -21,7 +21,7 @@ import unittest
from transformers.models.xlm.tokenization_xlm import VOCAB_FILES_NAMES, XLMTokenizer from transformers.models.xlm.tokenization_xlm import VOCAB_FILES_NAMES, XLMTokenizer
from transformers.testing_utils import slow from transformers.testing_utils import slow
from .test_tokenization_common import TokenizerTesterMixin from ..test_tokenization_common import TokenizerTesterMixin
class XLMTokenizationTest(TokenizerTesterMixin, unittest.TestCase): class XLMTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
......
...@@ -15,15 +15,16 @@ ...@@ -15,15 +15,16 @@
import os import os
import unittest import unittest
from os.path import dirname
from transformers.file_utils import cached_property from transformers.file_utils import cached_property
from transformers.models.xlm_prophetnet.tokenization_xlm_prophetnet import SPIECE_UNDERLINE, XLMProphetNetTokenizer from transformers.models.xlm_prophetnet.tokenization_xlm_prophetnet import SPIECE_UNDERLINE, XLMProphetNetTokenizer
from transformers.testing_utils import require_sentencepiece, slow from transformers.testing_utils import require_sentencepiece, slow
from .test_tokenization_common import TokenizerTesterMixin from ..test_tokenization_common import TokenizerTesterMixin
SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model") SAMPLE_VOCAB = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_sentencepiece.model")
@require_sentencepiece @require_sentencepiece
......
...@@ -18,15 +18,16 @@ import pickle ...@@ -18,15 +18,16 @@ import pickle
import shutil import shutil
import tempfile import tempfile
import unittest import unittest
from os.path import dirname
from transformers import SPIECE_UNDERLINE, XLMRobertaTokenizer, XLMRobertaTokenizerFast from transformers import SPIECE_UNDERLINE, XLMRobertaTokenizer, XLMRobertaTokenizerFast
from transformers.file_utils import cached_property from transformers.file_utils import cached_property
from transformers.testing_utils import require_sentencepiece, require_tokenizers, slow from transformers.testing_utils import require_sentencepiece, require_tokenizers, slow
from .test_tokenization_common import TokenizerTesterMixin from ..test_tokenization_common import TokenizerTesterMixin
SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model") SAMPLE_VOCAB = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_sentencepiece.model")
@require_sentencepiece @require_sentencepiece
......
...@@ -19,9 +19,9 @@ import unittest ...@@ -19,9 +19,9 @@ import unittest
from transformers import XLMRobertaXLConfig, is_torch_available from transformers import XLMRobertaXLConfig, is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device from transformers.testing_utils import require_torch, slow, torch_device
from .test_configuration_common import ConfigTester from ..generation.test_generation_utils import GenerationTesterMixin
from .test_generation_utils import GenerationTesterMixin from ..test_configuration_common import ConfigTester
from .test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask from ..test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
if is_torch_available(): if is_torch_available():
......
...@@ -21,8 +21,8 @@ import unittest ...@@ -21,8 +21,8 @@ import unittest
from transformers import XLNetConfig, is_tf_available from transformers import XLNetConfig, is_tf_available
from transformers.testing_utils import require_tf, slow from transformers.testing_utils import require_tf, slow
from .test_configuration_common import ConfigTester from ..test_configuration_common import ConfigTester
from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor
if is_tf_available(): if is_tf_available():
......
...@@ -19,9 +19,9 @@ import unittest ...@@ -19,9 +19,9 @@ import unittest
from transformers import XLNetConfig, is_torch_available from transformers import XLNetConfig, is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device from transformers.testing_utils import require_torch, slow, torch_device
from .test_configuration_common import ConfigTester from ..generation.test_generation_utils import GenerationTesterMixin
from .test_generation_utils import GenerationTesterMixin from ..test_configuration_common import ConfigTester
from .test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask from ..test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
if is_torch_available(): if is_torch_available():
...@@ -526,7 +526,6 @@ class XLNetModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase) ...@@ -526,7 +526,6 @@ class XLNetModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase)
all_generative_model_classes = ( all_generative_model_classes = (
(XLNetLMHeadModel,) if is_torch_available() else () (XLNetLMHeadModel,) if is_torch_available() else ()
) # TODO (PVP): Check other models whether language generation is also applicable ) # TODO (PVP): Check other models whether language generation is also applicable
test_pruning = False test_pruning = False
# XLNet has 2 QA models -> need to manually set the correct labels for one of them here # XLNet has 2 QA models -> need to manually set the correct labels for one of them here
......
...@@ -15,14 +15,15 @@ ...@@ -15,14 +15,15 @@
import os import os
import unittest import unittest
from os.path import dirname
from transformers import SPIECE_UNDERLINE, XLNetTokenizer, XLNetTokenizerFast from transformers import SPIECE_UNDERLINE, XLNetTokenizer, XLNetTokenizerFast
from transformers.testing_utils import require_sentencepiece, require_tokenizers, slow from transformers.testing_utils import require_sentencepiece, require_tokenizers, slow
from .test_tokenization_common import TokenizerTesterMixin from ..test_tokenization_common import TokenizerTesterMixin
SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model") SAMPLE_VOCAB = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_sentencepiece.model")
@require_sentencepiece @require_sentencepiece
......
...@@ -17,12 +17,11 @@ ...@@ -17,12 +17,11 @@
import unittest import unittest
from tests.test_modeling_common import floats_tensor
from transformers import YosoConfig, is_torch_available from transformers import YosoConfig, is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device from transformers.testing_utils import require_torch, slow, torch_device
from .test_configuration_common import ConfigTester from ..test_configuration_common import ConfigTester
from .test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask from ..test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
if is_torch_available(): if is_torch_available():
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment