Unverified commit 19420fd9, authored by Yih-Dar, committed by GitHub
Browse files

Move test model folders (#17034)



* move test model folders (TODO: fix imports and others)

* fix (potentially partially) imports (in model test modules)

* fix (potentially partially) imports (in tokenization test modules)

* fix (potentially partially) imports (in feature extraction test modules)

* fix import utils.test_modeling_tf_core

* fix path ../fixtures/

* fix imports about generation.test_generation_flax_utils

* fix more imports

* fix fixture path

* fix get_test_dir

* update module_to_test_file

* fix get_tests_dir from wrong transformers.utils

* update config.yml (CircleCI)

* fix style

* remove missing imports

* update new model script

* update check_repo

* update SPECIAL_MODULE_TO_TEST_MAP

* fix style

* add __init__

* update self-scheduled

* fix add_new_model scripts

* check one way to get location back

* python setup.py build install

* fix import in test auto

* update self-scheduled.yml

* update slack notification script

* Add comments about artifact names

* fix for yolos
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent cd9274d0
...@@ -21,7 +21,7 @@ from transformers import LayoutLMTokenizer, LayoutLMTokenizerFast ...@@ -21,7 +21,7 @@ from transformers import LayoutLMTokenizer, LayoutLMTokenizerFast
from transformers.models.layoutlm.tokenization_layoutlm import VOCAB_FILES_NAMES from transformers.models.layoutlm.tokenization_layoutlm import VOCAB_FILES_NAMES
from transformers.testing_utils import require_tokenizers from transformers.testing_utils import require_tokenizers
from ..test_tokenization_common import TokenizerTesterMixin from ...test_tokenization_common import TokenizerTesterMixin
@require_tokenizers @require_tokenizers
......
...@@ -21,7 +21,7 @@ import numpy as np ...@@ -21,7 +21,7 @@ import numpy as np
from transformers.testing_utils import require_pytesseract, require_torch from transformers.testing_utils import require_pytesseract, require_torch
from transformers.utils import is_pytesseract_available, is_torch_available from transformers.utils import is_pytesseract_available, is_torch_available
from ..test_feature_extraction_common import FeatureExtractionSavingTestMixin, prepare_image_inputs from ...test_feature_extraction_common import FeatureExtractionSavingTestMixin, prepare_image_inputs
if is_torch_available(): if is_torch_available():
......
...@@ -23,8 +23,8 @@ import unittest ...@@ -23,8 +23,8 @@ import unittest
from transformers.testing_utils import require_detectron2, require_torch, slow, torch_device from transformers.testing_utils import require_detectron2, require_torch, slow, torch_device
from transformers.utils import is_detectron2_available, is_torch_available from transformers.utils import is_detectron2_available, is_torch_available
from ..test_configuration_common import ConfigTester from ...test_configuration_common import ConfigTester
from ..test_modeling_common import ModelTesterMixin, _config_zero_init, ids_tensor, random_attention_mask from ...test_modeling_common import ModelTesterMixin, _config_zero_init, ids_tensor, random_attention_mask
if is_torch_available(): if is_torch_available():
......
...@@ -33,7 +33,7 @@ from transformers.models.layoutlmv2.tokenization_layoutlmv2 import ( ...@@ -33,7 +33,7 @@ from transformers.models.layoutlmv2.tokenization_layoutlmv2 import (
) )
from transformers.testing_utils import is_pt_tf_cross_test, require_pandas, require_tokenizers, require_torch, slow from transformers.testing_utils import is_pt_tf_cross_test, require_pandas, require_tokenizers, require_torch, slow
from ..test_tokenization_common import ( from ...test_tokenization_common import (
SMALL_TRAINING_CORPUS, SMALL_TRAINING_CORPUS,
TokenizerTesterMixin, TokenizerTesterMixin,
filter_non_english, filter_non_english,
......
...@@ -17,12 +17,12 @@ import os ...@@ -17,12 +17,12 @@ import os
import shutil import shutil
import tempfile import tempfile
import unittest import unittest
from os.path import dirname
from typing import List from typing import List
from transformers import PreTrainedTokenizer, PreTrainedTokenizerBase, PreTrainedTokenizerFast from transformers import PreTrainedTokenizer, PreTrainedTokenizerBase, PreTrainedTokenizerFast
from transformers.models.layoutxlm import LayoutXLMTokenizer, LayoutXLMTokenizerFast from transformers.models.layoutxlm import LayoutXLMTokenizer, LayoutXLMTokenizerFast
from transformers.testing_utils import ( from transformers.testing_utils import (
get_tests_dir,
require_pytesseract, require_pytesseract,
require_sentencepiece, require_sentencepiece,
require_tokenizers, require_tokenizers,
...@@ -38,7 +38,7 @@ if is_pytesseract_available(): ...@@ -38,7 +38,7 @@ if is_pytesseract_available():
from transformers import LayoutLMv2FeatureExtractor, LayoutXLMProcessor from transformers import LayoutLMv2FeatureExtractor, LayoutXLMProcessor
SAMPLE_SP = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_sentencepiece.model") SAMPLE_SP = get_tests_dir("fixtures/test_sentencepiece.model")
@require_pytesseract @require_pytesseract
......
...@@ -14,7 +14,6 @@ ...@@ -14,7 +14,6 @@
# limitations under the License. # limitations under the License.
import inspect import inspect
import os
import shutil import shutil
import tempfile import tempfile
import unittest import unittest
...@@ -23,6 +22,7 @@ from typing import List ...@@ -23,6 +22,7 @@ from typing import List
from transformers import AddedToken, LayoutXLMTokenizerFast, SpecialTokensMixin, is_tf_available, is_torch_available from transformers import AddedToken, LayoutXLMTokenizerFast, SpecialTokensMixin, is_tf_available, is_torch_available
from transformers.models.layoutxlm.tokenization_layoutxlm import LayoutXLMTokenizer from transformers.models.layoutxlm.tokenization_layoutxlm import LayoutXLMTokenizer
from transformers.testing_utils import ( from transformers.testing_utils import (
get_tests_dir,
is_pt_tf_cross_test, is_pt_tf_cross_test,
require_pandas, require_pandas,
require_scatter, require_scatter,
...@@ -32,7 +32,7 @@ from transformers.testing_utils import ( ...@@ -32,7 +32,7 @@ from transformers.testing_utils import (
slow, slow,
) )
from ..test_tokenization_common import ( from ...test_tokenization_common import (
SMALL_TRAINING_CORPUS, SMALL_TRAINING_CORPUS,
TokenizerTesterMixin, TokenizerTesterMixin,
filter_non_english, filter_non_english,
...@@ -40,7 +40,7 @@ from ..test_tokenization_common import ( ...@@ -40,7 +40,7 @@ from ..test_tokenization_common import (
) )
SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../fixtures/test_sentencepiece.model") SAMPLE_VOCAB = get_tests_dir("fixtures/test_sentencepiece.model")
@require_sentencepiece @require_sentencepiece
......
...@@ -24,9 +24,9 @@ from transformers.models.auto import get_values ...@@ -24,9 +24,9 @@ from transformers.models.auto import get_values
from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
from transformers.utils import cached_property from transformers.utils import cached_property
from ..generation.test_generation_utils import GenerationTesterMixin from ...generation.test_generation_utils import GenerationTesterMixin
from ..test_configuration_common import ConfigTester from ...test_configuration_common import ConfigTester
from ..test_modeling_common import ModelTesterMixin, ids_tensor from ...test_modeling_common import ModelTesterMixin, ids_tensor
if is_torch_available(): if is_torch_available():
......
...@@ -19,8 +19,8 @@ import unittest ...@@ -19,8 +19,8 @@ import unittest
from transformers import LEDConfig, is_tf_available from transformers import LEDConfig, is_tf_available
from transformers.testing_utils import require_tf, slow from transformers.testing_utils import require_tf, slow
from ..test_configuration_common import ConfigTester from ...test_configuration_common import ConfigTester
from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor
if is_tf_available(): if is_tf_available():
......
...@@ -19,8 +19,8 @@ import unittest ...@@ -19,8 +19,8 @@ import unittest
from transformers import LongformerConfig, is_torch_available from transformers import LongformerConfig, is_torch_available
from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
from ..test_configuration_common import ConfigTester from ...test_configuration_common import ConfigTester
from ..test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask from ...test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
if is_torch_available(): if is_torch_available():
......
...@@ -19,8 +19,8 @@ import unittest ...@@ -19,8 +19,8 @@ import unittest
from transformers import is_tf_available from transformers import is_tf_available
from transformers.testing_utils import require_sentencepiece, require_tf, require_tokenizers, slow from transformers.testing_utils import require_sentencepiece, require_tf, require_tokenizers, slow
from ..test_configuration_common import ConfigTester from ...test_configuration_common import ConfigTester
from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask
if is_tf_available(): if is_tf_available():
......
...@@ -18,8 +18,8 @@ import unittest ...@@ -18,8 +18,8 @@ import unittest
from transformers import LukeConfig, is_torch_available from transformers import LukeConfig, is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device from transformers.testing_utils import require_torch, slow, torch_device
from ..test_configuration_common import ConfigTester from ...test_configuration_common import ConfigTester
from ..test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask from ...test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
if is_torch_available(): if is_torch_available():
......
...@@ -13,20 +13,18 @@ ...@@ -13,20 +13,18 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import os
import unittest import unittest
from os.path import dirname
from typing import Tuple from typing import Tuple
from transformers import AddedToken, LukeTokenizer from transformers import AddedToken, LukeTokenizer
from transformers.testing_utils import require_torch, slow from transformers.testing_utils import get_tests_dir, require_torch, slow
from ..test_tokenization_common import TokenizerTesterMixin from ...test_tokenization_common import TokenizerTesterMixin
SAMPLE_VOCAB = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/vocab.json") SAMPLE_VOCAB = get_tests_dir("fixtures/vocab.json")
SAMPLE_MERGE_FILE = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/merges.txt") SAMPLE_MERGE_FILE = get_tests_dir("fixtures/merges.txt")
SAMPLE_ENTITY_VOCAB = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_entity_vocab.json") SAMPLE_ENTITY_VOCAB = get_tests_dir("fixtures/test_entity_vocab.json")
class LukeTokenizerTest(TokenizerTesterMixin, unittest.TestCase): class LukeTokenizerTest(TokenizerTesterMixin, unittest.TestCase):
......
...@@ -23,8 +23,8 @@ from transformers import LxmertConfig, is_tf_available, is_torch_available ...@@ -23,8 +23,8 @@ from transformers import LxmertConfig, is_tf_available, is_torch_available
from transformers.models.auto import get_values from transformers.models.auto import get_values
from transformers.testing_utils import require_torch, slow, torch_device from transformers.testing_utils import require_torch, slow, torch_device
from ..test_configuration_common import ConfigTester from ...test_configuration_common import ConfigTester
from ..test_modeling_common import ModelTesterMixin, ids_tensor from ...test_modeling_common import ModelTesterMixin, ids_tensor
if is_torch_available(): if is_torch_available():
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.