"vscode:/vscode.git/clone" did not exist on "485da7222f7f9ca9854db1a6df027b00d348d017"
Unverified Commit 19420fd9 authored by Yih-Dar, committed by GitHub

Move test model folders (#17034)



* move test model folders (TODO: fix imports and others)

* fix (potentially partially) imports (in model test modules)

* fix (potentially partially) imports (in tokenization test modules)

* fix (potentially partially) imports (in feature extraction test modules)

* fix import utils.test_modeling_tf_core

* fix path ../fixtures/

* fix imports about generation.test_generation_flax_utils

* fix more imports

* fix fixture path

* fix get_test_dir

* update module_to_test_file

* fix get_tests_dir from wrong transformers.utils

* update config.yml (CircleCI)

* fix style

* remove missing imports

* update new model script

* update check_repo

* update SPECIAL_MODULE_TO_TEST_MAP

* fix style

* add __init__

* update self-scheduled

* fix add_new_model scripts

* check one way to get location back

* python setup.py build install

* fix import in test auto

* update self-scheduled.yml

* update slack notification script

* Add comments about artifact names

* fix for yolos

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent cd9274d0
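
The hunks below all apply the two fixes called out in the commit list above. The first is the relative-import change: every per-model test module moved one directory deeper, so imports of the shared test helpers at the top of tests/ gain one leading dot. A comment-only sketch of that pattern follows; the exact file locations are inferred from the import statements, not shown verbatim on this page:

# Sketch of the recurring import fix in the hunks below (paths inferred):
#
#   old location: tests/prophetnet/test_modeling_prophetnet.py
#       from ..test_modeling_common import ModelTesterMixin    # ".."  -> tests/
#
#   new location: tests/models/prophetnet/test_modeling_prophetnet.py
#       from ...test_modeling_common import ModelTesterMixin   # "..." -> tests/
#
# The shared mixins (test_modeling_common, test_configuration_common,
# test_tokenization_common, generation/test_generation_utils) stay at the
# top of tests/, so every moved module needs one extra leading dot.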
@@ -20,9 +20,9 @@ import unittest
 from transformers import ProphetNetConfig, is_torch_available
 from transformers.testing_utils import require_torch, slow, torch_device
-from ..generation.test_generation_utils import GenerationTesterMixin
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor
+from ...generation.test_generation_utils import GenerationTesterMixin
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor
 if is_torch_available():
...
@@ -28,7 +28,7 @@ from transformers.models.bert.tokenization_bert import (
 from transformers.models.prophetnet.tokenization_prophetnet import VOCAB_FILES_NAMES, ProphetNetTokenizer
 from transformers.testing_utils import require_torch, slow
-from ..test_tokenization_common import TokenizerTesterMixin
+from ...test_tokenization_common import TokenizerTesterMixin
 class ProphetNetTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
...
@@ -21,8 +21,8 @@ import unittest
 from transformers import QDQBertConfig, is_torch_available
 from transformers.testing_utils import require_pytorch_quantization, require_torch, slow, torch_device
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
 if is_torch_available():
...
@@ -20,7 +20,6 @@ import os
 import shutil
 import tempfile
 import unittest
-from os.path import dirname
 from unittest.mock import patch
 import numpy as np
@@ -30,6 +29,7 @@ from transformers.models.bert.tokenization_bert import VOCAB_FILES_NAMES as DPR_
 from transformers.models.dpr.tokenization_dpr import DPRContextEncoderTokenizer, DPRQuestionEncoderTokenizer
 from transformers.models.roberta.tokenization_roberta import VOCAB_FILES_NAMES as BART_VOCAB_FILES_NAMES
 from transformers.testing_utils import (
+    get_tests_dir,
     require_sentencepiece,
     require_tokenizers,
     require_torch,
@@ -46,7 +46,7 @@ from ..t5.test_modeling_t5 import T5ModelTester
 TOLERANCE = 1e-3
-T5_SAMPLE_VOCAB = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_sentencepiece.model")
+T5_SAMPLE_VOCAB = get_tests_dir("fixtures/test_sentencepiece.model")
 if is_torch_available() and is_datasets_available() and is_faiss_available():
     import torch
     from datasets import Dataset
...
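
The second recurring fix, first visible in the hunk above, replaces the dirname(dirname(os.path.abspath(__file__))) fixture lookup with get_tests_dir from transformers.testing_utils, which resolves fixture paths against the tests/ root no matter how deep the calling module sits. Below is a minimal sketch of what such a helper does, assuming it walks up from the caller's file until it reaches the tests/ directory; this is an illustration, not the library's actual source:

# Minimal sketch of a get_tests_dir-style helper (transformers.testing_utils
# provides the real one; this is an assumption-laden illustration).
# The point: fixture paths are resolved against the tests/ root instead of
# counting dirname() calls from each test file, so moving a test module one
# directory deeper does not break "fixtures/..." lookups.
import inspect
import os


def get_tests_dir(append_path=None):
    # Locate the file of the module that called this helper.
    caller_file = inspect.stack()[1].filename
    tests_dir = os.path.abspath(os.path.dirname(caller_file))
    # Walk upward until the directory itself is named "tests" (stop at the
    # filesystem root as a safety net).
    while os.path.basename(tests_dir) != "tests" and tests_dir != os.path.dirname(tests_dir):
        tests_dir = os.path.dirname(tests_dir)
    return os.path.join(tests_dir, append_path) if append_path else tests_dir


# Usage, as in the hunk above:
# T5_SAMPLE_VOCAB = get_tests_dir("fixtures/test_sentencepiece.model")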
@@ -22,8 +22,8 @@ import numpy as np
 from transformers import RealmConfig, is_torch_available
 from transformers.testing_utils import require_torch, slow, torch_device
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
 if is_torch_available():
...
@@ -28,7 +28,7 @@ from transformers.models.bert.tokenization_bert import (
 from transformers.models.realm.tokenization_realm import RealmTokenizer
 from transformers.testing_utils import require_tokenizers, slow
-from ..test_tokenization_common import TokenizerTesterMixin, filter_non_english
+from ...test_tokenization_common import TokenizerTesterMixin, filter_non_english
 @require_tokenizers
...
@@ -25,9 +25,9 @@ from transformers.testing_utils import (
     torch_device,
 )
-from ..generation.test_generation_utils import GenerationTesterMixin
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
+from ...generation.test_generation_utils import GenerationTesterMixin
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
 if is_torch_available():
...
@@ -12,18 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import os
 import unittest
-from os.path import dirname
 from transformers import SPIECE_UNDERLINE, ReformerTokenizer, ReformerTokenizerFast
-from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow
+from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, require_torch, slow
 from transformers.utils import cached_property
-from ..test_tokenization_common import TokenizerTesterMixin
+from ...test_tokenization_common import TokenizerTesterMixin
-SAMPLE_VOCAB = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_sentencepiece.model")
+SAMPLE_VOCAB = get_tests_dir("fixtures/test_sentencepiece.model")
 @require_sentencepiece
...
@@ -22,8 +22,8 @@ from transformers import RegNetConfig
 from transformers.file_utils import cached_property, is_torch_available, is_vision_available
 from transformers.testing_utils import require_torch, require_vision, slow, torch_device
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor
 if is_torch_available():
...
@@ -20,8 +20,8 @@ import unittest
 from transformers import is_torch_available
 from transformers.testing_utils import require_torch, slow, torch_device
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
 if is_torch_available():
...