"model_cards/vscode:/vscode.git/clone" did not exist on "ea1507fb458d41a0ee43045bcee2bcd77bf79497"
Unverified commit 19420fd9, authored by Yih-Dar, committed by GitHub

Move test model folders (#17034)



* move test model folders (TODO: fix imports and others)

* fix (potentially partially) imports (in model test modules)

* fix (potentially partially) imports (in tokenization test modules)

* fix (potentially partially) imports (in feature extraction test modules)

* fix import utils.test_modeling_tf_core

* fix path ../fixtures/

* fix imports about generation.test_generation_flax_utils

* fix more imports

* fix fixture path

* fix get_test_dir

* update module_to_test_file

* fix get_tests_dir from wrong transformers.utils

* update config.yml (CircleCI)

* fix style

* remove missing imports

* update new model script

* update check_repo

* update SPECIAL_MODULE_TO_TEST_MAP

* fix style

* add __init__

* update self-scheduled

* fix add_new_model scripts

* check one way to get location back

* python setup.py build install

* fix import in test auto

* update self-scheduled.yml

* update slack notification script

* Add comments about artifact names

* fix for yolos
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent cd9274d0
@@ -22,8 +22,8 @@ import numpy as np
 from transformers import LxmertConfig, is_tf_available
 from transformers.testing_utils import require_tf, slow
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor, random_attention_mask
 if is_tf_available():
...
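The two-dot to three-dot change here (and in the hunks below) follows from the new directory depth: with the model test folders moved one level deeper (e.g. from tests/lxmert/ to tests/models/lxmert/), a relative import back to the shared helpers at the tests/ root needs one extra leading dot. A minimal sketch of that arithmetic, using hypothetical paths rather than code from this PR:

```python
from pathlib import PurePosixPath


def relative_import_dots(test_file: str, tests_root: str = "tests") -> str:
    """Leading dots a test module needs to reach the package at the tests/ root."""
    parts = PurePosixPath(test_file).parts
    # One dot per path component below the tests/ root, counting the file itself.
    return "." * (len(parts) - parts.index(tests_root) - 1)


print(relative_import_dots("tests/lxmert/test_modeling_tf_lxmert.py"))          # ".." (old layout)
print(relative_import_dots("tests/models/lxmert/test_modeling_tf_lxmert.py"))   # "..." (new layout)
```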
@@ -21,7 +21,7 @@ from transformers import LxmertTokenizer, LxmertTokenizerFast
 from transformers.models.bert.tokenization_bert import VOCAB_FILES_NAMES
 from transformers.testing_utils import require_tokenizers
-from ..test_tokenization_common import TokenizerTesterMixin
+from ...test_tokenization_common import TokenizerTesterMixin
 @require_tokenizers
...
@@ -23,9 +23,9 @@ from transformers import M2M100Config, is_torch_available
 from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
 from transformers.utils import cached_property
-from ..generation.test_generation_utils import GenerationTesterMixin
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin, ids_tensor
+from ...generation.test_generation_utils import GenerationTesterMixin
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin, ids_tensor
 if is_torch_available():
...
@@ -12,26 +12,31 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import os
 import tempfile
 import unittest
-from os.path import dirname
 from pathlib import Path
 from shutil import copyfile
 from transformers import M2M100Tokenizer, is_torch_available
-from transformers.testing_utils import nested_simplify, require_sentencepiece, require_tokenizers, require_torch, slow
+from transformers.testing_utils import (
+    get_tests_dir,
+    nested_simplify,
+    require_sentencepiece,
+    require_tokenizers,
+    require_torch,
+    slow,
+)
 from transformers.utils import is_sentencepiece_available
 if is_sentencepiece_available():
     from transformers.models.m2m_100.tokenization_m2m_100 import save_json, VOCAB_FILES_NAMES
-from ..test_tokenization_common import TokenizerTesterMixin
+from ...test_tokenization_common import TokenizerTesterMixin
 if is_sentencepiece_available():
-    SAMPLE_SP = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_sentencepiece.model")
+    SAMPLE_SP = get_tests_dir("fixtures/test_sentencepiece.model")
 if is_torch_available():
...
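This hunk also swaps the hand-rolled os.path.dirname(...) chain for get_tests_dir from transformers.testing_utils, so the fixture path no longer has to encode how deep the test module sits. Roughly, such a helper can be approximated as below; this is an illustrative sketch of the idea, not the library's actual implementation:

```python
import inspect
import os


def get_tests_dir_sketch(append_path=None):
    # Resolve the calling test module's file, then walk up until the directory
    # is named "tests"; the result is the same whether the caller lives in
    # tests/<model>/ or tests/models/<model>/.
    caller_file = inspect.stack()[1].filename
    tests_dir = os.path.abspath(os.path.dirname(caller_file))
    while not tests_dir.endswith("tests"):
        parent = os.path.dirname(tests_dir)
        if parent == tests_dir:
            raise RuntimeError("no enclosing 'tests' directory found")
        tests_dir = parent
    return os.path.join(tests_dir, append_path) if append_path else tests_dir


# Hypothetical usage, mirroring the change above:
# SAMPLE_SP = get_tests_dir_sketch("fixtures/test_sentencepiece.model")
```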
@@ -21,8 +21,8 @@ from transformers import MarianConfig, is_flax_available
 from transformers.testing_utils import require_flax, require_sentencepiece, require_tokenizers, slow
 from transformers.utils import cached_property
-from ..generation.test_generation_flax_utils import FlaxGenerationTesterMixin
-from ..test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor
+from ...generation.test_generation_flax_utils import FlaxGenerationTesterMixin
+from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor
 if is_flax_available():
...
@@ -22,9 +22,9 @@ from transformers import MarianConfig, is_torch_available
 from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
 from transformers.utils import cached_property
-from ..generation.test_generation_utils import GenerationTesterMixin
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin, ids_tensor
+from ...generation.test_generation_utils import GenerationTesterMixin
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin, ids_tensor
 if is_torch_available():
...
@@ -22,8 +22,8 @@ from transformers import AutoTokenizer, MarianConfig, MarianTokenizer, Translati
 from transformers.testing_utils import require_sentencepiece, require_tf, require_tokenizers, slow
 from transformers.utils import cached_property
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor
 if is_tf_available():
...
@@ -13,25 +13,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import os
 import tempfile
 import unittest
-from os.path import dirname
 from pathlib import Path
 from shutil import copyfile
 from transformers import BatchEncoding, MarianTokenizer
-from transformers.testing_utils import require_sentencepiece, slow
+from transformers.testing_utils import get_tests_dir, require_sentencepiece, slow
 from transformers.utils import is_sentencepiece_available, is_tf_available, is_torch_available
 if is_sentencepiece_available():
     from transformers.models.marian.tokenization_marian import VOCAB_FILES_NAMES, save_json
-from ..test_tokenization_common import TokenizerTesterMixin
-SAMPLE_SP = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_sentencepiece.model")
+from ...test_tokenization_common import TokenizerTesterMixin
+SAMPLE_SP = get_tests_dir("fixtures/test_sentencepiece.model")
 mock_tokenizer_config = {"target_lang": "fi", "source_lang": "en"}
 zh_code = ">>zh<<"
...
@@ -21,7 +21,7 @@ import numpy as np
 from transformers.testing_utils import require_torch, require_vision
 from transformers.utils import is_torch_available, is_vision_available
-from ..test_feature_extraction_common import FeatureExtractionSavingTestMixin, prepare_image_inputs
+from ...test_feature_extraction_common import FeatureExtractionSavingTestMixin, prepare_image_inputs
 if is_torch_available():
...
@@ -24,8 +24,8 @@ from transformers import DetrConfig, MaskFormerConfig, SwinConfig, is_torch_avai
 from transformers.testing_utils import require_torch, require_vision, slow, torch_device
 from transformers.utils import cached_property
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin
 if is_torch_available():
...
@@ -21,8 +21,8 @@ from transformers import MBartConfig, is_flax_available
 from transformers.testing_utils import require_flax, require_sentencepiece, require_tokenizers, slow
 from transformers.utils import cached_property
-from ..generation.test_generation_flax_utils import FlaxGenerationTesterMixin
-from ..test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor
+from ...generation.test_generation_flax_utils import FlaxGenerationTesterMixin
+from ...test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor
 if is_flax_available():
...
@@ -23,9 +23,9 @@ from transformers import MBartConfig, is_torch_available
 from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
 from transformers.utils import cached_property
-from ..generation.test_generation_utils import GenerationTesterMixin
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_common import ModelTesterMixin, ids_tensor
+from ...generation.test_generation_utils import GenerationTesterMixin
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_common import ModelTesterMixin, ids_tensor
 if is_torch_available():
...
@@ -20,8 +20,8 @@ from transformers import AutoTokenizer, MBartConfig, is_tf_available
 from transformers.testing_utils import require_sentencepiece, require_tf, require_tokenizers, slow
 from transformers.utils import cached_property
-from ..test_configuration_common import ConfigTester
-from ..test_modeling_tf_common import TFModelTesterMixin, ids_tensor
+from ...test_configuration_common import ConfigTester
+from ...test_modeling_tf_common import TFModelTesterMixin, ids_tensor
 if is_tf_available():
...
@@ -12,18 +12,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import os
 import shutil
 import tempfile
 import unittest
 from transformers import SPIECE_UNDERLINE, BatchEncoding, MBartTokenizer, MBartTokenizerFast, is_torch_available
-from transformers.testing_utils import nested_simplify, require_sentencepiece, require_tokenizers, require_torch
+from transformers.testing_utils import (
+    get_tests_dir,
+    nested_simplify,
+    require_sentencepiece,
+    require_tokenizers,
+    require_torch,
+)
-from ..test_tokenization_common import TokenizerTesterMixin
-SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../fixtures/test_sentencepiece.model")
+from ...test_tokenization_common import TokenizerTesterMixin
+SAMPLE_VOCAB = get_tests_dir("fixtures/test_sentencepiece.model")
 if is_torch_available():
...
@@ -12,19 +12,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import os
 import shutil
 import tempfile
 import unittest
-from os.path import dirname
 from transformers import SPIECE_UNDERLINE, BatchEncoding, MBart50Tokenizer, MBart50TokenizerFast, is_torch_available
-from transformers.testing_utils import nested_simplify, require_sentencepiece, require_tokenizers, require_torch, slow
+from transformers.testing_utils import (
+    get_tests_dir,
+    nested_simplify,
+    require_sentencepiece,
+    require_tokenizers,
+    require_torch,
+    slow,
+)
-from ..test_tokenization_common import TokenizerTesterMixin
-SAMPLE_VOCAB = os.path.join(dirname(dirname(os.path.abspath(__file__))), "fixtures/test_sentencepiece.model")
+from ...test_tokenization_common import TokenizerTesterMixin
+SAMPLE_VOCAB = get_tests_dir("fixtures/test_sentencepiece.model")
 if is_torch_available():
     from transformers.models.mbart.modeling_mbart import shift_tokens_right
...