"src/nni_manager/vscode:/vscode.git/clone" did not exist on "b1a65978ca13ef8faa0aab0365c188f0db43c127"
Unverified Commit 13deb95a authored by Sam Shleifer, committed by GitHub

Move tests/utils.py -> transformers/testing_utils.py (#5350)

parent 9c219305
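
This commit moves the shared test helpers out of tests/utils.py and into transformers/testing_utils.py, so they ship with the library instead of living only inside the tests package. Every hunk below makes the same substitution; a minimal before/after sketch of the import change, using helpers that appear in the diff:

```python
# Before: helpers were imported from the tests package.
# from .utils import require_torch, slow, torch_device        # relative import, tests-only
# from tests.utils import require_tf, require_torch, slow     # absolute import, tests-only

# After: the same helpers are importable from the installed library.
from transformers.testing_utils import require_torch, slow, torch_device
```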
@@ -17,10 +17,10 @@
 import unittest
 from transformers import T5Config, is_tf_available
+from transformers.testing_utils import require_tf, slow
 from .test_configuration_common import ConfigTester
 from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor
-from .utils import require_tf, slow
 if is_tf_available():
...
@@ -18,10 +18,10 @@ import random
 import unittest
 from transformers import TransfoXLConfig, is_tf_available
+from transformers.testing_utils import require_tf, slow
 from .test_configuration_common import ConfigTester
 from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor
-from .utils import require_tf, slow
 if is_tf_available():
...
@@ -17,10 +17,10 @@
 import unittest
 from transformers import is_tf_available
+from transformers.testing_utils import require_tf, slow
 from .test_configuration_common import ConfigTester
 from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor
-from .utils import require_tf, slow
 if is_tf_available():
...
@@ -16,8 +16,7 @@
 import unittest
 from transformers import is_tf_available
+from transformers.testing_utils import require_tf, slow
-from .utils import require_tf, slow
 if is_tf_available():
...
@@ -18,10 +18,10 @@ import random
 import unittest
 from transformers import XLNetConfig, is_tf_available
+from transformers.testing_utils import require_tf, slow
 from .test_configuration_common import ConfigTester
 from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor
-from .utils import require_tf, slow
 if is_tf_available():
...
@@ -17,10 +17,10 @@ import random
 import unittest
 from transformers import is_torch_available
+from transformers.testing_utils import require_multigpu, require_torch, slow, torch_device
 from .test_configuration_common import ConfigTester
 from .test_modeling_common import ModelTesterMixin, ids_tensor
-from .utils import require_multigpu, require_torch, slow, torch_device
 if is_torch_available():
...
@@ -17,10 +17,10 @@
 import unittest
 from transformers import is_torch_available
+from transformers.testing_utils import require_torch, slow, torch_device
 from .test_configuration_common import ConfigTester
 from .test_modeling_common import ModelTesterMixin, ids_tensor
-from .utils import require_torch, slow, torch_device
 if is_torch_available():
...
@@ -17,8 +17,7 @@
 import unittest
 from transformers import is_torch_available
+from transformers.testing_utils import slow
-from .utils import slow
 if is_torch_available():
...
@@ -18,10 +18,10 @@ import random
 import unittest
 from transformers import is_torch_available
+from transformers.testing_utils import require_torch, slow, torch_device
 from .test_configuration_common import ConfigTester
 from .test_modeling_common import ModelTesterMixin, ids_tensor
-from .utils import require_torch, slow, torch_device
 if is_torch_available():
...
@@ -3,9 +3,9 @@ from os.path import dirname, exists
 from shutil import rmtree
 from tempfile import NamedTemporaryFile, TemporaryDirectory
-from tests.utils import require_tf, require_torch, slow
 from transformers import BertConfig, BertTokenizerFast, FeatureExtractionPipeline
 from transformers.convert_graph_to_onnx import convert, ensure_valid_input, infer_shapes
+from transformers.testing_utils import require_tf, require_torch, slow
 class FuncContiguousArgs:
...
@@ -19,8 +19,7 @@ import tempfile
 import unittest
 from transformers import is_torch_available
+from transformers.testing_utils import require_torch
-from .utils import require_torch
 if is_torch_available():
...
 import unittest
 from transformers import is_tf_available
+from transformers.testing_utils import require_tf
-from .utils import require_tf
 if is_tf_available():
...
@@ -3,8 +3,7 @@ from typing import Iterable, List, Optional
 from transformers import pipeline
 from transformers.pipelines import SUPPORTED_TASKS, DefaultArgumentHandler, Pipeline
+from transformers.testing_utils import require_tf, require_torch, slow, torch_device
-from .utils import require_tf, require_torch, slow, torch_device
 DEFAULT_DEVICE_NUM = -1 if torch_device == "cpu" else 0
...
@@ -27,10 +27,9 @@ from transformers import (
     RobertaTokenizer,
     RobertaTokenizerFast,
 )
+from transformers.testing_utils import DUMMY_UNKWOWN_IDENTIFIER, SMALL_MODEL_IDENTIFIER  # noqa: F401
 from transformers.tokenization_auto import TOKENIZER_MAPPING
-from .utils import DUMMY_UNKWOWN_IDENTIFIER, SMALL_MODEL_IDENTIFIER, slow  # noqa: F401
 class AutoTokenizerTest(unittest.TestCase):
     # @slow
...
@@ -17,6 +17,7 @@
 import os
 import unittest
+from transformers.testing_utils import slow
 from transformers.tokenization_bert import (
     VOCAB_FILES_NAMES,
     BasicTokenizer,
@@ -29,7 +30,6 @@ from transformers.tokenization_bert import (
 )
 from .test_tokenization_common import TokenizerTesterMixin
-from .utils import slow
 class BertTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
...
@@ -17,6 +17,7 @@
 import os
 import unittest
+from transformers.testing_utils import custom_tokenizers
 from transformers.tokenization_bert import WordpieceTokenizer
 from transformers.tokenization_bert_japanese import (
     VOCAB_FILES_NAMES,
@@ -26,7 +27,6 @@ from transformers.tokenization_bert_japanese import (
 )
 from .test_tokenization_common import TokenizerTesterMixin
-from .utils import custom_tokenizers
 @custom_tokenizers
...
@@ -22,8 +22,8 @@ import tempfile
 from collections import OrderedDict
 from typing import TYPE_CHECKING, Dict, List, Tuple, Union
-from tests.utils import require_tf, require_torch, slow
 from transformers import PreTrainedTokenizer, PreTrainedTokenizerBase, PreTrainedTokenizerFast
+from transformers.testing_utils import require_tf, require_torch, slow
 if TYPE_CHECKING:
...
@@ -14,10 +14,10 @@
 # limitations under the License.
+from transformers.testing_utils import slow
 from transformers.tokenization_distilbert import DistilBertTokenizer, DistilBertTokenizerFast
 from .test_tokenization_bert import BertTokenizationTest
-from .utils import slow
 class DistilBertTokenizationTest(BertTokenizationTest):
...
@@ -3,7 +3,6 @@ import unittest
 from collections import namedtuple
 from itertools import takewhile
-from tests.utils import require_torch
 from transformers import (
     BertTokenizer,
     BertTokenizerFast,
@@ -16,6 +15,7 @@ from transformers import (
     TransfoXLTokenizer,
     is_torch_available,
 )
+from transformers.testing_utils import require_torch
 from transformers.tokenization_distilbert import DistilBertTokenizerFast
 from transformers.tokenization_openai import OpenAIGPTTokenizerFast
 from transformers.tokenization_roberta import RobertaTokenizerFast
...
@@ -18,10 +18,10 @@ import json
 import os
 import unittest
+from transformers.testing_utils import slow
 from transformers.tokenization_roberta import VOCAB_FILES_NAMES, AddedToken, RobertaTokenizer, RobertaTokenizerFast
 from .test_tokenization_common import TokenizerTesterMixin
-from .utils import slow
 class RobertaTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
...
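
For context, a hedged sketch of how these relocated helpers are typically applied in a test module after the move. The test class, its body, and the RUN_SLOW environment variable mentioned in the comment are illustrative assumptions, not part of this commit:

```python
import unittest

from transformers import is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device


if is_torch_available():
    import torch


@require_torch  # skipped entirely when PyTorch is not installed
class ExampleDeviceTest(unittest.TestCase):
    @slow  # only runs when slow tests are enabled (assumed: RUN_SLOW=1)
    def test_sum_on_device(self):
        # torch_device points at "cuda" when a GPU is available, otherwise "cpu"
        x = torch.ones(2, 2, device=torch_device)
        self.assertEqual(x.sum().item(), 4.0)
```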