Unverified commit 11505fa1 authored by Sylvain Gugger, committed by GitHub

Dummies multi backend (#11100)

* Replaces requires_xxx by one generic method

* Quality and update check_dummies

* Fix inits check

* Post-merge cleanup
parent 424419f5
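
For orientation before the diff: the change collapses the per-backend guard helpers (requires_pytorch, requires_sentencepiece, and so on) into a single table-driven requires_backends. The snippet below is a minimal, self-contained sketch of that pattern, with the availability checks stubbed out and the error strings paraphrased; it mirrors the shape of the file_utils.py hunk further down rather than reproducing it verbatim.

from collections import OrderedDict


def is_sentencepiece_available():
    # Stand-in for the real check in file_utils; always "missing" in this sketch.
    return False


def is_faiss_available():
    # Stand-in for the real check in file_utils.
    return False


SENTENCEPIECE_IMPORT_ERROR = "{0} requires the SentencePiece library. Run `pip install sentencepiece`.\n"
FAISS_IMPORT_ERROR = "{0} requires the faiss library. Run `pip install faiss-cpu`.\n"


# Before this PR: one near-identical guard function per optional dependency.
def requires_sentencepiece(obj):
    name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
    if not is_sentencepiece_available():
        raise ImportError(SENTENCEPIECE_IMPORT_ERROR.format(name))


# After this PR: one mapping from backend name to (availability check, error template)...
BACKENDS_MAPPING = OrderedDict(
    [
        ("faiss", (is_faiss_available, FAISS_IMPORT_ERROR)),
        ("sentencepiece", (is_sentencepiece_available, SENTENCEPIECE_IMPORT_ERROR)),
    ]
)


# ...and one generic guard that takes either a single backend name or a list of names.
def requires_backends(obj, backends):
    if not isinstance(backends, (list, tuple)):
        backends = [backends]
    name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
    if not all(BACKENDS_MAPPING[backend][0]() for backend in backends):
        raise ImportError("".join(BACKENDS_MAPPING[backend][1].format(name) for backend in backends))

With this design, supporting a new optional backend only requires a new BACKENDS_MAPPING entry; no extra helper function is needed.
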
......@@ -339,9 +339,6 @@ if is_tokenizers_available():
_import_structure["models.xlnet"].append("XLNetTokenizerFast")
_import_structure["tokenization_utils_fast"] = ["PreTrainedTokenizerFast"]
if is_sentencepiece_available():
_import_structure["convert_slow_tokenizer"] = ["SLOW_TO_FAST_CONVERTERS", "convert_slow_tokenizer"]
else:
from .utils import dummy_tokenizers_objects
......@@ -349,13 +346,19 @@ else:
name for name in dir(dummy_tokenizers_objects) if not name.startswith("_")
]
if is_sentencepiece_available() and is_tokenizers_available():
_import_structure["convert_slow_tokenizer"] = ["SLOW_TO_FAST_CONVERTERS", "convert_slow_tokenizer"]
else:
from .utils import dummy_sentencepiece_and_tokenizers_objects
_import_structure["utils.dummy_sentencepiece_and_tokenizers_objects"] = [
name for name in dir(dummy_sentencepiece_and_tokenizers_objects) if not name.startswith("_")
]
# Speech-specific objects
if is_speech_available():
_import_structure["models.speech_to_text"].append("Speech2TextFeatureExtractor")
if is_sentencepiece_available():
_import_structure["models.speech_to_text"].append("Speech2TextProcessor")
else:
from .utils import dummy_speech_objects
......@@ -363,6 +366,15 @@ else:
name for name in dir(dummy_speech_objects) if not name.startswith("_")
]
if is_sentencepiece_available() and is_speech_available():
_import_structure["models.speech_to_text"].append("Speech2TextProcessor")
else:
from .utils import dummy_sentencepiece_and_speech_objects
_import_structure["utils.dummy_sentencepiece_and_speech_objects"] = [
name for name in dir(dummy_sentencepiece_and_speech_objects) if not name.startswith("_")
]
# Vision-specific objects
if is_vision_available():
_import_structure["image_utils"] = ["ImageFeatureExtractionMixin"]
......@@ -1641,21 +1653,25 @@ if TYPE_CHECKING:
from .models.xlnet import XLNetTokenizerFast
from .tokenization_utils_fast import PreTrainedTokenizerFast
if is_sentencepiece_available():
from .convert_slow_tokenizer import SLOW_TO_FAST_CONVERTERS, convert_slow_tokenizer
else:
from .utils.dummy_tokenizers_objects import *
if is_sentencepiece_available() and is_tokenizers_available():
from .convert_slow_tokenizer import SLOW_TO_FAST_CONVERTERS, convert_slow_tokenizer
else:
from .utils.dummy_sentencepiece_and_tokenizers_objects import *
if is_speech_available():
from .models.speech_to_text import Speech2TextFeatureExtractor
if is_sentencepiece_available():
from .models.speech_to_text import Speech2TextProcessor
else:
from .utils.dummy_speech_objects import *
if is_speech_available() and is_sentencepiece_available():
from .models.speech_to_text import Speech2TextProcessor
else:
from .utils.dummy_sentencepiece_and_speech_objects import *
if is_vision_available():
from .image_utils import ImageFeatureExtractionMixin
from .models.vit import ViTFeatureExtractor
......
......@@ -24,7 +24,7 @@ from typing import Dict, List, Tuple
from tokenizers import Regex, Tokenizer, decoders, normalizers, pre_tokenizers, processors
from tokenizers.models import BPE, Unigram, WordPiece
from .file_utils import requires_protobuf, requires_sentencepiece
from .file_utils import requires_backends
class SentencePieceExtractor:
......@@ -33,7 +33,7 @@ class SentencePieceExtractor:
"""
def __init__(self, model: str):
requires_sentencepiece(self)
requires_backends(self, "sentencepiece")
from sentencepiece import SentencePieceProcessor
self.sp = SentencePieceProcessor()
......@@ -298,7 +298,7 @@ class RobertaConverter(Converter):
class SpmConverter(Converter):
def __init__(self, *args):
requires_protobuf(self)
requires_backends(self, "protobuf")
super().__init__(*args)
......
......@@ -16,7 +16,7 @@
import warnings
from ...file_utils import is_sklearn_available, requires_sklearn
from ...file_utils import is_sklearn_available, requires_backends
if is_sklearn_available():
......@@ -34,13 +34,13 @@ DEPRECATION_WARNING = (
def simple_accuracy(preds, labels):
warnings.warn(DEPRECATION_WARNING, FutureWarning)
requires_sklearn(simple_accuracy)
requires_backends(simple_accuracy, "sklearn")
return (preds == labels).mean()
def acc_and_f1(preds, labels):
warnings.warn(DEPRECATION_WARNING, FutureWarning)
requires_sklearn(acc_and_f1)
requires_backends(acc_and_f1, "sklearn")
acc = simple_accuracy(preds, labels)
f1 = f1_score(y_true=labels, y_pred=preds)
return {
......@@ -52,7 +52,7 @@ def acc_and_f1(preds, labels):
def pearson_and_spearman(preds, labels):
warnings.warn(DEPRECATION_WARNING, FutureWarning)
requires_sklearn(pearson_and_spearman)
requires_backends(pearson_and_spearman, "sklearn")
pearson_corr = pearsonr(preds, labels)[0]
spearman_corr = spearmanr(preds, labels)[0]
return {
......@@ -64,7 +64,7 @@ def pearson_and_spearman(preds, labels):
def glue_compute_metrics(task_name, preds, labels):
warnings.warn(DEPRECATION_WARNING, FutureWarning)
requires_sklearn(glue_compute_metrics)
requires_backends(glue_compute_metrics, "sklearn")
assert len(preds) == len(labels), f"Predictions and labels have mismatched lengths {len(preds)} and {len(labels)}"
if task_name == "cola":
return {"mcc": matthews_corrcoef(labels, preds)}
......@@ -94,7 +94,7 @@ def glue_compute_metrics(task_name, preds, labels):
def xnli_compute_metrics(task_name, preds, labels):
warnings.warn(DEPRECATION_WARNING, FutureWarning)
requires_sklearn(xnli_compute_metrics)
requires_backends(xnli_compute_metrics, "sklearn")
assert len(preds) == len(labels), f"Predictions and labels have mismatched lengths {len(preds)} and {len(labels)}"
if task_name == "xnli":
return {"acc": simple_accuracy(preds, labels)}
......
......@@ -532,82 +532,32 @@ VISION_IMPORT_ERROR = """
"""
def requires_datasets(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_datasets_available():
raise ImportError(DATASETS_IMPORT_ERROR.format(name))
def requires_faiss(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_faiss_available():
raise ImportError(FAISS_IMPORT_ERROR.format(name))
def requires_pytorch(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_torch_available():
raise ImportError(PYTORCH_IMPORT_ERROR.format(name))
def requires_sklearn(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_sklearn_available():
raise ImportError(SKLEARN_IMPORT_ERROR.format(name))
def requires_tf(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_tf_available():
raise ImportError(TENSORFLOW_IMPORT_ERROR.format(name))
def requires_flax(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_flax_available():
raise ImportError(FLAX_IMPORT_ERROR.format(name))
def requires_tokenizers(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_tokenizers_available():
raise ImportError(TOKENIZERS_IMPORT_ERROR.format(name))
def requires_sentencepiece(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_sentencepiece_available():
raise ImportError(SENTENCEPIECE_IMPORT_ERROR.format(name))
def requires_protobuf(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_protobuf_available():
raise ImportError(PROTOBUF_IMPORT_ERROR.format(name))
def requires_pandas(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_pandas_available():
raise ImportError(PANDAS_IMPORT_ERROR.format(name))
def requires_scatter(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_scatter_available():
raise ImportError(SCATTER_IMPORT_ERROR.format(name))
BACKENDS_MAPPING = OrderedDict(
[
("datasets", (is_datasets_available, DATASETS_IMPORT_ERROR)),
("faiss", (is_faiss_available, FAISS_IMPORT_ERROR)),
("flax", (is_flax_available, FLAX_IMPORT_ERROR)),
("pandas", (is_pandas_available, PANDAS_IMPORT_ERROR)),
("protobuf", (is_protobuf_available, PROTOBUF_IMPORT_ERROR)),
("scatter", (is_scatter_available, SCATTER_IMPORT_ERROR)),
("sentencepiece", (is_sentencepiece_available, SENTENCEPIECE_IMPORT_ERROR)),
("sklearn", (is_sklearn_available, SKLEARN_IMPORT_ERROR)),
("speech", (is_speech_available, SPEECH_IMPORT_ERROR)),
("tf", (is_tf_available, TENSORFLOW_IMPORT_ERROR)),
("tokenizers", (is_tokenizers_available, TOKENIZERS_IMPORT_ERROR)),
("torch", (is_torch_available, PYTORCH_IMPORT_ERROR)),
("vision", (is_vision_available, VISION_IMPORT_ERROR)),
]
)
def requires_speech(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_speech_available():
raise ImportError(SPEECH_IMPORT_ERROR.format(name))
def requires_backends(obj, backends):
if not isinstance(backends, (list, tuple)):
backends = [backends]
def requires_vision(obj):
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not is_vision_available():
raise ImportError(VISION_IMPORT_ERROR.format(name))
name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
if not all(BACKENDS_MAPPING[backend][0]() for backend in backends):
raise ImportError("".join([BACKENDS_MAPPING[backend][1].format(name) for backend in backends]))
def add_start_docstrings(*docstr):
......
......@@ -21,14 +21,7 @@ from typing import Iterable, List, Optional, Tuple
import numpy as np
from ...file_utils import (
cached_path,
is_datasets_available,
is_faiss_available,
is_remote_url,
requires_datasets,
requires_faiss,
)
from ...file_utils import cached_path, is_datasets_available, is_faiss_available, is_remote_url, requires_backends
from ...tokenization_utils_base import BatchEncoding
from ...utils import logging
from .configuration_rag import RagConfig
......@@ -372,8 +365,7 @@ class RagRetriever:
def __init__(self, config, question_encoder_tokenizer, generator_tokenizer, index=None, init_retrieval=True):
self._init_retrieval = init_retrieval
requires_datasets(self)
requires_faiss(self)
requires_backends(self, ["datasets", "faiss"])
super().__init__()
self.index = index or self._build_index(config)
self.generator_tokenizer = generator_tokenizer
......@@ -411,8 +403,7 @@ class RagRetriever:
@classmethod
def from_pretrained(cls, retriever_name_or_path, indexed_dataset=None, **kwargs):
requires_datasets(cls)
requires_faiss(cls)
requires_backends(cls, ["datasets", "faiss"])
config = kwargs.pop("config", None) or RagConfig.from_pretrained(retriever_name_or_path, **kwargs)
rag_tokenizer = RagTokenizer.from_pretrained(retriever_name_or_path, config=config)
question_encoder_tokenizer = rag_tokenizer.question_encoder
......
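
The RagRetriever hunk above is where the list form earns its keep: the two former calls requires_datasets(self) and requires_faiss(self) become a single requires_backends(self, ["datasets", "faiss"]). Below is a small self-contained sketch of the failure mode when both backends are missing; the mapping, guard, and class are local to the sketch and the error text is paraphrased.

from collections import OrderedDict

DATASETS_IMPORT_ERROR = "{0} requires the datasets library. Run `pip install datasets`.\n"
FAISS_IMPORT_ERROR = "{0} requires the faiss library. Run `pip install faiss-cpu`.\n"

# Both backends are reported as unavailable for the purpose of this sketch.
BACKENDS_MAPPING = OrderedDict(
    [
        ("datasets", (lambda: False, DATASETS_IMPORT_ERROR)),
        ("faiss", (lambda: False, FAISS_IMPORT_ERROR)),
    ]
)


def requires_backends(obj, backends):
    if not isinstance(backends, (list, tuple)):
        backends = [backends]
    name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
    if not all(BACKENDS_MAPPING[backend][0]() for backend in backends):
        raise ImportError("".join(BACKENDS_MAPPING[backend][1].format(name) for backend in backends))


class FakeRetriever:
    def __init__(self):
        # One call now covers what requires_datasets(self) + requires_faiss(self) did before.
        requires_backends(self, ["datasets", "faiss"])


try:
    FakeRetriever()
except ImportError as err:
    # The message concatenates the install hint for every missing backend.
    print(err)
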
......@@ -33,7 +33,7 @@ from ...file_utils import (
add_start_docstrings_to_model_forward,
is_scatter_available,
replace_return_docstrings,
requires_scatter,
requires_backends,
)
from ...modeling_outputs import BaseModelOutput, BaseModelOutputWithPooling, MaskedLMOutput, SequenceClassifierOutput
from ...modeling_utils import (
......@@ -792,7 +792,7 @@ class TapasModel(TapasPreTrainedModel):
"""
def __init__(self, config, add_pooling_layer=True):
requires_scatter(self)
requires_backends(self, "scatter")
super().__init__(config)
self.config = config
......
......@@ -2,7 +2,7 @@ import collections
import numpy as np
from ..file_utils import add_end_docstrings, is_torch_available, requires_pandas
from ..file_utils import add_end_docstrings, is_torch_available, requires_backends
from .base import PIPELINE_INIT_ARGS, ArgumentHandler, Pipeline, PipelineException
......@@ -24,7 +24,7 @@ class TableQuestionAnsweringArgumentHandler(ArgumentHandler):
# ...,
# {"table": pd.DataFrame, "query" : List[str]}
# ]
requires_pandas(self)
requires_backends(self, "pandas")
import pandas as pd
if table is None:
......
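
Everything from here down is autogenerated placeholder code: dummy_flax_objects.py and dummy_pt_objects.py are produced by the repository's `make fix-copies` / check_dummies tooling, and the commit message's "update check_dummies" refers to teaching that generator to emit the new requires_backends call. The generator itself is not part of this excerpt, so the following is only a hedged approximation of what such a template might look like; the template string, function name, and formatting are illustrative and not copied from the real check_dummies script.

# Hedged approximation of a dummy-class generator; template and function names
# are illustrative and do not come from the real check_dummies script.
DUMMY_CLASS_TEMPLATE = '''
class {name}:
    def __init__(self, *args, **kwargs):
        requires_backends(self, {backends!r})

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, {backends!r})
'''


def create_dummy_class(name, backends):
    """Render the source of one placeholder class gated on the given backends."""
    return DUMMY_CLASS_TEMPLATE.format(name=name, backends=list(backends))


if __name__ == "__main__":
    # Prints a stub similar in shape to the BertModel entry in dummy_pt_objects.py below.
    print(create_dummy_class("BertModel", ["torch"]))
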
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_flax
from ..file_utils import requires_backends
class FlaxPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
FLAX_MODEL_FOR_MASKED_LM_MAPPING = None
......@@ -37,153 +37,153 @@ FLAX_MODEL_MAPPING = None
class FlaxAutoModel:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxAutoModelForMaskedLM:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxAutoModelForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxAutoModelForNextSentencePrediction:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxAutoModelForPreTraining:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxAutoModelForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxAutoModelForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxAutoModelForTokenClassification:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxBertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxBertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxBertForNextSentencePrediction:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxBertForPreTraining:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxBertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxBertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxBertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxBertModel:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxBertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
class FlaxRobertaModel:
def __init__(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_flax(self)
requires_backends(self, ["flax"])
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_pytorch
from ..file_utils import requires_backends
class PyTorchBenchmark:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class PyTorchBenchmarkArguments:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DataCollator:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DataCollatorForLanguageModeling:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DataCollatorForPermutationLanguageModeling:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DataCollatorForSeq2Seq:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DataCollatorForSOP:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DataCollatorForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DataCollatorForWholeWordMask:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DataCollatorWithPadding:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def default_data_collator(*args, **kwargs):
requires_pytorch(default_data_collator)
requires_backends(default_data_collator, ["torch"])
class GlueDataset:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class GlueDataTrainingArguments:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LineByLineTextDataset:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LineByLineWithRefDataset:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LineByLineWithSOPTextDataset:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class SquadDataset:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class SquadDataTrainingArguments:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TextDataset:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TextDatasetForNextSentencePrediction:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BeamScorer:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BeamSearchScorer:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ForcedBOSTokenLogitsProcessor:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ForcedEOSTokenLogitsProcessor:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class HammingDiversityLogitsProcessor:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class InfNanRemoveLogitsProcessor:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LogitsProcessor:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LogitsProcessorList:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LogitsWarper:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MinLengthLogitsProcessor:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class NoBadWordsLogitsProcessor:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class NoRepeatNGramLogitsProcessor:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class PrefixConstrainedLogitsProcessor:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class RepetitionPenaltyLogitsProcessor:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TemperatureLogitsWarper:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TopKLogitsWarper:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TopPLogitsWarper:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MaxLengthCriteria:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MaxTimeCriteria:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class StoppingCriteria:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class StoppingCriteriaList:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def top_k_top_p_filtering(*args, **kwargs):
requires_pytorch(top_k_top_p_filtering)
requires_backends(top_k_top_p_filtering, ["torch"])
class Conv1D:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class PreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def apply_chunking_to_forward(*args, **kwargs):
requires_pytorch(apply_chunking_to_forward)
requires_backends(apply_chunking_to_forward, ["torch"])
def prune_layer(*args, **kwargs):
requires_pytorch(prune_layer)
requires_backends(prune_layer, ["torch"])
ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -249,74 +249,74 @@ ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class AlbertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AlbertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AlbertForPreTraining:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AlbertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AlbertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AlbertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AlbertModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AlbertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_albert(*args, **kwargs):
requires_pytorch(load_tf_weights_in_albert)
requires_backends(load_tf_weights_in_albert, ["torch"])
MODEL_FOR_CAUSAL_LM_MAPPING = None
......@@ -360,110 +360,110 @@ MODEL_WITH_LM_HEAD_MAPPING = None
class AutoModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AutoModelForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AutoModelForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AutoModelForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AutoModelForNextSentencePrediction:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AutoModelForPreTraining:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AutoModelForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AutoModelForSeq2SeqLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AutoModelForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AutoModelForTableQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AutoModelForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AutoModelWithLMHead:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
BART_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -471,61 +471,61 @@ BART_PRETRAINED_MODEL_ARCHIVE_LIST = None
class BartForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BartForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BartForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BartForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BartModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BartPretrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class PretrainedBartModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
BERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -533,107 +533,107 @@ BERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class BertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BertForNextSentencePrediction:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BertForPreTraining:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BertLayer:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BertLMHeadModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BertModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_bert(*args, **kwargs):
requires_pytorch(load_tf_weights_in_bert)
requires_backends(load_tf_weights_in_bert, ["torch"])
class BertGenerationDecoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BertGenerationEncoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_bert_generation(*args, **kwargs):
requires_pytorch(load_tf_weights_in_bert_generation)
requires_backends(load_tf_weights_in_bert_generation, ["torch"])
BIG_BIRD_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -641,84 +641,84 @@ BIG_BIRD_PRETRAINED_MODEL_ARCHIVE_LIST = None
class BigBirdForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BigBirdForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BigBirdForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BigBirdForPreTraining:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BigBirdForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BigBirdForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BigBirdForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BigBirdLayer:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BigBirdModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BigBirdPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_big_bird(*args, **kwargs):
requires_pytorch(load_tf_weights_in_big_bird)
requires_backends(load_tf_weights_in_big_bird, ["torch"])
BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -726,25 +726,25 @@ BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class BlenderbotForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BlenderbotForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BlenderbotModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -752,25 +752,25 @@ BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST = None
class BlenderbotSmallForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BlenderbotSmallForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class BlenderbotSmallModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -778,61 +778,61 @@ CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class CamembertForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class CamembertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class CamembertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class CamembertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class CamembertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class CamembertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class CamembertModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -840,74 +840,74 @@ CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class ConvBertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ConvBertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ConvBertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ConvBertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ConvBertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ConvBertLayer:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ConvBertModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ConvBertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_convbert(*args, **kwargs):
requires_pytorch(load_tf_weights_in_convbert)
requires_backends(load_tf_weights_in_convbert, ["torch"])
CTRL_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -915,38 +915,38 @@ CTRL_PRETRAINED_MODEL_ARCHIVE_LIST = None
class CTRLForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class CTRLLMHeadModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class CTRLModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class CTRLPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -954,56 +954,56 @@ DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None
class DebertaForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DebertaForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DebertaForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DebertaForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DebertaModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DebertaPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
DEBERTA_V2_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1011,56 +1011,56 @@ DEBERTA_V2_PRETRAINED_MODEL_ARCHIVE_LIST = None
class DebertaV2ForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DebertaV2ForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DebertaV2ForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DebertaV2ForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DebertaV2Model:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DebertaV2PreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1068,65 +1068,65 @@ DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class DistilBertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DistilBertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DistilBertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DistilBertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DistilBertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DistilBertModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DistilBertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1140,32 +1140,32 @@ DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST = None
class DPRContextEncoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DPRPretrainedContextEncoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DPRPretrainedQuestionEncoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DPRPretrainedReader:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DPRQuestionEncoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class DPRReader:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1173,83 +1173,83 @@ ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST = None
class ElectraForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ElectraForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ElectraForPreTraining:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ElectraForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ElectraForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ElectraForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ElectraModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ElectraPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_electra(*args, **kwargs):
requires_pytorch(load_tf_weights_in_electra)
requires_backends(load_tf_weights_in_electra, ["torch"])
class EncoderDecoderModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1257,92 +1257,92 @@ FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class FlaubertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FlaubertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FlaubertForQuestionAnsweringSimple:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FlaubertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FlaubertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FlaubertModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FlaubertWithLMHeadModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FSMTForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FSMTModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class PretrainedFSMTModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1350,74 +1350,74 @@ FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST = None
class FunnelBaseModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FunnelForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FunnelForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FunnelForPreTraining:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FunnelForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FunnelForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FunnelForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class FunnelModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_funnel(*args, **kwargs):
requires_pytorch(load_tf_weights_in_funnel)
requires_backends(load_tf_weights_in_funnel, ["torch"])
GPT2_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1425,51 +1425,51 @@ GPT2_PRETRAINED_MODEL_ARCHIVE_LIST = None
class GPT2DoubleHeadsModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class GPT2ForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class GPT2LMHeadModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class GPT2Model:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class GPT2PreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_gpt2(*args, **kwargs):
requires_pytorch(load_tf_weights_in_gpt2)
requires_backends(load_tf_weights_in_gpt2, ["torch"])
GPT_NEO_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1477,29 +1477,29 @@ GPT_NEO_PRETRAINED_MODEL_ARCHIVE_LIST = None
class GPTNeoForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class GPTNeoModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class GPTNeoPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_gpt_neo(*args, **kwargs):
requires_pytorch(load_tf_weights_in_gpt_neo)
requires_backends(load_tf_weights_in_gpt_neo, ["torch"])
IBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1507,65 +1507,65 @@ IBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class IBertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class IBertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class IBertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class IBertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class IBertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class IBertModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class IBertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -1573,38 +1573,38 @@ LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST = None
class LayoutLMForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LayoutLMForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LayoutLMForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LayoutLMModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
LED_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -1612,38 +1612,38 @@ LED_PRETRAINED_MODEL_ARCHIVE_LIST = None
class LEDForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LEDForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LEDForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LEDModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -1651,108 +1651,108 @@ LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST = None
class LongformerForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LongformerForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LongformerForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LongformerForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LongformerForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LongformerModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LongformerSelfAttention:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LxmertEncoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LxmertForPreTraining:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LxmertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LxmertModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LxmertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LxmertVisualFeatureEncoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class LxmertXLayer:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
M2M_100_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -1760,103 +1760,103 @@ M2M_100_PRETRAINED_MODEL_ARCHIVE_LIST = None
class M2M100ForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class M2M100Model:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MarianForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MarianModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MarianMTModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MBartForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MBartForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MBartForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MBartForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MBartModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MMBTForClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MMBTModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ModalEmbeddings:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -1864,84 +1864,84 @@ MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class MobileBertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MobileBertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MobileBertForNextSentencePrediction:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MobileBertForPreTraining:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MobileBertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MobileBertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MobileBertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MobileBertLayer:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MobileBertModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MobileBertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_mobilebert(*args, **kwargs):
requires_pytorch(load_tf_weights_in_mobilebert)
requires_backends(load_tf_weights_in_mobilebert, ["torch"])
MPNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -1949,97 +1949,97 @@ MPNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
class MPNetForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MPNetForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MPNetForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MPNetForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MPNetForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MPNetLayer:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MPNetModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MPNetPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MT5EncoderModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MT5ForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class MT5Model:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2047,74 +2047,74 @@ OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class OpenAIGPTDoubleHeadsModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class OpenAIGPTForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class OpenAIGPTLMHeadModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class OpenAIGPTModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class OpenAIGPTPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_openai_gpt(*args, **kwargs):
requires_pytorch(load_tf_weights_in_openai_gpt)
requires_backends(load_tf_weights_in_openai_gpt, ["torch"])
class PegasusForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class PegasusForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class PegasusModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2122,63 +2122,63 @@ PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
class ProphetNetDecoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ProphetNetEncoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ProphetNetForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ProphetNetForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ProphetNetModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ProphetNetPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class RagModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class RagSequenceForGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class RagTokenForGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2186,57 +2186,57 @@ REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST = None
class ReformerAttention:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ReformerForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ReformerForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ReformerForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ReformerLayer:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ReformerModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ReformerModelWithLMHead:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2244,20 +2244,20 @@ RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class RetriBertModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class RetriBertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2265,61 +2265,61 @@ ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None
class RobertaForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class RobertaForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class RobertaForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class RobertaForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class RobertaForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class RobertaForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class RobertaModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2327,20 +2327,20 @@ SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class Speech2TextForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class Speech2TextModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
SQUEEZEBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2348,70 +2348,70 @@ SQUEEZEBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class SqueezeBertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class SqueezeBertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class SqueezeBertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class SqueezeBertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class SqueezeBertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class SqueezeBertModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class SqueezeBertModule:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class SqueezeBertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
T5_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2419,42 +2419,42 @@ T5_PRETRAINED_MODEL_ARCHIVE_LIST = None
class T5EncoderModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class T5ForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class T5Model:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class T5PreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_t5(*args, **kwargs):
requires_pytorch(load_tf_weights_in_t5)
requires_backends(load_tf_weights_in_t5, ["torch"])
TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2462,38 +2462,38 @@ TAPAS_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TapasForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TapasForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TapasForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TapasModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2501,47 +2501,47 @@ TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST = None
class AdaptiveEmbedding:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TransfoXLForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TransfoXLLMHeadModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TransfoXLModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class TransfoXLPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_transfo_xl(*args, **kwargs):
requires_pytorch(load_tf_weights_in_transfo_xl)
requires_backends(load_tf_weights_in_transfo_xl, ["torch"])
VIT_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2549,25 +2549,25 @@ VIT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class ViTForImageClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ViTModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class ViTPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
WAV_2_VEC_2_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2575,34 +2575,34 @@ WAV_2_VEC_2_PRETRAINED_MODEL_ARCHIVE_LIST = None
class Wav2Vec2ForCTC:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class Wav2Vec2ForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class Wav2Vec2Model:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class Wav2Vec2PreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
XLM_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2610,74 +2610,74 @@ XLM_PRETRAINED_MODEL_ARCHIVE_LIST = None
class XLMForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMForQuestionAnsweringSimple:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMWithLMHeadModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
XLM_PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2685,35 +2685,35 @@ XLM_PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
class XLMProphetNetDecoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMProphetNetEncoder:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMProphetNetForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMProphetNetForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMProphetNetModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2721,61 +2721,61 @@ XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None
class XLMRobertaForCausalLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMRobertaForMaskedLM:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMRobertaForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMRobertaForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMRobertaForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMRobertaForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLMRobertaModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
XLNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -2783,127 +2783,127 @@ XLNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
class XLNetForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLNetForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLNetForQuestionAnsweringSimple:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLNetForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLNetForTokenClassification:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLNetLMHeadModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLNetModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class XLNetPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def load_tf_weights_in_xlnet(*args, **kwargs):
requires_pytorch(load_tf_weights_in_xlnet)
requires_backends(load_tf_weights_in_xlnet, ["torch"])
class Adafactor:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
class AdamW:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def get_constant_schedule(*args, **kwargs):
requires_pytorch(get_constant_schedule)
requires_backends(get_constant_schedule, ["torch"])
def get_constant_schedule_with_warmup(*args, **kwargs):
requires_pytorch(get_constant_schedule_with_warmup)
requires_backends(get_constant_schedule_with_warmup, ["torch"])
def get_cosine_schedule_with_warmup(*args, **kwargs):
requires_pytorch(get_cosine_schedule_with_warmup)
requires_backends(get_cosine_schedule_with_warmup, ["torch"])
def get_cosine_with_hard_restarts_schedule_with_warmup(*args, **kwargs):
requires_pytorch(get_cosine_with_hard_restarts_schedule_with_warmup)
requires_backends(get_cosine_with_hard_restarts_schedule_with_warmup, ["torch"])
def get_linear_schedule_with_warmup(*args, **kwargs):
requires_pytorch(get_linear_schedule_with_warmup)
requires_backends(get_linear_schedule_with_warmup, ["torch"])
def get_polynomial_decay_schedule_with_warmup(*args, **kwargs):
requires_pytorch(get_polynomial_decay_schedule_with_warmup)
requires_backends(get_polynomial_decay_schedule_with_warmup, ["torch"])
def get_scheduler(*args, **kwargs):
requires_pytorch(get_scheduler)
requires_backends(get_scheduler, ["torch"])
class Trainer:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
def torch_distributed_zero_first(*args, **kwargs):
requires_pytorch(torch_distributed_zero_first)
requires_backends(torch_distributed_zero_first, ["torch"])
class Seq2SeqTrainer:
def __init__(self, *args, **kwargs):
requires_pytorch(self)
requires_backends(self, ["torch"])
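All of the PyTorch dummies above now route through a single generic guard instead of the old per-backend requires_pytorch. A minimal sketch of what such a helper could look like, assuming a hypothetical BACKENDS_MAPPING of availability checks and error templates (the real file_utils.py implementation may differ in names and wording):

# Hypothetical sketch of the generic backend guard that replaces requires_pytorch,
# requires_tf, requires_sentencepiece, etc. Not the exact library implementation.
from collections import OrderedDict


def is_torch_available():
    try:
        import torch  # noqa: F401

        return True
    except ImportError:
        return False


BACKENDS_MAPPING = OrderedDict(
    [
        ("torch", (is_torch_available, "{0} requires the PyTorch library but it was not found in your environment.")),
        # entries for "tf", "sentencepiece", "tokenizers", "speech", "vision" would follow the same shape
    ]
)


def requires_backends(obj, backends):
    # Accept either a single backend name or a list of names.
    if not isinstance(backends, (list, tuple)):
        backends = [backends]
    # Works for functions (which have __name__) and for dummy instances (fall back to the class name).
    name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
    failed = [template.format(name) for check, template in (BACKENDS_MAPPING[b] for b in backends) if not check()]
    if failed:
        raise ImportError("".join(failed))

One lookup table keyed by backend name is what makes combined requirements such as ["sentencepiece", "speech"] possible without adding a new requires_xxx_and_yyy helper for every pair.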
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_backends
class Speech2TextProcessor:
def __init__(self, *args, **kwargs):
requires_backends(self, ["sentencepiece", "speech"])
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_backends
SLOW_TO_FAST_CONVERTERS = None
def convert_slow_tokenizer(*args, **kwargs):
requires_backends(convert_slow_tokenizer, ["sentencepiece", "tokenizers"])
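Because the dummy above guards two backends at once, calling it in an environment that lacks both raises a single ImportError naming every missing dependency up front. A self-contained illustration of that behaviour, with the availability checks stubbed out to simulate the missing backends (names taken from the diff, error wording hypothetical):

# Stand-alone demo of the multi-backend dummy pattern; the stubbed guard pretends
# that neither sentencepiece nor tokenizers is installed.
def requires_backends(obj, backends):
    name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
    missing = [b for b in backends if b in ("sentencepiece", "tokenizers")]  # simulated absences
    if missing:
        raise ImportError(f"{name} requires the following backends that were not found: {', '.join(missing)}")


def convert_slow_tokenizer(*args, **kwargs):
    requires_backends(convert_slow_tokenizer, ["sentencepiece", "tokenizers"])


try:
    convert_slow_tokenizer("bert-base-cased")
except ImportError as err:
    print(err)  # -> convert_slow_tokenizer requires the following backends that were not found: sentencepiece, tokenizers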
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_sentencepiece
from ..file_utils import requires_backends
class AlbertTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class BarthezTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class BertGenerationTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class CamembertTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class DebertaV2Tokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class M2M100Tokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class MarianTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class MBart50Tokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class MBartTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class MT5Tokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class PegasusTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class ReformerTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class Speech2TextTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class T5Tokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class XLMProphetNetTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class XLMRobertaTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
class XLNetTokenizer:
def __init__(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_sentencepiece(self)
requires_backends(self, ["sentencepiece"])
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_speech
from ..file_utils import requires_backends
class Speech2TextFeatureExtractor:
def __init__(self, *args, **kwargs):
requires_speech(self)
class Speech2TextProcessor:
def __init__(self, *args, **kwargs):
requires_speech(self)
requires_backends(self, ["speech"])
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_tf
from ..file_utils import requires_backends
class TensorFlowBenchmarkArguments:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TensorFlowBenchmark:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
def tf_top_k_top_p_filtering(*args, **kwargs):
requires_tf(tf_top_k_top_p_filtering)
requires_backends(tf_top_k_top_p_filtering, ["tf"])
TF_LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -21,75 +21,75 @@ TF_LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFLayoutLMForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLayoutLMForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLayoutLMForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLayoutLMMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLayoutLMModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLayoutLMPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFSequenceSummary:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFSharedEmbeddings:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
def shape_list(*args, **kwargs):
requires_tf(shape_list)
requires_backends(shape_list, ["tf"])
TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -97,75 +97,75 @@ TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFAlbertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAlbertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAlbertForPreTraining:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAlbertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAlbertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAlbertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAlbertMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAlbertModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAlbertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_MODEL_FOR_CAUSAL_LM_MAPPING = None
@@ -203,119 +203,119 @@ TF_MODEL_WITH_LM_HEAD_MAPPING = None
class TFAutoModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAutoModelForCausalLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAutoModelForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAutoModelForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAutoModelForPreTraining:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAutoModelForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAutoModelForSeq2SeqLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAutoModelForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAutoModelForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFAutoModelWithLMHead:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBartForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBartModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBartPretrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -323,130 +323,130 @@ TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFBertEmbeddings:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBertForNextSentencePrediction:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBertForPreTraining:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBertLMHeadModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBertMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBertModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBlenderbotForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBlenderbotModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBlenderbotSmallForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFBlenderbotSmallModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -454,56 +454,56 @@ TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFCamembertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFCamembertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFCamembertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFCamembertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFCamembertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFCamembertModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
@@ -511,70 +511,70 @@ TF_CONVBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFConvBertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFConvBertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFConvBertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFConvBertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFConvBertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFConvBertLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFConvBertModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFConvBertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_CTRL_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -582,38 +582,38 @@ TF_CTRL_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFCTRLForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFCTRLLMHeadModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFCTRLModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFCTRLPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -621,70 +621,70 @@ TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFDistilBertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDistilBertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDistilBertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDistilBertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDistilBertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDistilBertMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDistilBertModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDistilBertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -698,32 +698,32 @@ TF_DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFDPRContextEncoder:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDPRPretrainedContextEncoder:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDPRPretrainedQuestionEncoder:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDPRPretrainedReader:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDPRQuestionEncoder:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFDPRReader:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -731,70 +731,70 @@ TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFElectraForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFElectraForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFElectraForPreTraining:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFElectraForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFElectraForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFElectraForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFElectraModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFElectraPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -802,56 +802,56 @@ TF_FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFFlaubertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFlaubertForQuestionAnsweringSimple:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFlaubertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFlaubertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFlaubertModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFlaubertWithLMHeadModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -859,70 +859,70 @@ TF_FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFFunnelBaseModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFunnelForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFunnelForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFunnelForPreTraining:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFunnelForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFunnelForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFunnelForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFFunnelModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_GPT2_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -930,79 +930,79 @@ TF_GPT2_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFGPT2DoubleHeadsModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFGPT2ForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFGPT2LMHeadModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFGPT2MainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFGPT2Model:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFGPT2PreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLEDForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLEDModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLEDPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1010,61 +1010,61 @@ TF_LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFLongformerForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLongformerForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLongformerForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLongformerForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLongformerForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLongformerModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLongformerSelfAttention:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1072,71 +1072,71 @@ TF_LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFLxmertForPreTraining:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLxmertMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLxmertModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLxmertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFLxmertVisualFeatureEncoder:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMarianModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMarianMTModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMBartForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMBartModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1144,80 +1144,80 @@ TF_MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFMobileBertForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMobileBertForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMobileBertForNextSentencePrediction:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMobileBertForPreTraining:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMobileBertForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMobileBertForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMobileBertForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMobileBertMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMobileBertModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMobileBertPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1225,97 +1225,97 @@ TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFMPNetForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMPNetForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMPNetForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMPNetForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMPNetForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMPNetMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMPNetModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMPNetPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMT5EncoderModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMT5ForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFMT5Model:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1323,89 +1323,89 @@ TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFOpenAIGPTDoubleHeadsModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFOpenAIGPTForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFOpenAIGPTLMHeadModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFOpenAIGPTMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFOpenAIGPTModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFOpenAIGPTPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFPegasusForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFPegasusModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFRagModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFRagSequenceForGeneration:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFRagTokenForGeneration:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1413,70 +1413,70 @@ TF_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFRobertaForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFRobertaForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFRobertaForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFRobertaForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFRobertaForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFRobertaMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFRobertaModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFRobertaPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_T5_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1484,38 +1484,38 @@ TF_T5_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFT5EncoderModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFT5ForConditionalGeneration:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFT5Model:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFT5PreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1523,48 +1523,48 @@ TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFAdaptiveEmbedding:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFTransfoXLForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFTransfoXLLMHeadModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFTransfoXLMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFTransfoXLModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFTransfoXLPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_XLM_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1572,70 +1572,70 @@ TF_XLM_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFXLMForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMForQuestionAnsweringSimple:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMWithLMHeadModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1643,56 +1643,56 @@ TF_XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFXLMRobertaForMaskedLM:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMRobertaForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMRobertaForQuestionAnswering:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMRobertaForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMRobertaForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLMRobertaModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
TF_XLNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
......@@ -1700,91 +1700,91 @@ TF_XLNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
class TFXLNetForMultipleChoice:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLNetForQuestionAnsweringSimple:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLNetForSequenceClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLNetForTokenClassification:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLNetLMHeadModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLNetMainLayer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLNetModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class TFXLNetPreTrainedModel:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class AdamWeightDecay:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class GradientAccumulator:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
class WarmUp:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
def create_optimizer(*args, **kwargs):
requires_tf(create_optimizer)
requires_backends(create_optimizer, ["tf"])
class TFTrainer:
def __init__(self, *args, **kwargs):
requires_tf(self)
requires_backends(self, ["tf"])
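# Illustration, not part of the patch: a minimal sketch of what the generic
# `requires_backends` helper in file_utils plausibly does. Only the call
# signature `requires_backends(obj, ["tf"])` is taken from the diff itself;
# the mapping name, availability check and error message below are assumptions.
def _is_tf_available():
    try:
        import tensorflow  # noqa: F401
        return True
    except ImportError:
        return False

_BACKEND_CHECKS = {
    "tf": (_is_tf_available, "{0} requires the TensorFlow library, which was not found in your environment."),
}

def requires_backends(obj, backends):
    # Accept a single backend name or a list of them.
    if not isinstance(backends, (list, tuple)):
        backends = [backends]
    # Dummy classes pass `self`, dummy functions pass themselves; recover a readable name either way.
    name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
    for backend in backends:
        is_available, message = _BACKEND_CHECKS[backend]
        if not is_available():
            raise ImportError(message.format(name))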
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_tokenizers
from ..file_utils import requires_backends
class AlbertTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class BartTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class BarthezTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class BertTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class CamembertTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class ConvBertTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class DistilBertTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class DPRContextEncoderTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class DPRQuestionEncoderTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class DPRReaderTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class ElectraTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class FunnelTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class GPT2TokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class HerbertTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class LayoutLMTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class LEDTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class LongformerTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class LxmertTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class MBart50TokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class MBartTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class MobileBertTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class MPNetTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class MT5TokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class OpenAIGPTTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class PegasusTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class ReformerTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class RetriBertTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class RobertaTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class SqueezeBertTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class T5TokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class XLMRobertaTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class XLNetTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
class PreTrainedTokenizerFast:
def __init__(self, *args, **kwargs):
requires_tokenizers(self)
requires_backends(self, ["tokenizers"])
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_tokenizers(self)
SLOW_TO_FAST_CONVERTERS = None
def convert_slow_tokenizer(*args, **kwargs):
requires_tokenizers(convert_slow_tokenizer)
requires_backends(self, ["tokenizers"])
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..file_utils import requires_vision
from ..file_utils import requires_backends
class ImageFeatureExtractionMixin:
def __init__(self, *args, **kwargs):
requires_vision(self)
requires_backends(self, ["vision"])
class ViTFeatureExtractor:
def __init__(self, *args, **kwargs):
requires_vision(self)
requires_backends(self, ["vision"])
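# Illustration, not part of the patch: how a generated dummy behaves when the
# "vision" backend is missing. `import transformers` still succeeds; the
# placeholder only fails when it is actually used. The ImportError type and
# the exact message are assumptions.
try:
    ViTFeatureExtractor()
except ImportError as err:
    print(err)  # e.g. "ViTFeatureExtractor requires the vision backend ..."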
......@@ -22,11 +22,11 @@ import re
# python utils/check_dummies.py
PATH_TO_TRANSFORMERS = "src/transformers"
# Matches is_xxx_available()
_re_backend = re.compile(r"is\_([a-z]*)_available()")
# Matches from xxx import bla
_re_single_line_import = re.compile(r"\s+from\s+\S*\s+import\s+([^\(\s].*)\n")
_re_test_backend = re.compile(r"^\s+if\s+is\_([a-z]*)\_available\(\):\s*$")
BACKENDS = ["torch", "tf", "flax", "sentencepiece", "speech", "tokenizers", "vision"]
_re_test_backend = re.compile(r"^\s+if\s+is\_[a-z]*\_available\(\)")
DUMMY_CONSTANT = """
......@@ -36,25 +36,34 @@ DUMMY_CONSTANT = """
DUMMY_PRETRAINED_CLASS = """
class {0}:
def __init__(self, *args, **kwargs):
requires_{1}(self)
requires_backends(self, {1})
@classmethod
def from_pretrained(self, *args, **kwargs):
requires_{1}(self)
requires_backends(self, {1})
"""
DUMMY_CLASS = """
class {0}:
def __init__(self, *args, **kwargs):
requires_{1}(self)
requires_backends(self, {1})
"""
DUMMY_FUNCTION = """
def {0}(*args, **kwargs):
requires_{1}({0})
requires_backends({0}, {1})
"""
def find_backend(line):
"""Find one (or multiple) backend in a code line of the init."""
if _re_test_backend.search(line) is None:
return None
backends = [b[0] for b in _re_backend.findall(line)]
backends.sort()
return "_and_".join(backends)
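# Illustration, not part of the patch: expected behaviour of find_backend on
# guard lines from the main __init__. A multi-backend guard collapses into a
# single sorted, "_and_"-joined key, which later names the dummy file.
assert find_backend("    if is_tokenizers_available():") == "tokenizers"
assert (
    find_backend("    if is_sentencepiece_available() and is_tokenizers_available():")
    == "sentencepiece_and_tokenizers"
)
assert find_backend("    import os") is None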
def read_init():
""" Read the init and extracts PyTorch, TensorFlow, SentencePiece and Tokenizers objects. """
with open(os.path.join(PATH_TO_TRANSFORMERS, "__init__.py"), "r", encoding="utf-8", newline="\n") as f:
......@@ -69,14 +78,10 @@ def read_init():
# Go through the end of the file
while line_index < len(lines):
# If the line is an if is_backend_available, we grab all objects associated.
if _re_test_backend.search(lines[line_index]) is not None:
backend = _re_test_backend.search(lines[line_index]).groups()[0]
backend = find_backend(lines[line_index])
if backend is not None:
line_index += 1
# Ignore if backend isn't tracked for dummies.
if backend not in BACKENDS:
continue
objects = []
# Until we unindent, add backend objects to the list
while len(lines[line_index]) <= 1 or lines[line_index].startswith(" " * 8):
......@@ -128,13 +133,12 @@ def create_dummy_files():
""" Create the content of the dummy files. """
backend_specific_objects = read_init()
# For special correspondence backend to module name as used in the function requires_modulename
module_names = {"torch": "pytorch"}
dummy_files = {}
for backend, objects in backend_specific_objects.items():
backend_name = module_names.get(backend, backend)
backend_name = "[" + ", ".join(f'"{b}"' for b in backend.split("_and_")) + "]"
dummy_file = "# This file is autogenerated by the command `make fix-copies`, do not edit.\n"
dummy_file += f"from ..file_utils import requires_{backend_name}\n\n"
dummy_file += "from ..file_utils import requires_backends\n\n"
dummy_file += "\n".join([create_dummy_object(o, backend_name) for o in objects])
dummy_files[backend] = dummy_file
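# Illustration, not part of the patch: what the new backend_name expands to
# for a combined backend key (the object name below is just an example).
backend = "sentencepiece_and_tokenizers"
backend_name = "[" + ", ".join(f'"{b}"' for b in backend.split("_and_")) + "]"
assert backend_name == '["sentencepiece", "tokenizers"]'
# DUMMY_FUNCTION.format("convert_slow_tokenizer", backend_name) then renders:
#     def convert_slow_tokenizer(*args, **kwargs):
#         requires_backends(convert_slow_tokenizer, ["sentencepiece", "tokenizers"])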
......@@ -156,8 +160,11 @@ def check_dummies(overwrite=False):
actual_dummies = {}
for backend, file_path in dummy_file_paths.items():
with open(file_path, "r", encoding="utf-8", newline="\n") as f:
actual_dummies[backend] = f.read()
if os.path.isfile(file_path):
with open(file_path, "r", encoding="utf-8", newline="\n") as f:
actual_dummies[backend] = f.read()
else:
actual_dummies[backend] = ""
for backend in dummy_files.keys():
if dummy_files[backend] != actual_dummies[backend]:
......
......@@ -18,12 +18,14 @@ import re
PATH_TO_TRANSFORMERS = "src/transformers"
BACKENDS = ["torch", "tf", "flax", "sentencepiece", "speech", "tokenizers", "vision"]
# Matches is_xxx_available()
_re_backend = re.compile(r"is\_([a-z]*)_available()")
# Catches a line with a key-values pattern: "bla": ["foo", "bar"]
_re_import_struct_key_value = re.compile(r'\s+"\S*":\s+\[([^\]]*)\]')
# Catches a line if is_foo_available
_re_test_backend = re.compile(r"^\s*if\s+is\_([a-z]*)\_available\(\):\s*$")
_re_test_backend = re.compile(r"^\s*if\s+is\_[a-z]*\_available\(\)")
# Catches a line _import_struct["bla"].append("foo")
_re_import_struct_add_one = re.compile(r'^\s*_import_structure\["\S*"\]\.append\("(\S*)"\)')
# Catches a line _import_struct["bla"].extend(["foo", "bar"]) or _import_struct["bla"] = ["foo", "bar"]
......@@ -36,6 +38,15 @@ _re_between_brackets = re.compile("^\s+\[([^\]]+)\]")
_re_import = re.compile(r"\s+from\s+\S*\s+import\s+([^\(\s].*)\n")
def find_backend(line):
"""Find one (or multiple) backend in a code line of the init."""
if _re_test_backend.search(line) is None:
return None
backends = [b[0] for b in _re_backend.findall(line)]
backends.sort()
return "_and_".join(backends)
def parse_init(init_file):
"""
Read an init_file and parse (per backend) the _import_structure objects defined and the TYPE_CHECKING objects
......@@ -54,7 +65,7 @@ def parse_init(init_file):
# First grab the objects without a specific backend in _import_structure
objects = []
while not lines[line_index].startswith("if TYPE_CHECKING") and _re_test_backend.search(lines[line_index]) is None:
while not lines[line_index].startswith("if TYPE_CHECKING") and find_backend(lines[line_index]) is None:
line = lines[line_index]
single_line_import_search = _re_import_struct_key_value.search(line)
if single_line_import_search is not None:
......@@ -68,14 +79,10 @@ def parse_init(init_file):
# Let's continue with backend-specific objects in _import_structure
while not lines[line_index].startswith("if TYPE_CHECKING"):
# If the line is an if is_backend_available, we grab all objects associated.
if _re_test_backend.search(lines[line_index]) is not None:
backend = _re_test_backend.search(lines[line_index]).groups()[0]
backend = find_backend(lines[line_index])
if backend is not None:
line_index += 1
# Ignore if backend isn't tracked for dummies.
if backend not in BACKENDS:
continue
objects = []
# Until we unindent, add backend objects to the list
while len(lines[line_index]) <= 1 or lines[line_index].startswith(" " * 4):
......@@ -106,7 +113,7 @@ def parse_init(init_file):
objects = []
while (
line_index < len(lines)
and _re_test_backend.search(lines[line_index]) is None
and find_backend(lines[line_index]) is None
and not lines[line_index].startswith("else")
):
line = lines[line_index]
......@@ -121,14 +128,10 @@ def parse_init(init_file):
# Let's continue with backend-specific objects
while line_index < len(lines):
# If the line is an if is_backend_available, we grab all objects associated.
if _re_test_backend.search(lines[line_index]) is not None:
backend = _re_test_backend.search(lines[line_index]).groups()[0]
backend = find_backend(lines[line_index])
if backend is not None:
line_index += 1
# Ignore if backend isn't tracked for dummies.
if backend not in BACKENDS:
continue
objects = []
# Until we unindent, add backend objects to the list
while len(lines[line_index]) <= 1 or lines[line_index].startswith(" " * 8):
......