Unverified Commit 1bdf4240 authored by Sylvain Gugger's avatar Sylvain Gugger Committed by GitHub
Browse files

Fast imports part 3 (#9474)

* New intermediate inits

* Update template

* Avoid importing torch/tf/flax in tokenization unless necessary

* Styling

* Shutup flake8

* Better python version check
parent 79bbcc52
...@@ -51,7 +51,7 @@ from .utils import logging ...@@ -51,7 +51,7 @@ from .utils import logging
# The package importlib_metadata is in a different place, depending on the python version. # The package importlib_metadata is in a different place, depending on the python version.
if version.parse(sys.version) < version.parse("3.8"): if sys.version_info < (3, 8):
import importlib_metadata import importlib_metadata
else: else:
import importlib.metadata as importlib_metadata import importlib.metadata as importlib_metadata
......
...@@ -16,40 +16,107 @@ ...@@ -16,40 +16,107 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig
from ...file_utils import (
_BaseLazyModule,
is_sentencepiece_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
_import_structure = {
"configuration_albert": ["ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "AlbertConfig"],
}
if is_sentencepiece_available(): if is_sentencepiece_available():
from .tokenization_albert import AlbertTokenizer _import_structure["tokenization_albert"] = ["AlbertTokenizer"]
if is_tokenizers_available(): if is_tokenizers_available():
from .tokenization_albert_fast import AlbertTokenizerFast _import_structure["tokenization_albert_fast"] = ["AlbertTokenizerFast"]
if is_torch_available(): if is_torch_available():
from .modeling_albert import ( _import_structure["modeling_albert"] = [
ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST, "ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
AlbertForMaskedLM, "AlbertForMaskedLM",
AlbertForMultipleChoice, "AlbertForMultipleChoice",
AlbertForPreTraining, "AlbertForPreTraining",
AlbertForQuestionAnswering, "AlbertForQuestionAnswering",
AlbertForSequenceClassification, "AlbertForSequenceClassification",
AlbertForTokenClassification, "AlbertForTokenClassification",
AlbertModel, "AlbertModel",
AlbertPreTrainedModel, "AlbertPreTrainedModel",
load_tf_weights_in_albert, "load_tf_weights_in_albert",
) ]
if is_tf_available(): if is_tf_available():
from .modeling_tf_albert import ( _import_structure["modeling_tf_albert"] = [
TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST, "TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
TFAlbertForMaskedLM, "TFAlbertForMaskedLM",
TFAlbertForMultipleChoice, "TFAlbertForMultipleChoice",
TFAlbertForPreTraining, "TFAlbertForPreTraining",
TFAlbertForQuestionAnswering, "TFAlbertForQuestionAnswering",
TFAlbertForSequenceClassification, "TFAlbertForSequenceClassification",
TFAlbertForTokenClassification, "TFAlbertForTokenClassification",
TFAlbertMainLayer, "TFAlbertMainLayer",
TFAlbertModel, "TFAlbertModel",
TFAlbertPreTrainedModel, "TFAlbertPreTrainedModel",
) ]
if TYPE_CHECKING:
from .configuration_albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig
if is_sentencepiece_available():
from .tokenization_albert import AlbertTokenizer
if is_tokenizers_available():
from .tokenization_albert_fast import AlbertTokenizerFast
if is_torch_available():
from .modeling_albert import (
ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
AlbertForMaskedLM,
AlbertForMultipleChoice,
AlbertForPreTraining,
AlbertForQuestionAnswering,
AlbertForSequenceClassification,
AlbertForTokenClassification,
AlbertModel,
AlbertPreTrainedModel,
load_tf_weights_in_albert,
)
if is_tf_available():
from .modeling_tf_albert import (
TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFAlbertForMaskedLM,
TFAlbertForMultipleChoice,
TFAlbertForPreTraining,
TFAlbertForQuestionAnswering,
TFAlbertForSequenceClassification,
TFAlbertForTokenClassification,
TFAlbertMainLayer,
TFAlbertModel,
TFAlbertPreTrainedModel,
)
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,63 +16,147 @@ ...@@ -16,63 +16,147 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_flax_available, is_tf_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_auto import ALL_PRETRAINED_CONFIG_ARCHIVE_MAP, CONFIG_MAPPING, MODEL_NAMES_MAPPING, AutoConfig
from .tokenization_auto import TOKENIZER_MAPPING, AutoTokenizer
from ...file_utils import _BaseLazyModule, is_flax_available, is_tf_available, is_torch_available
_import_structure = {
"configuration_auto": ["ALL_PRETRAINED_CONFIG_ARCHIVE_MAP", "CONFIG_MAPPING", "MODEL_NAMES_MAPPING", "AutoConfig"],
"tokenization_auto": ["TOKENIZER_MAPPING", "AutoTokenizer"],
}
if is_torch_available(): if is_torch_available():
from .modeling_auto import ( _import_structure["modeling_auto"] = [
MODEL_FOR_CAUSAL_LM_MAPPING, "MODEL_FOR_CAUSAL_LM_MAPPING",
MODEL_FOR_MASKED_LM_MAPPING, "MODEL_FOR_MASKED_LM_MAPPING",
MODEL_FOR_MULTIPLE_CHOICE_MAPPING, "MODEL_FOR_MULTIPLE_CHOICE_MAPPING",
MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING, "MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING",
MODEL_FOR_PRETRAINING_MAPPING, "MODEL_FOR_PRETRAINING_MAPPING",
MODEL_FOR_QUESTION_ANSWERING_MAPPING, "MODEL_FOR_QUESTION_ANSWERING_MAPPING",
MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, "MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING",
MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING, "MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING",
MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING, "MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING",
MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING, "MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING",
MODEL_MAPPING, "MODEL_MAPPING",
MODEL_WITH_LM_HEAD_MAPPING, "MODEL_WITH_LM_HEAD_MAPPING",
AutoModel, "AutoModel",
AutoModelForCausalLM, "AutoModelForCausalLM",
AutoModelForMaskedLM, "AutoModelForMaskedLM",
AutoModelForMultipleChoice, "AutoModelForMultipleChoice",
AutoModelForNextSentencePrediction, "AutoModelForNextSentencePrediction",
AutoModelForPreTraining, "AutoModelForPreTraining",
AutoModelForQuestionAnswering, "AutoModelForQuestionAnswering",
AutoModelForSeq2SeqLM, "AutoModelForSeq2SeqLM",
AutoModelForSequenceClassification, "AutoModelForSequenceClassification",
AutoModelForTableQuestionAnswering, "AutoModelForTableQuestionAnswering",
AutoModelForTokenClassification, "AutoModelForTokenClassification",
AutoModelWithLMHead, "AutoModelWithLMHead",
) ]
if is_tf_available(): if is_tf_available():
from .modeling_tf_auto import ( _import_structure["modeling_tf_auto"] = [
TF_MODEL_FOR_CAUSAL_LM_MAPPING, "TF_MODEL_FOR_CAUSAL_LM_MAPPING",
TF_MODEL_FOR_MASKED_LM_MAPPING, "TF_MODEL_FOR_MASKED_LM_MAPPING",
TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING, "TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING",
TF_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING, "TF_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING",
TF_MODEL_FOR_PRETRAINING_MAPPING, "TF_MODEL_FOR_PRETRAINING_MAPPING",
TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING, "TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING",
TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, "TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING",
TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING, "TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING",
TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING, "TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING",
TF_MODEL_MAPPING, "TF_MODEL_MAPPING",
TF_MODEL_WITH_LM_HEAD_MAPPING, "TF_MODEL_WITH_LM_HEAD_MAPPING",
TFAutoModel, "TFAutoModel",
TFAutoModelForCausalLM, "TFAutoModelForCausalLM",
TFAutoModelForMaskedLM, "TFAutoModelForMaskedLM",
TFAutoModelForMultipleChoice, "TFAutoModelForMultipleChoice",
TFAutoModelForPreTraining, "TFAutoModelForPreTraining",
TFAutoModelForQuestionAnswering, "TFAutoModelForQuestionAnswering",
TFAutoModelForSeq2SeqLM, "TFAutoModelForSeq2SeqLM",
TFAutoModelForSequenceClassification, "TFAutoModelForSequenceClassification",
TFAutoModelForTokenClassification, "TFAutoModelForTokenClassification",
TFAutoModelWithLMHead, "TFAutoModelWithLMHead",
) ]
if is_flax_available(): if is_flax_available():
from .modeling_flax_auto import FLAX_MODEL_MAPPING, FlaxAutoModel _import_structure["modeling_flax_auto"] = ["FLAX_MODEL_MAPPING", "FlaxAutoModel"]
if TYPE_CHECKING:
from .configuration_auto import ALL_PRETRAINED_CONFIG_ARCHIVE_MAP, CONFIG_MAPPING, MODEL_NAMES_MAPPING, AutoConfig
from .tokenization_auto import TOKENIZER_MAPPING, AutoTokenizer
if is_torch_available():
from .modeling_auto import (
MODEL_FOR_CAUSAL_LM_MAPPING,
MODEL_FOR_MASKED_LM_MAPPING,
MODEL_FOR_MULTIPLE_CHOICE_MAPPING,
MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING,
MODEL_FOR_PRETRAINING_MAPPING,
MODEL_FOR_QUESTION_ANSWERING_MAPPING,
MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING,
MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
MODEL_MAPPING,
MODEL_WITH_LM_HEAD_MAPPING,
AutoModel,
AutoModelForCausalLM,
AutoModelForMaskedLM,
AutoModelForMultipleChoice,
AutoModelForNextSentencePrediction,
AutoModelForPreTraining,
AutoModelForQuestionAnswering,
AutoModelForSeq2SeqLM,
AutoModelForSequenceClassification,
AutoModelForTableQuestionAnswering,
AutoModelForTokenClassification,
AutoModelWithLMHead,
)
if is_tf_available():
from .modeling_tf_auto import (
TF_MODEL_FOR_CAUSAL_LM_MAPPING,
TF_MODEL_FOR_MASKED_LM_MAPPING,
TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING,
TF_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING,
TF_MODEL_FOR_PRETRAINING_MAPPING,
TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING,
TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
TF_MODEL_MAPPING,
TF_MODEL_WITH_LM_HEAD_MAPPING,
TFAutoModel,
TFAutoModelForCausalLM,
TFAutoModelForMaskedLM,
TFAutoModelForMultipleChoice,
TFAutoModelForPreTraining,
TFAutoModelForQuestionAnswering,
TFAutoModelForSeq2SeqLM,
TFAutoModelForSequenceClassification,
TFAutoModelForTokenClassification,
TFAutoModelWithLMHead,
)
if is_flax_available():
from .modeling_flax_auto import FLAX_MODEL_MAPPING, FlaxAutoModel
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -15,24 +15,69 @@ ...@@ -15,24 +15,69 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_tf_available, is_tokenizers_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_bart import BART_PRETRAINED_CONFIG_ARCHIVE_MAP, BartConfig
from .tokenization_bart import BartTokenizer
from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available
_import_structure = {
"configuration_bart": ["BART_PRETRAINED_CONFIG_ARCHIVE_MAP", "BartConfig"],
"tokenization_bart": ["BartTokenizer"],
}
if is_tokenizers_available(): if is_tokenizers_available():
from .tokenization_bart_fast import BartTokenizerFast _import_structure["tokenization_bart_fast"] = ["BartTokenizerFast"]
if is_torch_available(): if is_torch_available():
from .modeling_bart import ( _import_structure["modeling_bart"] = [
BART_PRETRAINED_MODEL_ARCHIVE_LIST, "BART_PRETRAINED_MODEL_ARCHIVE_LIST",
BartForConditionalGeneration, "BartForConditionalGeneration",
BartForQuestionAnswering, "BartForQuestionAnswering",
BartForSequenceClassification, "BartForSequenceClassification",
BartModel, "BartModel",
BartPretrainedModel, "BartPretrainedModel",
PretrainedBartModel, "PretrainedBartModel",
) ]
if is_tf_available(): if is_tf_available():
from .modeling_tf_bart import TFBartForConditionalGeneration, TFBartModel, TFBartPretrainedModel _import_structure["modeling_tf_bart"] = ["TFBartForConditionalGeneration", "TFBartModel", "TFBartPretrainedModel"]
if TYPE_CHECKING:
from .configuration_bart import BART_PRETRAINED_CONFIG_ARCHIVE_MAP, BartConfig
from .tokenization_bart import BartTokenizer
if is_tokenizers_available():
from .tokenization_bart_fast import BartTokenizerFast
if is_torch_available():
from .modeling_bart import (
BART_PRETRAINED_MODEL_ARCHIVE_LIST,
BartForConditionalGeneration,
BartForQuestionAnswering,
BartForSequenceClassification,
BartModel,
BartPretrainedModel,
PretrainedBartModel,
)
if is_tf_available():
from .modeling_tf_bart import TFBartForConditionalGeneration, TFBartModel, TFBartPretrainedModel
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,11 +16,42 @@ ...@@ -16,11 +16,42 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_sentencepiece_available, is_tokenizers_available from typing import TYPE_CHECKING
from ...file_utils import _BaseLazyModule, is_sentencepiece_available, is_tokenizers_available
_import_structure = {}
if is_sentencepiece_available(): if is_sentencepiece_available():
from .tokenization_barthez import BarthezTokenizer _import_structure["tokenization_barthez"] = ["BarthezTokenizer"]
if is_tokenizers_available(): if is_tokenizers_available():
from .tokenization_barthez_fast import BarthezTokenizerFast _import_structure["tokenization_barthez_fast"] = ["BarthezTokenizerFast"]
if TYPE_CHECKING:
if is_sentencepiece_available():
from .tokenization_barthez import BarthezTokenizer
if is_tokenizers_available():
from .tokenization_barthez_fast import BarthezTokenizerFast
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,47 +16,121 @@ ...@@ -16,47 +16,121 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_flax_available, is_tf_available, is_tokenizers_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_bert import BERT_PRETRAINED_CONFIG_ARCHIVE_MAP, BertConfig
from .tokenization_bert import BasicTokenizer, BertTokenizer, WordpieceTokenizer
from ...file_utils import (
_BaseLazyModule,
is_flax_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
_import_structure = {
"configuration_bert": ["BERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "BertConfig"],
"tokenization_bert": ["BasicTokenizer", "BertTokenizer", "WordpieceTokenizer"],
}
if is_tokenizers_available(): if is_tokenizers_available():
from .tokenization_bert_fast import BertTokenizerFast _import_structure["tokenization_bert_fast"] = ["BertTokenizerFast"]
if is_torch_available(): if is_torch_available():
from .modeling_bert import ( _import_structure["modeling_bert"] = [
BERT_PRETRAINED_MODEL_ARCHIVE_LIST, "BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
BertForMaskedLM, "BertForMaskedLM",
BertForMultipleChoice, "BertForMultipleChoice",
BertForNextSentencePrediction, "BertForNextSentencePrediction",
BertForPreTraining, "BertForPreTraining",
BertForQuestionAnswering, "BertForQuestionAnswering",
BertForSequenceClassification, "BertForSequenceClassification",
BertForTokenClassification, "BertForTokenClassification",
BertLayer, "BertLayer",
BertLMHeadModel, "BertLMHeadModel",
BertModel, "BertModel",
BertPreTrainedModel, "BertPreTrainedModel",
load_tf_weights_in_bert, "load_tf_weights_in_bert",
) ]
if is_tf_available(): if is_tf_available():
from .modeling_tf_bert import ( _import_structure["modeling_tf_bert"] = [
TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST, "TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
TFBertEmbeddings, "TFBertEmbeddings",
TFBertForMaskedLM, "TFBertForMaskedLM",
TFBertForMultipleChoice, "TFBertForMultipleChoice",
TFBertForNextSentencePrediction, "TFBertForNextSentencePrediction",
TFBertForPreTraining, "TFBertForPreTraining",
TFBertForQuestionAnswering, "TFBertForQuestionAnswering",
TFBertForSequenceClassification, "TFBertForSequenceClassification",
TFBertForTokenClassification, "TFBertForTokenClassification",
TFBertLMHeadModel, "TFBertLMHeadModel",
TFBertMainLayer, "TFBertMainLayer",
TFBertModel, "TFBertModel",
TFBertPreTrainedModel, "TFBertPreTrainedModel",
) ]
if is_flax_available(): if is_flax_available():
from .modeling_flax_bert import FlaxBertForMaskedLM, FlaxBertModel _import_structure["modeling_flax_bert"] = ["FlaxBertForMaskedLM", "FlaxBertModel"]
if TYPE_CHECKING:
from .configuration_bert import BERT_PRETRAINED_CONFIG_ARCHIVE_MAP, BertConfig
from .tokenization_bert import BasicTokenizer, BertTokenizer, WordpieceTokenizer
if is_tokenizers_available():
from .tokenization_bert_fast import BertTokenizerFast
if is_torch_available():
from .modeling_bert import (
BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
BertForMaskedLM,
BertForMultipleChoice,
BertForNextSentencePrediction,
BertForPreTraining,
BertForQuestionAnswering,
BertForSequenceClassification,
BertForTokenClassification,
BertLayer,
BertLMHeadModel,
BertModel,
BertPreTrainedModel,
load_tf_weights_in_bert,
)
if is_tf_available():
from .modeling_tf_bert import (
TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFBertEmbeddings,
TFBertForMaskedLM,
TFBertForMultipleChoice,
TFBertForNextSentencePrediction,
TFBertForPreTraining,
TFBertForQuestionAnswering,
TFBertForSequenceClassification,
TFBertForTokenClassification,
TFBertLMHeadModel,
TFBertMainLayer,
TFBertModel,
TFBertPreTrainedModel,
)
if is_flax_available():
from .modeling_flax_bert import FlaxBertForMaskedLM, FlaxBertModel
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,16 +16,53 @@ ...@@ -16,16 +16,53 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_sentencepiece_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_bert_generation import BertGenerationConfig
from ...file_utils import _BaseLazyModule, is_sentencepiece_available, is_torch_available
_import_structure = {
"configuration_bert_generation": ["BertGenerationConfig"],
}
if is_sentencepiece_available(): if is_sentencepiece_available():
from .tokenization_bert_generation import BertGenerationTokenizer _import_structure["tokenization_bert_generation"] = ["BertGenerationTokenizer"]
if is_torch_available(): if is_torch_available():
from .modeling_bert_generation import ( _import_structure["modeling_bert_generation"] = [
BertGenerationDecoder, "BertGenerationDecoder",
BertGenerationEncoder, "BertGenerationEncoder",
load_tf_weights_in_bert_generation, "load_tf_weights_in_bert_generation",
) ]
if TYPE_CHECKING:
from .configuration_bert_generation import BertGenerationConfig
if is_sentencepiece_available():
from .tokenization_bert_generation import BertGenerationTokenizer
if is_torch_available():
from .modeling_bert_generation import (
BertGenerationDecoder,
BertGenerationEncoder,
load_tf_weights_in_bert_generation,
)
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,4 +16,33 @@ ...@@ -16,4 +16,33 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from .tokenization_bert_japanese import BertJapaneseTokenizer, CharacterTokenizer, MecabTokenizer from typing import TYPE_CHECKING
from ...file_utils import _BaseLazyModule
_import_structure = {
"tokenization_bert_japanese": ["BertJapaneseTokenizer", "CharacterTokenizer", "MecabTokenizer"],
}
if TYPE_CHECKING:
from .tokenization_bert_japanese import BertJapaneseTokenizer, CharacterTokenizer, MecabTokenizer
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,4 +16,33 @@ ...@@ -16,4 +16,33 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from .tokenization_bertweet import BertweetTokenizer from typing import TYPE_CHECKING
from ...file_utils import _BaseLazyModule
_import_structure = {
"tokenization_bertweet": ["BertweetTokenizer"],
}
if TYPE_CHECKING:
from .tokenization_bertweet import BertweetTokenizer
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,19 +16,58 @@ ...@@ -16,19 +16,58 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_tf_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_blenderbot import BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP, BlenderbotConfig
from .tokenization_blenderbot import BlenderbotTokenizer
from ...file_utils import _BaseLazyModule, is_tf_available, is_torch_available
_import_structure = {
"configuration_blenderbot": ["BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP", "BlenderbotConfig"],
"tokenization_blenderbot": ["BlenderbotTokenizer"],
}
if is_torch_available(): if is_torch_available():
from .modeling_blenderbot import ( _import_structure["modeling_blenderbot"] = [
BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST, "BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST",
BlenderbotForConditionalGeneration, "BlenderbotForConditionalGeneration",
BlenderbotModel, "BlenderbotModel",
BlenderbotPreTrainedModel, "BlenderbotPreTrainedModel",
) ]
if is_tf_available(): if is_tf_available():
from .modeling_tf_blenderbot import TFBlenderbotForConditionalGeneration _import_structure["modeling_tf_blenderbot"] = ["TFBlenderbotForConditionalGeneration"]
if TYPE_CHECKING:
from .configuration_blenderbot import BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP, BlenderbotConfig
from .tokenization_blenderbot import BlenderbotTokenizer
if is_torch_available():
from .modeling_blenderbot import (
BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST,
BlenderbotForConditionalGeneration,
BlenderbotModel,
BlenderbotPreTrainedModel,
)
if is_tf_available():
from .modeling_tf_blenderbot import TFBlenderbotForConditionalGeneration
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -15,15 +15,51 @@ ...@@ -15,15 +15,51 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_torch_available from typing import TYPE_CHECKING
from .configuration_blenderbot_small import BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP, BlenderbotSmallConfig
from .tokenization_blenderbot_small import BlenderbotSmallTokenizer
from ...file_utils import _BaseLazyModule, is_torch_available
_import_structure = {
"configuration_blenderbot_small": ["BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP", "BlenderbotSmallConfig"],
"tokenization_blenderbot_small": ["BlenderbotSmallTokenizer"],
}
if is_torch_available(): if is_torch_available():
from .modeling_blenderbot_small import ( _import_structure["modeling_blenderbot_small"] = [
BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST, "BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST",
BlenderbotSmallForConditionalGeneration, "BlenderbotSmallForConditionalGeneration",
BlenderbotSmallModel, "BlenderbotSmallModel",
BlenderbotSmallPreTrainedModel, "BlenderbotSmallPreTrainedModel",
) ]
if TYPE_CHECKING:
from .configuration_blenderbot_small import BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP, BlenderbotSmallConfig
from .tokenization_blenderbot_small import BlenderbotSmallTokenizer
if is_torch_available():
from .modeling_blenderbot_small import (
BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST,
BlenderbotSmallForConditionalGeneration,
BlenderbotSmallModel,
BlenderbotSmallPreTrainedModel,
)
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,35 +16,97 @@ ...@@ -16,35 +16,97 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_camembert import CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, CamembertConfig
from ...file_utils import (
_BaseLazyModule,
is_sentencepiece_available,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
_import_structure = {
"configuration_camembert": ["CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "CamembertConfig"],
}
if is_sentencepiece_available(): if is_sentencepiece_available():
from .tokenization_camembert import CamembertTokenizer _import_structure["tokenization_camembert"] = ["CamembertTokenizer"]
if is_tokenizers_available(): if is_tokenizers_available():
from .tokenization_camembert_fast import CamembertTokenizerFast _import_structure["tokenization_camembert_fast"] = ["CamembertTokenizerFast"]
if is_torch_available(): if is_torch_available():
from .modeling_camembert import ( _import_structure["modeling_camembert"] = [
CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST, "CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
CamembertForCausalLM, "CamembertForCausalLM",
CamembertForMaskedLM, "CamembertForMaskedLM",
CamembertForMultipleChoice, "CamembertForMultipleChoice",
CamembertForQuestionAnswering, "CamembertForQuestionAnswering",
CamembertForSequenceClassification, "CamembertForSequenceClassification",
CamembertForTokenClassification, "CamembertForTokenClassification",
CamembertModel, "CamembertModel",
) ]
if is_tf_available(): if is_tf_available():
from .modeling_tf_camembert import ( _import_structure["modeling_tf_camembert"] = [
TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST, "TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
TFCamembertForMaskedLM, "TFCamembertForMaskedLM",
TFCamembertForMultipleChoice, "TFCamembertForMultipleChoice",
TFCamembertForQuestionAnswering, "TFCamembertForQuestionAnswering",
TFCamembertForSequenceClassification, "TFCamembertForSequenceClassification",
TFCamembertForTokenClassification, "TFCamembertForTokenClassification",
TFCamembertModel, "TFCamembertModel",
) ]
if TYPE_CHECKING:
from .configuration_camembert import CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, CamembertConfig
if is_sentencepiece_available():
from .tokenization_camembert import CamembertTokenizer
if is_tokenizers_available():
from .tokenization_camembert_fast import CamembertTokenizerFast
if is_torch_available():
from .modeling_camembert import (
CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
CamembertForCausalLM,
CamembertForMaskedLM,
CamembertForMultipleChoice,
CamembertForQuestionAnswering,
CamembertForSequenceClassification,
CamembertForTokenClassification,
CamembertModel,
)
if is_tf_available():
from .modeling_tf_camembert import (
TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFCamembertForMaskedLM,
TFCamembertForMultipleChoice,
TFCamembertForQuestionAnswering,
TFCamembertForSequenceClassification,
TFCamembertForTokenClassification,
TFCamembertModel,
)
else:
import importlib
import os
import sys
class _LazyModule(_BaseLazyModule):
"""
Module class that surfaces all objects but only performs associated imports when the objects are requested.
"""
__file__ = globals()["__file__"]
__path__ = [os.path.dirname(__file__)]
def _get_module(self, module_name: str):
return importlib.import_module("." + module_name, self.__name__)
sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,25 +16,71 @@ ...@@ -16,25 +16,71 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_tf_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_ctrl import CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP, CTRLConfig
from .tokenization_ctrl import CTRLTokenizer
from ...file_utils import _BaseLazyModule, is_tf_available, is_torch_available
_import_structure = {
"configuration_ctrl": ["CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP", "CTRLConfig"],
"tokenization_ctrl": ["CTRLTokenizer"],
}
if is_torch_available(): if is_torch_available():
from .modeling_ctrl import ( _import_structure["modeling_ctrl"] = [
CTRL_PRETRAINED_MODEL_ARCHIVE_LIST, "CTRL_PRETRAINED_MODEL_ARCHIVE_LIST",
CTRLForSequenceClassification, "CTRLForSequenceClassification",
CTRLLMHeadModel, "CTRLLMHeadModel",
CTRLModel, "CTRLModel",
CTRLPreTrainedModel, "CTRLPreTrainedModel",
) ]
if is_tf_available(): if is_tf_available():
from .modeling_tf_ctrl import ( _import_structure["modeling_tf_ctrl"] = [
TF_CTRL_PRETRAINED_MODEL_ARCHIVE_LIST, "TF_CTRL_PRETRAINED_MODEL_ARCHIVE_LIST",
TFCTRLForSequenceClassification, "TFCTRLForSequenceClassification",
TFCTRLLMHeadModel, "TFCTRLLMHeadModel",
TFCTRLModel, "TFCTRLModel",
TFCTRLPreTrainedModel, "TFCTRLPreTrainedModel",
) ]
# Eager imports for static type checkers only; at runtime the `else` branch
# swaps in a lazy module so torch/tf are not imported until actually needed.
if TYPE_CHECKING:
    from .configuration_ctrl import CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP, CTRLConfig
    from .tokenization_ctrl import CTRLTokenizer

    # Backend-specific symbols are only visible when that backend is installed.
    if is_torch_available():
        from .modeling_ctrl import (
            CTRL_PRETRAINED_MODEL_ARCHIVE_LIST,
            CTRLForSequenceClassification,
            CTRLLMHeadModel,
            CTRLModel,
            CTRLPreTrainedModel,
        )

    if is_tf_available():
        from .modeling_tf_ctrl import (
            TF_CTRL_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFCTRLForSequenceClassification,
            TFCTRLLMHeadModel,
            TFCTRLModel,
            TFCTRLPreTrainedModel,
        )

else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        # Keep file/path metadata intact on the replacement module object.
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            # Resolve the submodule lazily, relative to this package.
            return importlib.import_module("." + module_name, self.__name__)

    # Install the lazy proxy built from `_import_structure` in place of this
    # module.
    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,15 +16,51 @@ ...@@ -16,15 +16,51 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_torch_available from typing import TYPE_CHECKING
from .configuration_deberta import DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, DebertaConfig
from .tokenization_deberta import DebertaTokenizer
from ...file_utils import _BaseLazyModule, is_torch_available
_import_structure = {
"configuration_deberta": ["DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP", "DebertaConfig"],
"tokenization_deberta": ["DebertaTokenizer"],
}
if is_torch_available(): if is_torch_available():
from .modeling_deberta import ( _import_structure["modeling_deberta"] = [
DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST, "DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
DebertaForSequenceClassification, "DebertaForSequenceClassification",
DebertaModel, "DebertaModel",
DebertaPreTrainedModel, "DebertaPreTrainedModel",
) ]
# Static type checkers take the eager-import path; at runtime the `else`
# branch installs a lazy module so heavy imports are deferred.
if TYPE_CHECKING:
    from .configuration_deberta import DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, DebertaConfig
    from .tokenization_deberta import DebertaTokenizer

    # Torch models are only surfaced when torch is installed.
    if is_torch_available():
        from .modeling_deberta import (
            DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
            DebertaForSequenceClassification,
            DebertaModel,
            DebertaPreTrainedModel,
        )

else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        # Keep file/path metadata intact on the replacement module object.
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            # Resolve the submodule lazily, relative to this package.
            return importlib.import_module("." + module_name, self.__name__)

    # Install the lazy proxy built from `_import_structure` in place of this
    # module.
    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,35 +16,91 @@ ...@@ -16,35 +16,91 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_tf_available, is_tokenizers_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_distilbert import DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, DistilBertConfig
from .tokenization_distilbert import DistilBertTokenizer
from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available
_import_structure = {
"configuration_distilbert": ["DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "DistilBertConfig"],
"tokenization_distilbert": ["DistilBertTokenizer"],
}
if is_tokenizers_available(): if is_tokenizers_available():
from .tokenization_distilbert_fast import DistilBertTokenizerFast _import_structure["tokenization_distilbert_fast"] = ["DistilBertTokenizerFast"]
if is_torch_available(): if is_torch_available():
from .modeling_distilbert import ( _import_structure["modeling_distilbert"] = [
DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST, "DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
DistilBertForMaskedLM, "DistilBertForMaskedLM",
DistilBertForMultipleChoice, "DistilBertForMultipleChoice",
DistilBertForQuestionAnswering, "DistilBertForQuestionAnswering",
DistilBertForSequenceClassification, "DistilBertForSequenceClassification",
DistilBertForTokenClassification, "DistilBertForTokenClassification",
DistilBertModel, "DistilBertModel",
DistilBertPreTrainedModel, "DistilBertPreTrainedModel",
) ]
if is_tf_available(): if is_tf_available():
from .modeling_tf_distilbert import ( _import_structure["modeling_tf_distilbert"] = [
TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST, "TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
TFDistilBertForMaskedLM, "TFDistilBertForMaskedLM",
TFDistilBertForMultipleChoice, "TFDistilBertForMultipleChoice",
TFDistilBertForQuestionAnswering, "TFDistilBertForQuestionAnswering",
TFDistilBertForSequenceClassification, "TFDistilBertForSequenceClassification",
TFDistilBertForTokenClassification, "TFDistilBertForTokenClassification",
TFDistilBertMainLayer, "TFDistilBertMainLayer",
TFDistilBertModel, "TFDistilBertModel",
TFDistilBertPreTrainedModel, "TFDistilBertPreTrainedModel",
) ]
# Eager imports for type checkers; at runtime the `else` branch replaces this
# module with a lazy proxy so torch/tf/tokenizers load only on demand.
if TYPE_CHECKING:
    from .configuration_distilbert import DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, DistilBertConfig
    from .tokenization_distilbert import DistilBertTokenizer

    # Each optional backend is guarded by its availability check.
    if is_tokenizers_available():
        from .tokenization_distilbert_fast import DistilBertTokenizerFast

    if is_torch_available():
        from .modeling_distilbert import (
            DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            DistilBertForMaskedLM,
            DistilBertForMultipleChoice,
            DistilBertForQuestionAnswering,
            DistilBertForSequenceClassification,
            DistilBertForTokenClassification,
            DistilBertModel,
            DistilBertPreTrainedModel,
        )

    if is_tf_available():
        from .modeling_tf_distilbert import (
            TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFDistilBertForMaskedLM,
            TFDistilBertForMultipleChoice,
            TFDistilBertForQuestionAnswering,
            TFDistilBertForSequenceClassification,
            TFDistilBertForTokenClassification,
            TFDistilBertMainLayer,
            TFDistilBertModel,
            TFDistilBertPreTrainedModel,
        )

else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        # Keep file/path metadata intact on the replacement module object.
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            # Resolve the submodule lazily, relative to this package.
            return importlib.import_module("." + module_name, self.__name__)

    # Install the lazy proxy built from `_import_structure` in place of this
    # module.
    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,45 +16,112 @@ ...@@ -16,45 +16,112 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_tf_available, is_tokenizers_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_dpr import DPR_PRETRAINED_CONFIG_ARCHIVE_MAP, DPRConfig
from .tokenization_dpr import ( from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available
DPRContextEncoderTokenizer,
DPRQuestionEncoderTokenizer,
DPRReaderOutput, _import_structure = {
DPRReaderTokenizer, "configuration_dpr": ["DPR_PRETRAINED_CONFIG_ARCHIVE_MAP", "DPRConfig"],
) "tokenization_dpr": [
"DPRContextEncoderTokenizer",
"DPRQuestionEncoderTokenizer",
"DPRReaderOutput",
"DPRReaderTokenizer",
],
}
if is_tokenizers_available(): if is_tokenizers_available():
from .tokenization_dpr_fast import ( _import_structure["tokenization_dpr_fast"] = [
DPRContextEncoderTokenizerFast, "DPRContextEncoderTokenizerFast",
DPRQuestionEncoderTokenizerFast, "DPRQuestionEncoderTokenizerFast",
DPRReaderTokenizerFast, "DPRReaderTokenizerFast",
) ]
if is_torch_available(): if is_torch_available():
from .modeling_dpr import ( _import_structure["modeling_dpr"] = [
DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST, "DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST, "DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST, "DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST",
DPRContextEncoder, "DPRContextEncoder",
DPRPretrainedContextEncoder, "DPRPretrainedContextEncoder",
DPRPretrainedQuestionEncoder, "DPRPretrainedQuestionEncoder",
DPRPretrainedReader, "DPRPretrainedReader",
DPRQuestionEncoder, "DPRQuestionEncoder",
DPRReader, "DPRReader",
) ]
if is_tf_available(): if is_tf_available():
from .modeling_tf_dpr import ( _import_structure["modeling_tf_dpr"] = [
TF_DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST, "TF_DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
TF_DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST, "TF_DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
TF_DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST, "TF_DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST",
TFDPRContextEncoder, "TFDPRContextEncoder",
TFDPRPretrainedContextEncoder, "TFDPRPretrainedContextEncoder",
TFDPRPretrainedQuestionEncoder, "TFDPRPretrainedQuestionEncoder",
TFDPRPretrainedReader, "TFDPRPretrainedReader",
TFDPRQuestionEncoder, "TFDPRQuestionEncoder",
TFDPRReader, "TFDPRReader",
]
if TYPE_CHECKING:
from .configuration_dpr import DPR_PRETRAINED_CONFIG_ARCHIVE_MAP, DPRConfig
from .tokenization_dpr import (
DPRContextEncoderTokenizer,
DPRQuestionEncoderTokenizer,
DPRReaderOutput,
DPRReaderTokenizer,
) )
if is_tokenizers_available():
from .tokenization_dpr_fast import (
DPRContextEncoderTokenizerFast,
DPRQuestionEncoderTokenizerFast,
DPRReaderTokenizerFast,
)
if is_torch_available():
from .modeling_dpr import (
DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST,
DPRContextEncoder,
DPRPretrainedContextEncoder,
DPRPretrainedQuestionEncoder,
DPRPretrainedReader,
DPRQuestionEncoder,
DPRReader,
)
if is_tf_available():
from .modeling_tf_dpr import (
TF_DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
TF_DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
TF_DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST,
TFDPRContextEncoder,
TFDPRPretrainedContextEncoder,
TFDPRPretrainedQuestionEncoder,
TFDPRPretrainedReader,
TFDPRQuestionEncoder,
TFDPRReader,
)
# Runtime path (not TYPE_CHECKING): replace this module with a lazy proxy so
# submodules listed in `_import_structure` are imported only when accessed.
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        # Keep file/path metadata intact on the replacement module object.
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            # Resolve the submodule lazily, relative to this package.
            return importlib.import_module("." + module_name, self.__name__)

    # Install the lazy proxy in place of this module in sys.modules.
    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,37 +16,95 @@ ...@@ -16,37 +16,95 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_tf_available, is_tokenizers_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_electra import ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP, ElectraConfig
from .tokenization_electra import ElectraTokenizer
from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available
_import_structure = {
"configuration_electra": ["ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP", "ElectraConfig"],
"tokenization_electra": ["ElectraTokenizer"],
}
if is_tokenizers_available(): if is_tokenizers_available():
from .tokenization_electra_fast import ElectraTokenizerFast _import_structure["tokenization_electra_fast"] = ["ElectraTokenizerFast"]
if is_torch_available(): if is_torch_available():
from .modeling_electra import ( _import_structure["modeling_electra"] = [
ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST, "ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST",
ElectraForMaskedLM, "ElectraForMaskedLM",
ElectraForMultipleChoice, "ElectraForMultipleChoice",
ElectraForPreTraining, "ElectraForPreTraining",
ElectraForQuestionAnswering, "ElectraForQuestionAnswering",
ElectraForSequenceClassification, "ElectraForSequenceClassification",
ElectraForTokenClassification, "ElectraForTokenClassification",
ElectraModel, "ElectraModel",
ElectraPreTrainedModel, "ElectraPreTrainedModel",
load_tf_weights_in_electra, "load_tf_weights_in_electra",
) ]
if is_tf_available(): if is_tf_available():
from .modeling_tf_electra import ( _import_structure["modeling_tf_electra"] = [
TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST, "TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST",
TFElectraForMaskedLM, "TFElectraForMaskedLM",
TFElectraForMultipleChoice, "TFElectraForMultipleChoice",
TFElectraForPreTraining, "TFElectraForPreTraining",
TFElectraForQuestionAnswering, "TFElectraForQuestionAnswering",
TFElectraForSequenceClassification, "TFElectraForSequenceClassification",
TFElectraForTokenClassification, "TFElectraForTokenClassification",
TFElectraModel, "TFElectraModel",
TFElectraPreTrainedModel, "TFElectraPreTrainedModel",
) ]
# Eager imports for static analysis; at runtime the `else` branch installs a
# lazy module so optional backends are imported only on first access.
if TYPE_CHECKING:
    from .configuration_electra import ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP, ElectraConfig
    from .tokenization_electra import ElectraTokenizer

    # Each optional backend is guarded by its availability check.
    if is_tokenizers_available():
        from .tokenization_electra_fast import ElectraTokenizerFast

    if is_torch_available():
        from .modeling_electra import (
            ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST,
            ElectraForMaskedLM,
            ElectraForMultipleChoice,
            ElectraForPreTraining,
            ElectraForQuestionAnswering,
            ElectraForSequenceClassification,
            ElectraForTokenClassification,
            ElectraModel,
            ElectraPreTrainedModel,
            load_tf_weights_in_electra,
        )

    if is_tf_available():
        from .modeling_tf_electra import (
            TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFElectraForMaskedLM,
            TFElectraForMultipleChoice,
            TFElectraForPreTraining,
            TFElectraForQuestionAnswering,
            TFElectraForSequenceClassification,
            TFElectraForTokenClassification,
            TFElectraModel,
            TFElectraPreTrainedModel,
        )

else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        # Keep file/path metadata intact on the replacement module object.
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            # Resolve the submodule lazily, relative to this package.
            return importlib.import_module("." + module_name, self.__name__)

    # Install the lazy proxy built from `_import_structure` in place of this
    # module.
    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,9 +16,39 @@ ...@@ -16,9 +16,39 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_torch_available from typing import TYPE_CHECKING
from .configuration_encoder_decoder import EncoderDecoderConfig
from ...file_utils import _BaseLazyModule, is_torch_available
_import_structure = {
"configuration_encoder_decoder": ["EncoderDecoderConfig"],
}
if is_torch_available(): if is_torch_available():
from .modeling_encoder_decoder import EncoderDecoderModel _import_structure["modeling_encoder_decoder"] = ["EncoderDecoderModel"]
# Eager imports for static analysis only; the `else` branch installs a lazy
# module at runtime so the torch model is imported only when requested.
if TYPE_CHECKING:
    from .configuration_encoder_decoder import EncoderDecoderConfig

    if is_torch_available():
        from .modeling_encoder_decoder import EncoderDecoderModel

else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        # Keep file/path metadata intact on the replacement module object.
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            # Resolve the submodule lazily, relative to this package.
            return importlib.import_module("." + module_name, self.__name__)

    # Install the lazy proxy built from `_import_structure` in place of this
    # module.
    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,30 +16,81 @@ ...@@ -16,30 +16,81 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_tf_available, is_torch_available from typing import TYPE_CHECKING
from .configuration_flaubert import FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, FlaubertConfig
from .tokenization_flaubert import FlaubertTokenizer
from ...file_utils import _BaseLazyModule, is_tf_available, is_torch_available
_import_structure = {
"configuration_flaubert": ["FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "FlaubertConfig"],
"tokenization_flaubert": ["FlaubertTokenizer"],
}
if is_torch_available(): if is_torch_available():
from .modeling_flaubert import ( _import_structure["modeling_flaubert"] = [
FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST, "FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
FlaubertForMultipleChoice, "FlaubertForMultipleChoice",
FlaubertForQuestionAnswering, "FlaubertForQuestionAnswering",
FlaubertForQuestionAnsweringSimple, "FlaubertForQuestionAnsweringSimple",
FlaubertForSequenceClassification, "FlaubertForSequenceClassification",
FlaubertForTokenClassification, "FlaubertForTokenClassification",
FlaubertModel, "FlaubertModel",
FlaubertWithLMHeadModel, "FlaubertWithLMHeadModel",
) ]
if is_tf_available(): if is_tf_available():
from .modeling_tf_flaubert import ( _import_structure["modeling_tf_flaubert"] = [
TF_FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST, "TF_FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
TFFlaubertForMultipleChoice, "TFFlaubertForMultipleChoice",
TFFlaubertForQuestionAnsweringSimple, "TFFlaubertForQuestionAnsweringSimple",
TFFlaubertForSequenceClassification, "TFFlaubertForSequenceClassification",
TFFlaubertForTokenClassification, "TFFlaubertForTokenClassification",
TFFlaubertModel, "TFFlaubertModel",
TFFlaubertWithLMHeadModel, "TFFlaubertWithLMHeadModel",
) ]
# Eager imports for static type checkers; at runtime the `else` branch swaps
# this module for a lazy proxy so torch/tf load only on first access.
if TYPE_CHECKING:
    from .configuration_flaubert import FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, FlaubertConfig
    from .tokenization_flaubert import FlaubertTokenizer

    # Backend-specific symbols are only visible when that backend is installed.
    if is_torch_available():
        from .modeling_flaubert import (
            FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            FlaubertForMultipleChoice,
            FlaubertForQuestionAnswering,
            FlaubertForQuestionAnsweringSimple,
            FlaubertForSequenceClassification,
            FlaubertForTokenClassification,
            FlaubertModel,
            FlaubertWithLMHeadModel,
        )

    if is_tf_available():
        from .modeling_tf_flaubert import (
            TF_FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFFlaubertForMultipleChoice,
            TFFlaubertForQuestionAnsweringSimple,
            TFFlaubertForSequenceClassification,
            TFFlaubertForTokenClassification,
            TFFlaubertModel,
            TFFlaubertWithLMHeadModel,
        )

else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        # Keep file/path metadata intact on the replacement module object.
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            # Resolve the submodule lazily, relative to this package.
            return importlib.import_module("." + module_name, self.__name__)

    # Install the lazy proxy built from `_import_structure` in place of this
    # module.
    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
...@@ -16,10 +16,41 @@ ...@@ -16,10 +16,41 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from ...file_utils import is_torch_available from typing import TYPE_CHECKING
from .configuration_fsmt import FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP, FSMTConfig
from .tokenization_fsmt import FSMTTokenizer
from ...file_utils import _BaseLazyModule, is_torch_available
_import_structure = {
"configuration_fsmt": ["FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP", "FSMTConfig"],
"tokenization_fsmt": ["FSMTTokenizer"],
}
if is_torch_available(): if is_torch_available():
from .modeling_fsmt import FSMTForConditionalGeneration, FSMTModel, PretrainedFSMTModel _import_structure["modeling_fsmt"] = ["FSMTForConditionalGeneration", "FSMTModel", "PretrainedFSMTModel"]
# Eager imports for static analysis only; at runtime the `else` branch
# installs a lazy module so the torch models are imported on demand.
if TYPE_CHECKING:
    from .configuration_fsmt import FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP, FSMTConfig
    from .tokenization_fsmt import FSMTTokenizer

    if is_torch_available():
        from .modeling_fsmt import FSMTForConditionalGeneration, FSMTModel, PretrainedFSMTModel

else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        # Keep file/path metadata intact on the replacement module object.
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            # Resolve the submodule lazily, relative to this package.
            return importlib.import_module("." + module_name, self.__name__)

    # Install the lazy proxy built from `_import_structure` in place of this
    # module.
    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment