Unverified Commit 1bdf4240 authored by Sylvain Gugger, committed by GitHub

Fast imports part 3 (#9474)

* New intermediate inits

* Update template

* Avoid importing torch/tf/flax in tokenization unless necessary

* Styling

* Shut up flake8

* Better python version check
parent 79bbcc52
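
Note on the pattern: every `__init__.py` below swaps its eager imports for an `_import_structure` table plus a `_LazyModule` subclass of `_BaseLazyModule`. The base class itself was added to `file_utils.py` earlier in this series and is not part of this diff, so the sketch below is an illustrative assumption of how such a base class can work, not the actual implementation:

# Illustrative sketch only: the real _BaseLazyModule lives in
# src/transformers/file_utils.py and is not shown in this diff, so the
# details below are assumptions about how such a class can work.
import importlib
import types
from typing import Any, Dict, List


class _BaseLazyModule(types.ModuleType):
    """Module that defers all submodule imports until an attribute is requested."""

    def __init__(self, name: str, import_structure: Dict[str, List[str]]):
        super().__init__(name)
        self._modules = set(import_structure.keys())
        # Map every exported name back to the submodule that defines it.
        self._class_to_module = {}
        for module_name, attrs in import_structure.items():
            for attr in attrs:
                self._class_to_module[attr] = module_name
        # dir() and tab completion work without triggering any import.
        self.__all__ = list(import_structure.keys()) + sum(import_structure.values(), [])

    def __dir__(self):
        return list(super().__dir__()) + self.__all__

    def __getattr__(self, name: str) -> Any:
        # Runs only on the first access of each name; the import cost is paid
        # here, and only for the one submodule that is actually needed.
        if name in self._modules:
            value = self._get_module(name)
        elif name in self._class_to_module:
            module = self._get_module(self._class_to_module[name])
            value = getattr(module, name)
        else:
            raise AttributeError(f"module {self.__name__!r} has no attribute {name!r}")
        setattr(self, name, value)  # cache, so __getattr__ is not hit again
        return value

    def _get_module(self, module_name: str) -> types.ModuleType:
        # Each __init__.py subclass supplies the actual import, as the diffs
        # below do with importlib.import_module("." + module_name, self.__name__).
        raise NotImplementedError

This keeps `dir()` and `from module import name` working while deferring the heavy torch/tf/flax imports to first attribute access.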
src/transformers/file_utils.py
@@ -51,7 +51,7 @@ from .utils import logging
 # The package importlib_metadata is in a different place, depending on the python version.
-if version.parse(sys.version) < version.parse("3.8"):
+if sys.version_info < (3, 8):
     import importlib_metadata
 else:
     import importlib.metadata as importlib_metadata
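
The "Better python version check" bullet is this hunk: `sys.version` is a human-readable banner (for example `"3.8.5 (default, Jul 28 2020, 12:59:40) \n[GCC 9.3.0]"`), not a bare version string, so `version.parse(sys.version)` either raises or falls back to a `LegacyVersion` that sorts before every PEP 440 release, depending on the installed `packaging`; either way the comparison against `"3.8"` is unreliable. `sys.version_info` is a named tuple designed for exactly this comparison:

import sys

# sys.version_info == sys.version_info(major=3, minor=8, micro=5, ...)
# Named tuples compare field by field, so this is a plain numeric
# (major, minor) comparison with no string parsing involved.
if sys.version_info < (3, 8):
    import importlib_metadata  # backport package for Python <= 3.7
else:
    import importlib.metadata as importlib_metadata  # stdlib since 3.8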
src/transformers/models/albert/__init__.py
@@ -16,17 +16,66 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available
-from .configuration_albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig
+from typing import TYPE_CHECKING
+
+from ...file_utils import (
+    _BaseLazyModule,
+    is_sentencepiece_available,
+    is_tf_available,
+    is_tokenizers_available,
+    is_torch_available,
+)
+
+_import_structure = {
+    "configuration_albert": ["ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "AlbertConfig"],
+}

 if is_sentencepiece_available():
-    from .tokenization_albert import AlbertTokenizer
+    _import_structure["tokenization_albert"] = ["AlbertTokenizer"]

 if is_tokenizers_available():
-    from .tokenization_albert_fast import AlbertTokenizerFast
+    _import_structure["tokenization_albert_fast"] = ["AlbertTokenizerFast"]

 if is_torch_available():
+    _import_structure["modeling_albert"] = [
+        "ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "AlbertForMaskedLM",
+        "AlbertForMultipleChoice",
+        "AlbertForPreTraining",
+        "AlbertForQuestionAnswering",
+        "AlbertForSequenceClassification",
+        "AlbertForTokenClassification",
+        "AlbertModel",
+        "AlbertPreTrainedModel",
+        "load_tf_weights_in_albert",
+    ]
+
+if is_tf_available():
+    _import_structure["modeling_tf_albert"] = [
+        "TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "TFAlbertForMaskedLM",
+        "TFAlbertForMultipleChoice",
+        "TFAlbertForPreTraining",
+        "TFAlbertForQuestionAnswering",
+        "TFAlbertForSequenceClassification",
+        "TFAlbertForTokenClassification",
+        "TFAlbertMainLayer",
+        "TFAlbertModel",
+        "TFAlbertPreTrainedModel",
+    ]
+
+if TYPE_CHECKING:
+    from .configuration_albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig
+
+    if is_sentencepiece_available():
+        from .tokenization_albert import AlbertTokenizer
+
+    if is_tokenizers_available():
+        from .tokenization_albert_fast import AlbertTokenizerFast
+
+    if is_torch_available():
         from .modeling_albert import (
             ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             AlbertForMaskedLM,
@@ -40,7 +89,7 @@ if is_torch_available():
             load_tf_weights_in_albert,
         )

     if is_tf_available():
         from .modeling_tf_albert import (
             TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFAlbertForMaskedLM,
@@ -53,3 +102,21 @@ if is_tf_available():
             TFAlbertModel,
             TFAlbertPreTrainedModel,
         )
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
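
The runtime effect of the new init, shown here as a hypothetical session (not part of the diff): importing the package only builds `_import_structure`, and each submodule (and therefore torch or TensorFlow) is imported on first attribute access, while the `if TYPE_CHECKING:` branch still gives IDEs and type checkers real import statements to resolve.

import sys

import transformers.models.albert as albert  # cheap: no framework import happens here

print("torch" in sys.modules)        # False, assuming torch was not already imported

config = albert.AlbertConfig()       # triggers the import of configuration_albert only
print("torch" in sys.modules)        # still False

model_cls = albert.AlbertModel       # first touch of a model class...
print("torch" in sys.modules)        # ...now True: modeling_albert pulled in torch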
src/transformers/models/auto/__init__.py
@@ -16,12 +16,78 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_flax_available, is_tf_available, is_torch_available
-from .configuration_auto import ALL_PRETRAINED_CONFIG_ARCHIVE_MAP, CONFIG_MAPPING, MODEL_NAMES_MAPPING, AutoConfig
-from .tokenization_auto import TOKENIZER_MAPPING, AutoTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_flax_available, is_tf_available, is_torch_available
+
+_import_structure = {
+    "configuration_auto": ["ALL_PRETRAINED_CONFIG_ARCHIVE_MAP", "CONFIG_MAPPING", "MODEL_NAMES_MAPPING", "AutoConfig"],
+    "tokenization_auto": ["TOKENIZER_MAPPING", "AutoTokenizer"],
+}

 if is_torch_available():
+    _import_structure["modeling_auto"] = [
+        "MODEL_FOR_CAUSAL_LM_MAPPING",
+        "MODEL_FOR_MASKED_LM_MAPPING",
+        "MODEL_FOR_MULTIPLE_CHOICE_MAPPING",
+        "MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING",
+        "MODEL_FOR_PRETRAINING_MAPPING",
+        "MODEL_FOR_QUESTION_ANSWERING_MAPPING",
+        "MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING",
+        "MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING",
+        "MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING",
+        "MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING",
+        "MODEL_MAPPING",
+        "MODEL_WITH_LM_HEAD_MAPPING",
+        "AutoModel",
+        "AutoModelForCausalLM",
+        "AutoModelForMaskedLM",
+        "AutoModelForMultipleChoice",
+        "AutoModelForNextSentencePrediction",
+        "AutoModelForPreTraining",
+        "AutoModelForQuestionAnswering",
+        "AutoModelForSeq2SeqLM",
+        "AutoModelForSequenceClassification",
+        "AutoModelForTableQuestionAnswering",
+        "AutoModelForTokenClassification",
+        "AutoModelWithLMHead",
+    ]
+
+if is_tf_available():
+    _import_structure["modeling_tf_auto"] = [
+        "TF_MODEL_FOR_CAUSAL_LM_MAPPING",
+        "TF_MODEL_FOR_MASKED_LM_MAPPING",
+        "TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING",
+        "TF_MODEL_FOR_NEXT_SENTENCE_PREDICTION_MAPPING",
+        "TF_MODEL_FOR_PRETRAINING_MAPPING",
+        "TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING",
+        "TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING",
+        "TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING",
+        "TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING",
+        "TF_MODEL_MAPPING",
+        "TF_MODEL_WITH_LM_HEAD_MAPPING",
+        "TFAutoModel",
+        "TFAutoModelForCausalLM",
+        "TFAutoModelForMaskedLM",
+        "TFAutoModelForMultipleChoice",
+        "TFAutoModelForPreTraining",
+        "TFAutoModelForQuestionAnswering",
+        "TFAutoModelForSeq2SeqLM",
+        "TFAutoModelForSequenceClassification",
+        "TFAutoModelForTokenClassification",
+        "TFAutoModelWithLMHead",
+    ]
+
+if is_flax_available():
+    _import_structure["modeling_flax_auto"] = ["FLAX_MODEL_MAPPING", "FlaxAutoModel"]
+
+if TYPE_CHECKING:
+    from .configuration_auto import ALL_PRETRAINED_CONFIG_ARCHIVE_MAP, CONFIG_MAPPING, MODEL_NAMES_MAPPING, AutoConfig
+    from .tokenization_auto import TOKENIZER_MAPPING, AutoTokenizer
+
+    if is_torch_available():
         from .modeling_auto import (
             MODEL_FOR_CAUSAL_LM_MAPPING,
             MODEL_FOR_MASKED_LM_MAPPING,
@@ -49,7 +115,7 @@ if is_torch_available():
             AutoModelWithLMHead,
         )

     if is_tf_available():
         from .modeling_tf_auto import (
             TF_MODEL_FOR_CAUSAL_LM_MAPPING,
             TF_MODEL_FOR_MASKED_LM_MAPPING,
@@ -74,5 +140,23 @@ if is_tf_available():
             TFAutoModelWithLMHead,
         )

     if is_flax_available():
         from .modeling_flax_auto import FLAX_MODEL_MAPPING, FlaxAutoModel
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/bart/__init__.py
@@ -15,15 +15,42 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_tf_available, is_tokenizers_available, is_torch_available
-from .configuration_bart import BART_PRETRAINED_CONFIG_ARCHIVE_MAP, BartConfig
-from .tokenization_bart import BartTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available
+
+_import_structure = {
+    "configuration_bart": ["BART_PRETRAINED_CONFIG_ARCHIVE_MAP", "BartConfig"],
+    "tokenization_bart": ["BartTokenizer"],
+}

 if is_tokenizers_available():
-    from .tokenization_bart_fast import BartTokenizerFast
+    _import_structure["tokenization_bart_fast"] = ["BartTokenizerFast"]

 if is_torch_available():
+    _import_structure["modeling_bart"] = [
+        "BART_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "BartForConditionalGeneration",
+        "BartForQuestionAnswering",
+        "BartForSequenceClassification",
+        "BartModel",
+        "BartPretrainedModel",
+        "PretrainedBartModel",
+    ]
+
+if is_tf_available():
+    _import_structure["modeling_tf_bart"] = ["TFBartForConditionalGeneration", "TFBartModel", "TFBartPretrainedModel"]
+
+if TYPE_CHECKING:
+    from .configuration_bart import BART_PRETRAINED_CONFIG_ARCHIVE_MAP, BartConfig
+    from .tokenization_bart import BartTokenizer
+
+    if is_tokenizers_available():
+        from .tokenization_bart_fast import BartTokenizerFast
+
+    if is_torch_available():
         from .modeling_bart import (
             BART_PRETRAINED_MODEL_ARCHIVE_LIST,
             BartForConditionalGeneration,
@@ -34,5 +61,23 @@ if is_torch_available():
             PretrainedBartModel,
         )

     if is_tf_available():
         from .modeling_tf_bart import TFBartForConditionalGeneration, TFBartModel, TFBartPretrainedModel
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/barthez/__init__.py
@@ -16,11 +16,42 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_sentencepiece_available, is_tokenizers_available
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_sentencepiece_available, is_tokenizers_available
+
+_import_structure = {}

 if is_sentencepiece_available():
-    from .tokenization_barthez import BarthezTokenizer
+    _import_structure["tokenization_barthez"] = ["BarthezTokenizer"]

 if is_tokenizers_available():
+    _import_structure["tokenization_barthez_fast"] = ["BarthezTokenizerFast"]
+
+if TYPE_CHECKING:
+    if is_sentencepiece_available():
+        from .tokenization_barthez import BarthezTokenizer
+
+    if is_tokenizers_available():
         from .tokenization_barthez_fast import BarthezTokenizerFast
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/bert/__init__.py
@@ -16,15 +16,71 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_flax_available, is_tf_available, is_tokenizers_available, is_torch_available
-from .configuration_bert import BERT_PRETRAINED_CONFIG_ARCHIVE_MAP, BertConfig
-from .tokenization_bert import BasicTokenizer, BertTokenizer, WordpieceTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import (
+    _BaseLazyModule,
+    is_flax_available,
+    is_tf_available,
+    is_tokenizers_available,
+    is_torch_available,
+)
+
+_import_structure = {
+    "configuration_bert": ["BERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "BertConfig"],
+    "tokenization_bert": ["BasicTokenizer", "BertTokenizer", "WordpieceTokenizer"],
+}

 if is_tokenizers_available():
-    from .tokenization_bert_fast import BertTokenizerFast
+    _import_structure["tokenization_bert_fast"] = ["BertTokenizerFast"]

 if is_torch_available():
+    _import_structure["modeling_bert"] = [
+        "BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "BertForMaskedLM",
+        "BertForMultipleChoice",
+        "BertForNextSentencePrediction",
+        "BertForPreTraining",
+        "BertForQuestionAnswering",
+        "BertForSequenceClassification",
+        "BertForTokenClassification",
+        "BertLayer",
+        "BertLMHeadModel",
+        "BertModel",
+        "BertPreTrainedModel",
+        "load_tf_weights_in_bert",
+    ]
+
+if is_tf_available():
+    _import_structure["modeling_tf_bert"] = [
+        "TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "TFBertEmbeddings",
+        "TFBertForMaskedLM",
+        "TFBertForMultipleChoice",
+        "TFBertForNextSentencePrediction",
+        "TFBertForPreTraining",
+        "TFBertForQuestionAnswering",
+        "TFBertForSequenceClassification",
+        "TFBertForTokenClassification",
+        "TFBertLMHeadModel",
+        "TFBertMainLayer",
+        "TFBertModel",
+        "TFBertPreTrainedModel",
+    ]
+
+if is_flax_available():
+    _import_structure["modeling_flax_bert"] = ["FlaxBertForMaskedLM", "FlaxBertModel"]
+
+if TYPE_CHECKING:
+    from .configuration_bert import BERT_PRETRAINED_CONFIG_ARCHIVE_MAP, BertConfig
+    from .tokenization_bert import BasicTokenizer, BertTokenizer, WordpieceTokenizer
+
+    if is_tokenizers_available():
+        from .tokenization_bert_fast import BertTokenizerFast
+
+    if is_torch_available():
         from .modeling_bert import (
             BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             BertForMaskedLM,
@@ -41,7 +97,7 @@ if is_torch_available():
             load_tf_weights_in_bert,
         )

     if is_tf_available():
         from .modeling_tf_bert import (
             TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFBertEmbeddings,
@@ -58,5 +114,23 @@ if is_tf_available():
             TFBertPreTrainedModel,
         )

     if is_flax_available():
         from .modeling_flax_bert import FlaxBertForMaskedLM, FlaxBertModel
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/bert_generation/__init__.py
@@ -16,16 +16,53 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_sentencepiece_available, is_torch_available
-from .configuration_bert_generation import BertGenerationConfig
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_sentencepiece_available, is_torch_available
+
+_import_structure = {
+    "configuration_bert_generation": ["BertGenerationConfig"],
+}

 if is_sentencepiece_available():
-    from .tokenization_bert_generation import BertGenerationTokenizer
+    _import_structure["tokenization_bert_generation"] = ["BertGenerationTokenizer"]

 if is_torch_available():
+    _import_structure["modeling_bert_generation"] = [
+        "BertGenerationDecoder",
+        "BertGenerationEncoder",
+        "load_tf_weights_in_bert_generation",
+    ]
+
+if TYPE_CHECKING:
+    from .configuration_bert_generation import BertGenerationConfig
+
+    if is_sentencepiece_available():
+        from .tokenization_bert_generation import BertGenerationTokenizer
+
+    if is_torch_available():
         from .modeling_bert_generation import (
             BertGenerationDecoder,
             BertGenerationEncoder,
             load_tf_weights_in_bert_generation,
         )
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/bert_japanese/__init__.py
@@ -16,4 +16,33 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from .tokenization_bert_japanese import BertJapaneseTokenizer, CharacterTokenizer, MecabTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule
+
+_import_structure = {
+    "tokenization_bert_japanese": ["BertJapaneseTokenizer", "CharacterTokenizer", "MecabTokenizer"],
+}
+
+if TYPE_CHECKING:
+    from .tokenization_bert_japanese import BertJapaneseTokenizer, CharacterTokenizer, MecabTokenizer
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/bertweet/__init__.py
@@ -16,4 +16,33 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from .tokenization_bertweet import BertweetTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule
+
+_import_structure = {
+    "tokenization_bertweet": ["BertweetTokenizer"],
+}
+
+if TYPE_CHECKING:
+    from .tokenization_bertweet import BertweetTokenizer
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/blenderbot/__init__.py
@@ -16,12 +16,34 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_tf_available, is_torch_available
-from .configuration_blenderbot import BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP, BlenderbotConfig
-from .tokenization_blenderbot import BlenderbotTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_tf_available, is_torch_available
+
+_import_structure = {
+    "configuration_blenderbot": ["BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP", "BlenderbotConfig"],
+    "tokenization_blenderbot": ["BlenderbotTokenizer"],
+}

 if is_torch_available():
+    _import_structure["modeling_blenderbot"] = [
+        "BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "BlenderbotForConditionalGeneration",
+        "BlenderbotModel",
+        "BlenderbotPreTrainedModel",
+    ]
+
+if is_tf_available():
+    _import_structure["modeling_tf_blenderbot"] = ["TFBlenderbotForConditionalGeneration"]
+
+if TYPE_CHECKING:
+    from .configuration_blenderbot import BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP, BlenderbotConfig
+    from .tokenization_blenderbot import BlenderbotTokenizer
+
+    if is_torch_available():
         from .modeling_blenderbot import (
             BLENDERBOT_PRETRAINED_MODEL_ARCHIVE_LIST,
             BlenderbotForConditionalGeneration,
@@ -29,6 +51,23 @@ if is_torch_available():
             BlenderbotPreTrainedModel,
         )

     if is_tf_available():
         from .modeling_tf_blenderbot import TFBlenderbotForConditionalGeneration
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/blenderbot_small/__init__.py
@@ -15,15 +15,51 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_torch_available
-from .configuration_blenderbot_small import BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP, BlenderbotSmallConfig
-from .tokenization_blenderbot_small import BlenderbotSmallTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_torch_available
+
+_import_structure = {
+    "configuration_blenderbot_small": ["BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP", "BlenderbotSmallConfig"],
+    "tokenization_blenderbot_small": ["BlenderbotSmallTokenizer"],
+}

 if is_torch_available():
+    _import_structure["modeling_blenderbot_small"] = [
+        "BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "BlenderbotSmallForConditionalGeneration",
+        "BlenderbotSmallModel",
+        "BlenderbotSmallPreTrainedModel",
+    ]
+
+if TYPE_CHECKING:
+    from .configuration_blenderbot_small import BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP, BlenderbotSmallConfig
+    from .tokenization_blenderbot_small import BlenderbotSmallTokenizer
+
+    if is_torch_available():
         from .modeling_blenderbot_small import (
             BLENDERBOT_SMALL_PRETRAINED_MODEL_ARCHIVE_LIST,
             BlenderbotSmallForConditionalGeneration,
             BlenderbotSmallModel,
             BlenderbotSmallPreTrainedModel,
         )
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/camembert/__init__.py
@@ -16,17 +16,61 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_sentencepiece_available, is_tf_available, is_tokenizers_available, is_torch_available
-from .configuration_camembert import CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, CamembertConfig
+from typing import TYPE_CHECKING
+
+from ...file_utils import (
+    _BaseLazyModule,
+    is_sentencepiece_available,
+    is_tf_available,
+    is_tokenizers_available,
+    is_torch_available,
+)
+
+_import_structure = {
+    "configuration_camembert": ["CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "CamembertConfig"],
+}

 if is_sentencepiece_available():
-    from .tokenization_camembert import CamembertTokenizer
+    _import_structure["tokenization_camembert"] = ["CamembertTokenizer"]

 if is_tokenizers_available():
-    from .tokenization_camembert_fast import CamembertTokenizerFast
+    _import_structure["tokenization_camembert_fast"] = ["CamembertTokenizerFast"]

 if is_torch_available():
+    _import_structure["modeling_camembert"] = [
+        "CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "CamembertForCausalLM",
+        "CamembertForMaskedLM",
+        "CamembertForMultipleChoice",
+        "CamembertForQuestionAnswering",
+        "CamembertForSequenceClassification",
+        "CamembertForTokenClassification",
+        "CamembertModel",
+    ]
+
+if is_tf_available():
+    _import_structure["modeling_tf_camembert"] = [
+        "TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "TFCamembertForMaskedLM",
+        "TFCamembertForMultipleChoice",
+        "TFCamembertForQuestionAnswering",
+        "TFCamembertForSequenceClassification",
+        "TFCamembertForTokenClassification",
+        "TFCamembertModel",
+    ]
+
+if TYPE_CHECKING:
+    from .configuration_camembert import CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, CamembertConfig
+
+    if is_sentencepiece_available():
+        from .tokenization_camembert import CamembertTokenizer
+
+    if is_tokenizers_available():
+        from .tokenization_camembert_fast import CamembertTokenizerFast
+
+    if is_torch_available():
         from .modeling_camembert import (
             CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             CamembertForCausalLM,
@@ -38,7 +82,7 @@ if is_torch_available():
             CamembertModel,
         )

     if is_tf_available():
         from .modeling_tf_camembert import (
             TF_CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFCamembertForMaskedLM,
@@ -48,3 +92,21 @@ if is_tf_available():
             TFCamembertForTokenClassification,
             TFCamembertModel,
         )
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/ctrl/__init__.py
@@ -16,12 +16,40 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_tf_available, is_torch_available
-from .configuration_ctrl import CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP, CTRLConfig
-from .tokenization_ctrl import CTRLTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_tf_available, is_torch_available
+
+_import_structure = {
+    "configuration_ctrl": ["CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP", "CTRLConfig"],
+    "tokenization_ctrl": ["CTRLTokenizer"],
+}

 if is_torch_available():
+    _import_structure["modeling_ctrl"] = [
+        "CTRL_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "CTRLForSequenceClassification",
+        "CTRLLMHeadModel",
+        "CTRLModel",
+        "CTRLPreTrainedModel",
+    ]
+
+if is_tf_available():
+    _import_structure["modeling_tf_ctrl"] = [
+        "TF_CTRL_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "TFCTRLForSequenceClassification",
+        "TFCTRLLMHeadModel",
+        "TFCTRLModel",
+        "TFCTRLPreTrainedModel",
+    ]
+
+if TYPE_CHECKING:
+    from .configuration_ctrl import CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP, CTRLConfig
+    from .tokenization_ctrl import CTRLTokenizer
+
+    if is_torch_available():
         from .modeling_ctrl import (
             CTRL_PRETRAINED_MODEL_ARCHIVE_LIST,
             CTRLForSequenceClassification,
@@ -30,7 +58,7 @@ if is_torch_available():
             CTRLPreTrainedModel,
         )

     if is_tf_available():
         from .modeling_tf_ctrl import (
             TF_CTRL_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFCTRLForSequenceClassification,
@@ -38,3 +66,21 @@ if is_tf_available():
             TFCTRLModel,
             TFCTRLPreTrainedModel,
         )
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/deberta/__init__.py
@@ -16,15 +16,51 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_torch_available
-from .configuration_deberta import DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, DebertaConfig
-from .tokenization_deberta import DebertaTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_torch_available
+
+_import_structure = {
+    "configuration_deberta": ["DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP", "DebertaConfig"],
+    "tokenization_deberta": ["DebertaTokenizer"],
+}

 if is_torch_available():
+    _import_structure["modeling_deberta"] = [
+        "DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "DebertaForSequenceClassification",
+        "DebertaModel",
+        "DebertaPreTrainedModel",
+    ]
+
+if TYPE_CHECKING:
+    from .configuration_deberta import DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, DebertaConfig
+    from .tokenization_deberta import DebertaTokenizer
+
+    if is_torch_available():
         from .modeling_deberta import (
             DEBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
             DebertaForSequenceClassification,
             DebertaModel,
             DebertaPreTrainedModel,
         )
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/distilbert/__init__.py
@@ -16,15 +16,53 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_tf_available, is_tokenizers_available, is_torch_available
-from .configuration_distilbert import DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, DistilBertConfig
-from .tokenization_distilbert import DistilBertTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available
+
+_import_structure = {
+    "configuration_distilbert": ["DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "DistilBertConfig"],
+    "tokenization_distilbert": ["DistilBertTokenizer"],
+}

 if is_tokenizers_available():
-    from .tokenization_distilbert_fast import DistilBertTokenizerFast
+    _import_structure["tokenization_distilbert_fast"] = ["DistilBertTokenizerFast"]

 if is_torch_available():
+    _import_structure["modeling_distilbert"] = [
+        "DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "DistilBertForMaskedLM",
+        "DistilBertForMultipleChoice",
+        "DistilBertForQuestionAnswering",
+        "DistilBertForSequenceClassification",
+        "DistilBertForTokenClassification",
+        "DistilBertModel",
+        "DistilBertPreTrainedModel",
+    ]
+
+if is_tf_available():
+    _import_structure["modeling_tf_distilbert"] = [
+        "TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "TFDistilBertForMaskedLM",
+        "TFDistilBertForMultipleChoice",
+        "TFDistilBertForQuestionAnswering",
+        "TFDistilBertForSequenceClassification",
+        "TFDistilBertForTokenClassification",
+        "TFDistilBertMainLayer",
+        "TFDistilBertModel",
+        "TFDistilBertPreTrainedModel",
+    ]
+
+if TYPE_CHECKING:
+    from .configuration_distilbert import DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, DistilBertConfig
+    from .tokenization_distilbert import DistilBertTokenizer
+
+    if is_tokenizers_available():
+        from .tokenization_distilbert_fast import DistilBertTokenizerFast
+
+    if is_torch_available():
         from .modeling_distilbert import (
             DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             DistilBertForMaskedLM,
@@ -36,7 +74,7 @@ if is_torch_available():
             DistilBertPreTrainedModel,
         )

     if is_tf_available():
         from .modeling_tf_distilbert import (
             TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFDistilBertForMaskedLM,
@@ -48,3 +86,21 @@ if is_tf_available():
             TFDistilBertModel,
             TFDistilBertPreTrainedModel,
         )
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/dpr/__init__.py
@@ -16,24 +16,73 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_tf_available, is_tokenizers_available, is_torch_available
-from .configuration_dpr import DPR_PRETRAINED_CONFIG_ARCHIVE_MAP, DPRConfig
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available
+
+_import_structure = {
+    "configuration_dpr": ["DPR_PRETRAINED_CONFIG_ARCHIVE_MAP", "DPRConfig"],
+    "tokenization_dpr": [
+        "DPRContextEncoderTokenizer",
+        "DPRQuestionEncoderTokenizer",
+        "DPRReaderOutput",
+        "DPRReaderTokenizer",
+    ],
+}

 if is_tokenizers_available():
+    _import_structure["tokenization_dpr_fast"] = [
+        "DPRContextEncoderTokenizerFast",
+        "DPRQuestionEncoderTokenizerFast",
+        "DPRReaderTokenizerFast",
+    ]
+
+if is_torch_available():
+    _import_structure["modeling_dpr"] = [
+        "DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "DPRContextEncoder",
+        "DPRPretrainedContextEncoder",
+        "DPRPretrainedQuestionEncoder",
+        "DPRPretrainedReader",
+        "DPRQuestionEncoder",
+        "DPRReader",
+    ]
+
+if is_tf_available():
+    _import_structure["modeling_tf_dpr"] = [
+        "TF_DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "TF_DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "TF_DPR_READER_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "TFDPRContextEncoder",
+        "TFDPRPretrainedContextEncoder",
+        "TFDPRPretrainedQuestionEncoder",
+        "TFDPRPretrainedReader",
+        "TFDPRQuestionEncoder",
+        "TFDPRReader",
+    ]
+
+if TYPE_CHECKING:
+    from .configuration_dpr import DPR_PRETRAINED_CONFIG_ARCHIVE_MAP, DPRConfig
     from .tokenization_dpr import (
         DPRContextEncoderTokenizer,
         DPRQuestionEncoderTokenizer,
         DPRReaderOutput,
         DPRReaderTokenizer,
     )

     if is_tokenizers_available():
         from .tokenization_dpr_fast import (
             DPRContextEncoderTokenizerFast,
             DPRQuestionEncoderTokenizerFast,
             DPRReaderTokenizerFast,
         )

     if is_torch_available():
         from .modeling_dpr import (
             DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
             DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
@@ -46,7 +95,7 @@ if is_torch_available():
             DPRReader,
         )

     if is_tf_available():
         from .modeling_tf_dpr import (
             TF_DPR_CONTEXT_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
             TF_DPR_QUESTION_ENCODER_PRETRAINED_MODEL_ARCHIVE_LIST,
@@ -58,3 +107,21 @@ if is_tf_available():
             TFDPRQuestionEncoder,
             TFDPRReader,
         )
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/electra/__init__.py
@@ -16,15 +16,55 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_tf_available, is_tokenizers_available, is_torch_available
-from .configuration_electra import ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP, ElectraConfig
-from .tokenization_electra import ElectraTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available
+
+_import_structure = {
+    "configuration_electra": ["ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP", "ElectraConfig"],
+    "tokenization_electra": ["ElectraTokenizer"],
+}

 if is_tokenizers_available():
-    from .tokenization_electra_fast import ElectraTokenizerFast
+    _import_structure["tokenization_electra_fast"] = ["ElectraTokenizerFast"]

 if is_torch_available():
+    _import_structure["modeling_electra"] = [
+        "ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "ElectraForMaskedLM",
+        "ElectraForMultipleChoice",
+        "ElectraForPreTraining",
+        "ElectraForQuestionAnswering",
+        "ElectraForSequenceClassification",
+        "ElectraForTokenClassification",
+        "ElectraModel",
+        "ElectraPreTrainedModel",
+        "load_tf_weights_in_electra",
+    ]
+
+if is_tf_available():
+    _import_structure["modeling_tf_electra"] = [
+        "TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "TFElectraForMaskedLM",
+        "TFElectraForMultipleChoice",
+        "TFElectraForPreTraining",
+        "TFElectraForQuestionAnswering",
+        "TFElectraForSequenceClassification",
+        "TFElectraForTokenClassification",
+        "TFElectraModel",
+        "TFElectraPreTrainedModel",
+    ]
+
+if TYPE_CHECKING:
+    from .configuration_electra import ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP, ElectraConfig
+    from .tokenization_electra import ElectraTokenizer
+
+    if is_tokenizers_available():
+        from .tokenization_electra_fast import ElectraTokenizerFast
+
+    if is_torch_available():
         from .modeling_electra import (
             ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST,
             ElectraForMaskedLM,
@@ -38,7 +78,7 @@ if is_torch_available():
             load_tf_weights_in_electra,
         )

     if is_tf_available():
         from .modeling_tf_electra import (
             TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFElectraForMaskedLM,
@@ -50,3 +90,21 @@ if is_tf_available():
             TFElectraModel,
             TFElectraPreTrainedModel,
         )
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/encoder_decoder/__init__.py
@@ -16,9 +16,39 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_torch_available
-from .configuration_encoder_decoder import EncoderDecoderConfig
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_torch_available
+
+_import_structure = {
+    "configuration_encoder_decoder": ["EncoderDecoderConfig"],
+}

 if is_torch_available():
+    _import_structure["modeling_encoder_decoder"] = ["EncoderDecoderModel"]
+
+if TYPE_CHECKING:
+    from .configuration_encoder_decoder import EncoderDecoderConfig
+
+    if is_torch_available():
         from .modeling_encoder_decoder import EncoderDecoderModel
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/flaubert/__init__.py
@@ -16,12 +16,45 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_tf_available, is_torch_available
-from .configuration_flaubert import FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, FlaubertConfig
-from .tokenization_flaubert import FlaubertTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_tf_available, is_torch_available
+
+_import_structure = {
+    "configuration_flaubert": ["FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "FlaubertConfig"],
+    "tokenization_flaubert": ["FlaubertTokenizer"],
+}

 if is_torch_available():
+    _import_structure["modeling_flaubert"] = [
+        "FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "FlaubertForMultipleChoice",
+        "FlaubertForQuestionAnswering",
+        "FlaubertForQuestionAnsweringSimple",
+        "FlaubertForSequenceClassification",
+        "FlaubertForTokenClassification",
+        "FlaubertModel",
+        "FlaubertWithLMHeadModel",
+    ]
+
+if is_tf_available():
+    _import_structure["modeling_tf_flaubert"] = [
+        "TF_FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
+        "TFFlaubertForMultipleChoice",
+        "TFFlaubertForQuestionAnsweringSimple",
+        "TFFlaubertForSequenceClassification",
+        "TFFlaubertForTokenClassification",
+        "TFFlaubertModel",
+        "TFFlaubertWithLMHeadModel",
+    ]
+
+if TYPE_CHECKING:
+    from .configuration_flaubert import FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, FlaubertConfig
+    from .tokenization_flaubert import FlaubertTokenizer
+
+    if is_torch_available():
         from .modeling_flaubert import (
             FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             FlaubertForMultipleChoice,
@@ -33,7 +66,7 @@ if is_torch_available():
             FlaubertWithLMHeadModel,
         )

     if is_tf_available():
         from .modeling_tf_flaubert import (
             TF_FLAUBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFFlaubertForMultipleChoice,
@@ -43,3 +76,21 @@ if is_tf_available():
             TFFlaubertModel,
             TFFlaubertWithLMHeadModel,
         )
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/fsmt/__init__.py
@@ -16,10 +16,41 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ...file_utils import is_torch_available
-from .configuration_fsmt import FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP, FSMTConfig
-from .tokenization_fsmt import FSMTTokenizer
+from typing import TYPE_CHECKING
+
+from ...file_utils import _BaseLazyModule, is_torch_available
+
+_import_structure = {
+    "configuration_fsmt": ["FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP", "FSMTConfig"],
+    "tokenization_fsmt": ["FSMTTokenizer"],
+}

 if is_torch_available():
+    _import_structure["modeling_fsmt"] = ["FSMTForConditionalGeneration", "FSMTModel", "PretrainedFSMTModel"]
+
+if TYPE_CHECKING:
+    from .configuration_fsmt import FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP, FSMTConfig
+    from .tokenization_fsmt import FSMTTokenizer
+
+    if is_torch_available():
         from .modeling_fsmt import FSMTForConditionalGeneration, FSMTModel, PretrainedFSMTModel
+else:
+    import importlib
+    import os
+    import sys
+
+    class _LazyModule(_BaseLazyModule):
+        """
+        Module class that surfaces all objects but only performs associated imports when the objects are requested.
+        """
+
+        __file__ = globals()["__file__"]
+        __path__ = [os.path.dirname(__file__)]
+
+        def _get_module(self, module_name: str):
+            return importlib.import_module("." + module_name, self.__name__)
+
+    sys.modules[__name__] = _LazyModule(__name__, _import_structure)