Unverified commit df735d13, authored by Dom Miketa, committed by GitHub

[WIP] Fix Pyright static type checking by replacing if-else imports with try-except (#16578)



* rebase and isort

* modify cookiecutter init

* fix cookiecutter auto imports

* fix clean_frameworks_in_init

* fix add_model_to_main_init

* blackify

* replace unnecessary f-strings

* update yolos imports

* fix roberta import bug

* fix yolos missing dependency

* fix add_model_like and cookiecutter bug

* fix repository consistency error

* modify cookiecutter, fix add_new_model_like

* remove stale line
Co-authored-by: Dom Miketa <dmiketa@exscientia.co.uk>
parent 7783fa6b
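
Every hunk below applies the same mechanical transformation, so it helps to see it once in isolation. The sketch below uses a hypothetical `modeling_example` entry and `ExampleModel` class; `OptionalDependencyNotAvailable`, `is_torch_available`, and `_import_structure` are the real names that appear in the hunks. Per the PR title, the motivation is that Pyright cannot statically verify names bound only under a runtime `if` guard, whereas the `try`/`except`/`else` form leaves each binding on an unconditional syntactic path:

    from ...utils import OptionalDependencyNotAvailable, is_torch_available

    _import_structure = {}

    # Old form: the registration is syntactically conditional, which trips
    # up Pyright's static analysis of the guarded names.
    if is_torch_available():
        _import_structure["modeling_example"] = ["ExampleModel"]  # hypothetical

    # New form: the availability check raises a sentinel exception that is
    # caught immediately; the registration sits in the `else` branch.
    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass  # backend missing: simply skip registering its modules
    else:
        _import_structure["modeling_example"] = ["ExampleModel"]  # hypothetical

In these per-model `__init__.py` files the `except` branch is just `pass`: when an optional backend is missing, its classes are never registered with `_LazyModule`, exactly as before; only the control-flow shape changes.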
src/transformers/models/mpnet/__init__.py

@@ -18,7 +18,14 @@
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_flax_available, is_tf_available, is_tokenizers_available, is_torch_available
+from ...utils import (
+    OptionalDependencyNotAvailable,
+    _LazyModule,
+    is_flax_available,
+    is_tf_available,
+    is_tokenizers_available,
+    is_torch_available,
+)

 _import_structure = {
@@ -26,10 +33,20 @@ _import_structure = {
     "tokenization_mpnet": ["MPNetTokenizer"],
 }

-if is_tokenizers_available():
+try:
+    if not is_tokenizers_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_mpnet_fast"] = ["MPNetTokenizerFast"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_mpnet"] = [
         "MPNET_PRETRAINED_MODEL_ARCHIVE_LIST",
         "MPNetForMaskedLM",
@@ -42,7 +59,12 @@ if is_torch_available():
         "MPNetPreTrainedModel",
     ]

-if is_tf_available():
+try:
+    if not is_tf_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_tf_mpnet"] = [
         "TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST",
         "TFMPNetEmbeddings",
@@ -61,10 +83,20 @@ if TYPE_CHECKING:
     from .configuration_mpnet import MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP, MPNetConfig
     from .tokenization_mpnet import MPNetTokenizer

-    if is_tokenizers_available():
+    try:
+        if not is_tokenizers_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_mpnet_fast import MPNetTokenizerFast

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_mpnet import (
             MPNET_PRETRAINED_MODEL_ARCHIVE_LIST,
             MPNetForMaskedLM,
@@ -77,7 +109,12 @@ if TYPE_CHECKING:
             MPNetPreTrainedModel,
         )

-    if is_tf_available():
+    try:
+        if not is_tf_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_tf_mpnet import (
             TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFMPNetEmbeddings,
...
src/transformers/models/mt5/__init__.py

@@ -19,6 +19,7 @@
 from typing import TYPE_CHECKING

 from ...utils import (
+    OptionalDependencyNotAvailable,
     _LazyModule,
     is_flax_available,
     is_sentencepiece_available,
@@ -46,26 +47,56 @@ _import_structure = {
     "configuration_mt5": ["MT5Config"],
 }

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_mt5"] = ["MT5EncoderModel", "MT5ForConditionalGeneration", "MT5Model"]

-if is_tf_available():
+try:
+    if not is_tf_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_tf_mt5"] = ["TFMT5EncoderModel", "TFMT5ForConditionalGeneration", "TFMT5Model"]

-if is_flax_available():
+try:
+    if not is_flax_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_flax_mt5"] = ["FlaxMT5ForConditionalGeneration", "FlaxMT5Model"]

 if TYPE_CHECKING:
     from .configuration_mt5 import MT5Config

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_mt5 import MT5EncoderModel, MT5ForConditionalGeneration, MT5Model

-    if is_tf_available():
+    try:
+        if not is_tf_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_tf_mt5 import TFMT5EncoderModel, TFMT5ForConditionalGeneration, TFMT5Model

-    if is_flax_available():
+    try:
+        if not is_flax_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_flax_mt5 import FlaxMT5ForConditionalGeneration, FlaxMT5Model

 else:
...
src/transformers/models/nystromformer/__init__.py

@@ -18,14 +18,19 @@
 from typing import TYPE_CHECKING

 # rely on isort to merge the imports
-from ...utils import _LazyModule, is_tokenizers_available, is_torch_available
+from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available

 _import_structure = {
     "configuration_nystromformer": ["NYSTROMFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "NystromformerConfig"],
 }

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_nystromformer"] = [
         "NYSTROMFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
         "NystromformerForMaskedLM",
@@ -42,7 +47,12 @@ if is_torch_available():
 if TYPE_CHECKING:
     from .configuration_nystromformer import NYSTROMFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, NystromformerConfig

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_nystromformer import (
             NYSTROMFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
             NystromformerForMaskedLM,
...
src/transformers/models/openai/__init__.py

@@ -18,7 +18,13 @@
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_tf_available, is_tokenizers_available, is_torch_available
+from ...utils import (
+    OptionalDependencyNotAvailable,
+    _LazyModule,
+    is_tf_available,
+    is_tokenizers_available,
+    is_torch_available,
+)

 _import_structure = {
@@ -26,10 +32,20 @@ _import_structure = {
     "tokenization_openai": ["OpenAIGPTTokenizer"],
 }

-if is_tokenizers_available():
+try:
+    if not is_tokenizers_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_openai_fast"] = ["OpenAIGPTTokenizerFast"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_openai"] = [
         "OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST",
         "OpenAIGPTDoubleHeadsModel",
@@ -40,7 +56,12 @@ if is_torch_available():
         "load_tf_weights_in_openai_gpt",
     ]

-if is_tf_available():
+try:
+    if not is_tf_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_tf_openai"] = [
         "TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST",
         "TFOpenAIGPTDoubleHeadsModel",
@@ -56,10 +77,20 @@ if TYPE_CHECKING:
     from .configuration_openai import OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP, OpenAIGPTConfig
     from .tokenization_openai import OpenAIGPTTokenizer

-    if is_tokenizers_available():
+    try:
+        if not is_tokenizers_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_openai_fast import OpenAIGPTTokenizerFast

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_openai import (
             OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST,
             OpenAIGPTDoubleHeadsModel,
@@ -70,7 +101,12 @@ if TYPE_CHECKING:
             load_tf_weights_in_openai_gpt,
         )

-    if is_tf_available():
+    try:
+        if not is_tf_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_tf_openai import (
             TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFOpenAIGPTDoubleHeadsModel,
...
src/transformers/models/pegasus/__init__.py

@@ -18,6 +18,7 @@
 from typing import TYPE_CHECKING

 from ...utils import (
+    OptionalDependencyNotAvailable,
     _LazyModule,
     is_flax_available,
     is_sentencepiece_available,
@@ -31,13 +32,28 @@ _import_structure = {
     "configuration_pegasus": ["PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP", "PegasusConfig"],
 }

-if is_sentencepiece_available():
+try:
+    if not is_sentencepiece_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_pegasus"] = ["PegasusTokenizer"]

-if is_tokenizers_available():
+try:
+    if not is_tokenizers_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_pegasus_fast"] = ["PegasusTokenizerFast"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_pegasus"] = [
         "PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST",
         "PegasusForCausalLM",
@@ -46,14 +62,24 @@ if is_torch_available():
         "PegasusPreTrainedModel",
     ]

-if is_tf_available():
+try:
+    if not is_tf_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_tf_pegasus"] = [
         "TFPegasusForConditionalGeneration",
         "TFPegasusModel",
         "TFPegasusPreTrainedModel",
     ]

-if is_flax_available():
+try:
+    if not is_flax_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_flax_pegasus"] = [
         "FlaxPegasusForConditionalGeneration",
         "FlaxPegasusModel",
@@ -64,13 +90,28 @@ if is_flax_available():
 if TYPE_CHECKING:
     from .configuration_pegasus import PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP, PegasusConfig

-    if is_sentencepiece_available():
+    try:
+        if not is_sentencepiece_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_pegasus import PegasusTokenizer

-    if is_tokenizers_available():
+    try:
+        if not is_tokenizers_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_pegasus_fast import PegasusTokenizerFast

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_pegasus import (
             PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST,
             PegasusForCausalLM,
@@ -79,10 +120,20 @@ if TYPE_CHECKING:
             PegasusPreTrainedModel,
         )

-    if is_tf_available():
+    try:
+        if not is_tf_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_tf_pegasus import TFPegasusForConditionalGeneration, TFPegasusModel, TFPegasusPreTrainedModel

-    if is_flax_available():
+    try:
+        if not is_flax_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_flax_pegasus import (
             FlaxPegasusForConditionalGeneration,
             FlaxPegasusModel,
...
src/transformers/models/perceiver/__init__.py

@@ -17,7 +17,13 @@
 # limitations under the License.
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_tokenizers_available, is_torch_available, is_vision_available
+from ...utils import (
+    OptionalDependencyNotAvailable,
+    _LazyModule,
+    is_tokenizers_available,
+    is_torch_available,
+    is_vision_available,
+)

 _import_structure = {
@@ -25,10 +31,20 @@ _import_structure = {
     "tokenization_perceiver": ["PerceiverTokenizer"],
 }

-if is_vision_available():
+try:
+    if not is_vision_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["feature_extraction_perceiver"] = ["PerceiverFeatureExtractor"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_perceiver"] = [
         "PERCEIVER_PRETRAINED_MODEL_ARCHIVE_LIST",
         "PerceiverForImageClassificationConvProcessing",
@@ -48,10 +64,20 @@ if TYPE_CHECKING:
     from .configuration_perceiver import PERCEIVER_PRETRAINED_CONFIG_ARCHIVE_MAP, PerceiverConfig
     from .tokenization_perceiver import PerceiverTokenizer

-    if is_vision_available():
+    try:
+        if not is_vision_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .feature_extraction_perceiver import PerceiverFeatureExtractor

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_perceiver import (
             PERCEIVER_PRETRAINED_MODEL_ARCHIVE_LIST,
             PerceiverForImageClassificationConvProcessing,
...
src/transformers/models/plbart/__init__.py

@@ -17,17 +17,33 @@
 # limitations under the License.
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_sentencepiece_available, is_tokenizers_available, is_torch_available
+from ...utils import (
+    OptionalDependencyNotAvailable,
+    _LazyModule,
+    is_sentencepiece_available,
+    is_tokenizers_available,
+    is_torch_available,
+)

 _import_structure = {
     "configuration_plbart": ["PLBART_PRETRAINED_CONFIG_ARCHIVE_MAP", "PLBartConfig"],
 }

-if is_sentencepiece_available():
+try:
+    if not is_sentencepiece_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_plbart"] = ["PLBartTokenizer"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_plbart"] = [
         "PLBART_PRETRAINED_MODEL_ARCHIVE_LIST",
         "PLBartForCausalLM",
@@ -41,10 +57,20 @@ if is_torch_available():
 if TYPE_CHECKING:
     from .configuration_plbart import PLBART_PRETRAINED_CONFIG_ARCHIVE_MAP, PLBartConfig

-    if is_sentencepiece_available():
+    try:
+        if not is_sentencepiece_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_plbart import PLBartTokenizer

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_plbart import (
             PLBART_PRETRAINED_MODEL_ARCHIVE_LIST,
             PLBartForCausalLM,
...
src/transformers/models/poolformer/__init__.py

@@ -18,17 +18,27 @@
 from typing import TYPE_CHECKING

 # rely on isort to merge the imports
-from ...utils import _LazyModule, is_torch_available, is_vision_available
+from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available

 _import_structure = {
     "configuration_poolformer": ["POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "PoolFormerConfig"],
 }

-if is_vision_available():
+try:
+    if not is_vision_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["feature_extraction_poolformer"] = ["PoolFormerFeatureExtractor"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_poolformer"] = [
         "POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
         "PoolFormerForImageClassification",
@@ -40,10 +50,20 @@ if is_torch_available():
 if TYPE_CHECKING:
     from .configuration_poolformer import POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, PoolFormerConfig

-    if is_vision_available():
+    try:
+        if not is_vision_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .feature_extraction_poolformer import PoolFormerFeatureExtractor

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_poolformer import (
             POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
             PoolFormerForImageClassification,
...
src/transformers/models/prophetnet/__init__.py

@@ -18,7 +18,7 @@
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_torch_available
+from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available

 _import_structure = {
@@ -26,7 +26,12 @@ _import_structure = {
     "tokenization_prophetnet": ["ProphetNetTokenizer"],
 }

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_prophetnet"] = [
         "PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST",
         "ProphetNetDecoder",
@@ -42,7 +47,12 @@ if TYPE_CHECKING:
     from .configuration_prophetnet import PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP, ProphetNetConfig
     from .tokenization_prophetnet import ProphetNetTokenizer

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_prophetnet import (
             PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST,
             ProphetNetDecoder,
...
src/transformers/models/qdqbert/__init__.py

@@ -17,14 +17,19 @@
 # limitations under the License.
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_torch_available
+from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available

 _import_structure = {
     "configuration_qdqbert": ["QDQBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "QDQBertConfig"],
 }

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_qdqbert"] = [
         "QDQBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
         "QDQBertForMaskedLM",
@@ -44,7 +49,12 @@ if is_torch_available():
 if TYPE_CHECKING:
     from .configuration_qdqbert import QDQBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, QDQBertConfig

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_qdqbert import (
             QDQBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             QDQBertForMaskedLM,
...
src/transformers/models/rag/__init__.py

@@ -18,7 +18,7 @@
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_tf_available, is_torch_available
+from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_torch_available

 _import_structure = {
@@ -27,7 +27,12 @@ _import_structure = {
     "tokenization_rag": ["RagTokenizer"],
 }

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_rag"] = [
         "RagModel",
         "RagPreTrainedModel",
@@ -35,7 +40,12 @@ if is_torch_available():
         "RagTokenForGeneration",
     ]

-if is_tf_available():
+try:
+    if not is_tf_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_tf_rag"] = [
         "TFRagModel",
         "TFRagPreTrainedModel",
@@ -49,10 +59,20 @@ if TYPE_CHECKING:
     from .retrieval_rag import RagRetriever
     from .tokenization_rag import RagTokenizer

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_rag import RagModel, RagPreTrainedModel, RagSequenceForGeneration, RagTokenForGeneration

-    if is_tf_available():
+    try:
+        if not is_tf_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_tf_rag import (
             TFRagModel,
             TFRagPreTrainedModel,
...
src/transformers/models/realm/__init__.py

@@ -17,7 +17,7 @@
 # limitations under the License.
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_tokenizers_available, is_torch_available
+from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available

 _import_structure = {
@@ -25,10 +25,20 @@ _import_structure = {
     "tokenization_realm": ["RealmTokenizer"],
 }

-if is_tokenizers_available():
+try:
+    if not is_tokenizers_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_realm_fast"] = ["RealmTokenizerFast"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_realm"] = [
         "REALM_PRETRAINED_MODEL_ARCHIVE_LIST",
         "RealmEmbedder",
@@ -46,10 +56,20 @@ if TYPE_CHECKING:
     from .configuration_realm import REALM_PRETRAINED_CONFIG_ARCHIVE_MAP, RealmConfig
     from .tokenization_realm import RealmTokenizer

-    if is_tokenizers_available():
+    try:
+        if not is_tokenizers_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_realm import RealmTokenizerFast

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_realm import (
             REALM_PRETRAINED_MODEL_ARCHIVE_LIST,
             RealmEmbedder,
...
src/transformers/models/reformer/__init__.py

@@ -18,20 +18,41 @@
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_sentencepiece_available, is_tokenizers_available, is_torch_available
+from ...utils import (
+    OptionalDependencyNotAvailable,
+    _LazyModule,
+    is_sentencepiece_available,
+    is_tokenizers_available,
+    is_torch_available,
+)

 _import_structure = {
     "configuration_reformer": ["REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "ReformerConfig"],
 }

-if is_sentencepiece_available():
+try:
+    if not is_sentencepiece_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_reformer"] = ["ReformerTokenizer"]

-if is_tokenizers_available():
+try:
+    if not is_tokenizers_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_reformer_fast"] = ["ReformerTokenizerFast"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_reformer"] = [
         "REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
         "ReformerAttention",
@@ -48,13 +69,28 @@ if is_torch_available():
 if TYPE_CHECKING:
     from .configuration_reformer import REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, ReformerConfig

-    if is_sentencepiece_available():
+    try:
+        if not is_sentencepiece_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_reformer import ReformerTokenizer

-    if is_tokenizers_available():
+    try:
+        if not is_tokenizers_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_reformer_fast import ReformerTokenizerFast

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_reformer import (
             REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
             ReformerAttention,
...
src/transformers/models/regnet/__init__.py

@@ -19,13 +19,19 @@ from typing import TYPE_CHECKING
 # rely on isort to merge the imports
 from ...file_utils import _LazyModule, is_torch_available
+from ...utils import OptionalDependencyNotAvailable

 _import_structure = {
     "configuration_regnet": ["REGNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "RegNetConfig"],
 }

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_regnet"] = [
         "REGNET_PRETRAINED_MODEL_ARCHIVE_LIST",
         "RegNetForImageClassification",
@@ -37,7 +43,12 @@ if is_torch_available():
 if TYPE_CHECKING:
     from .configuration_regnet import REGNET_PRETRAINED_CONFIG_ARCHIVE_MAP, RegNetConfig

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_regnet import (
             REGNET_PRETRAINED_MODEL_ARCHIVE_LIST,
             RegNetForImageClassification,
...
src/transformers/models/rembert/__init__.py

@@ -19,6 +19,7 @@
 from typing import TYPE_CHECKING

 from ...utils import (
+    OptionalDependencyNotAvailable,
     _LazyModule,
     is_sentencepiece_available,
     is_tf_available,
@@ -31,13 +32,28 @@ _import_structure = {
     "configuration_rembert": ["REMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "RemBertConfig"],
 }

-if is_sentencepiece_available():
+try:
+    if not is_sentencepiece_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_rembert"] = ["RemBertTokenizer"]

-if is_tokenizers_available():
+try:
+    if not is_tokenizers_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_rembert_fast"] = ["RemBertTokenizerFast"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_rembert"] = [
         "REMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
         "RemBertForCausalLM",
@@ -53,7 +69,12 @@ if is_torch_available():
     ]

-if is_tf_available():
+try:
+    if not is_tf_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_tf_rembert"] = [
         "TF_REMBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
         "TFRemBertForCausalLM",
@@ -71,13 +92,28 @@ if is_tf_available():
 if TYPE_CHECKING:
     from .configuration_rembert import REMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, RemBertConfig

-    if is_sentencepiece_available():
+    try:
+        if not is_sentencepiece_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_rembert import RemBertTokenizer

-    if is_tokenizers_available():
+    try:
+        if not is_tokenizers_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_rembert_fast import RemBertTokenizerFast

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_rembert import (
             REMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             RemBertForCausalLM,
@@ -92,7 +128,12 @@ if TYPE_CHECKING:
             load_tf_weights_in_rembert,
         )

-    if is_tf_available():
+    try:
+        if not is_tf_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_tf_rembert import (
             TF_REMBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFRemBertForCausalLM,
...
src/transformers/models/resnet/__init__.py

@@ -18,14 +18,19 @@
 from typing import TYPE_CHECKING

 # rely on isort to merge the imports
-from ...utils import _LazyModule, is_torch_available
+from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available

 _import_structure = {
     "configuration_resnet": ["RESNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "ResNetConfig"],
 }

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_resnet"] = [
         "RESNET_PRETRAINED_MODEL_ARCHIVE_LIST",
         "ResNetForImageClassification",
@@ -37,7 +42,12 @@ if is_torch_available():
 if TYPE_CHECKING:
     from .configuration_resnet import RESNET_PRETRAINED_CONFIG_ARCHIVE_MAP, ResNetConfig

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_resnet import (
             RESNET_PRETRAINED_MODEL_ARCHIVE_LIST,
             ResNetForImageClassification,
...
src/transformers/models/retribert/__init__.py

@@ -18,7 +18,7 @@
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_tokenizers_available, is_torch_available
+from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available, is_torch_available

 _import_structure = {
@@ -26,10 +26,20 @@ _import_structure = {
     "tokenization_retribert": ["RetriBertTokenizer"],
 }

-if is_tokenizers_available():
+try:
+    if not is_tokenizers_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_retribert_fast"] = ["RetriBertTokenizerFast"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_retribert"] = [
         "RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
         "RetriBertModel",
@@ -41,10 +51,20 @@ if TYPE_CHECKING:
     from .configuration_retribert import RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, RetriBertConfig
     from .tokenization_retribert import RetriBertTokenizer

-    if is_tokenizers_available():
+    try:
+        if not is_tokenizers_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_retribert_fast import RetriBertTokenizerFast

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_retribert import (
             RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
             RetriBertModel,
...
src/transformers/models/roberta/__init__.py

@@ -18,7 +18,14 @@
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_flax_available, is_tf_available, is_tokenizers_available, is_torch_available
+from ...utils import (
+    OptionalDependencyNotAvailable,
+    _LazyModule,
+    is_flax_available,
+    is_tf_available,
+    is_tokenizers_available,
+    is_torch_available,
+)

 _import_structure = {
@@ -26,10 +33,20 @@ _import_structure = {
     "tokenization_roberta": ["RobertaTokenizer"],
 }

-if is_tokenizers_available():
+try:
+    if not is_tokenizers_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_roberta_fast"] = ["RobertaTokenizerFast"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_roberta"] = [
         "ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
         "RobertaForCausalLM",
@@ -42,7 +59,12 @@ if is_torch_available():
         "RobertaPreTrainedModel",
     ]

-if is_tf_available():
+try:
+    if not is_tf_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_tf_roberta"] = [
         "TF_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST",
         "TFRobertaForCausalLM",
@@ -56,7 +78,12 @@ if is_tf_available():
         "TFRobertaPreTrainedModel",
     ]

-if is_flax_available():
+try:
+    if not is_flax_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_flax_roberta"] = [
         "FlaxRobertaForCausalLM",
         "FlaxRobertaForMaskedLM",
@@ -73,10 +100,20 @@ if TYPE_CHECKING:
     from .configuration_roberta import ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP, RobertaConfig, RobertaOnnxConfig
     from .tokenization_roberta import RobertaTokenizer

-    if is_tokenizers_available():
+    try:
+        if not is_tokenizers_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_roberta_fast import RobertaTokenizerFast

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_roberta import (
             ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
             RobertaForCausalLM,
@@ -89,7 +126,12 @@ if TYPE_CHECKING:
             RobertaPreTrainedModel,
         )

-    if is_tf_available():
+    try:
+        if not is_tf_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_tf_roberta import (
             TF_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFRobertaForCausalLM,
@@ -103,7 +145,12 @@ if TYPE_CHECKING:
             TFRobertaPreTrainedModel,
         )

-    if is_flax_available():
+    try:
+        if not is_flax_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_flax_roberta import (
             FlaxRobertaForCausalLM,
             FlaxRobertaForMaskedLM,
...
src/transformers/models/roformer/__init__.py

@@ -17,7 +17,14 @@
 # limitations under the License.
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_flax_available, is_tf_available, is_tokenizers_available, is_torch_available
+from ...utils import (
+    OptionalDependencyNotAvailable,
+    _LazyModule,
+    is_flax_available,
+    is_tf_available,
+    is_tokenizers_available,
+    is_torch_available,
+)

 _import_structure = {
@@ -25,10 +32,20 @@ _import_structure = {
     "tokenization_roformer": ["RoFormerTokenizer"],
 }

-if is_tokenizers_available():
+try:
+    if not is_tokenizers_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["tokenization_roformer_fast"] = ["RoFormerTokenizerFast"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_roformer"] = [
         "ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
         "RoFormerForCausalLM",
@@ -44,7 +61,12 @@ if is_torch_available():
     ]

-if is_tf_available():
+try:
+    if not is_tf_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_tf_roformer"] = [
         "TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
         "TFRoFormerForCausalLM",
@@ -59,7 +81,12 @@ if is_tf_available():
     ]

-if is_flax_available():
+try:
+    if not is_flax_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_flax_roformer"] = [
         "FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
         "FlaxRoFormerForMaskedLM",
@@ -76,10 +103,20 @@ if TYPE_CHECKING:
     from .configuration_roformer import ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, RoFormerConfig, RoFormerOnnxConfig
     from .tokenization_roformer import RoFormerTokenizer

-    if is_tokenizers_available():
+    try:
+        if not is_tokenizers_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .tokenization_roformer_fast import RoFormerTokenizerFast

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_roformer import (
             ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
             RoFormerForCausalLM,
@@ -94,7 +131,12 @@ if TYPE_CHECKING:
             load_tf_weights_in_roformer,
         )

-    if is_tf_available():
+    try:
+        if not is_tf_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_tf_roformer import (
             TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
             TFRoFormerForCausalLM,
@@ -108,7 +150,12 @@ if TYPE_CHECKING:
             TFRoFormerPreTrainedModel,
         )

-    if is_flax_available():
+    try:
+        if not is_flax_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_flax_roformer import (
             FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
             FlaxRoFormerForMaskedLM,
...
src/transformers/models/segformer/__init__.py

@@ -17,17 +17,27 @@
 # limitations under the License.
 from typing import TYPE_CHECKING

-from ...utils import _LazyModule, is_torch_available, is_vision_available
+from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available

 _import_structure = {
     "configuration_segformer": ["SEGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "SegformerConfig"],
 }

-if is_vision_available():
+try:
+    if not is_vision_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["feature_extraction_segformer"] = ["SegformerFeatureExtractor"]

-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    pass
+else:
     _import_structure["modeling_segformer"] = [
         "SEGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
         "SegformerDecodeHead",
@@ -42,10 +52,20 @@ if TYPE_CHECKING:
     from .configuration_segformer import SEGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, SegformerConfig

-    if is_vision_available():
+    try:
+        if not is_vision_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .feature_extraction_segformer import SegformerFeatureExtractor

-    if is_torch_available():
+    try:
+        if not is_torch_available():
+            raise OptionalDependencyNotAvailable()
+    except OptionalDependencyNotAvailable:
+        pass
+    else:
         from .modeling_segformer import (
             SEGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
             SegformerDecodeHead,
...