Unverified commit 1bdf4240, authored by Sylvain Gugger, committed by GitHub

Fast imports part 3 (#9474)

* New intermediate inits

* Update template

* Avoid importing torch/tf/flax in tokenization unless necessary

* Styling

* Shut up flake8

* Better python version check
parent 79bbcc52
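
Every file touched below follows the same recipe: the model's `__init__.py` declares an `_import_structure` dict mapping each submodule to the public names it defines, performs the real imports only under `typing.TYPE_CHECKING`, and otherwise replaces itself in `sys.modules` with a `_LazyModule` proxy, so torch/TF/Flax are only imported once one of their objects is actually requested. The `_BaseLazyModule` base class lives in `file_utils.py` and is not part of this diff; the following is a minimal, self-contained sketch of the mechanism, with the internals assumed rather than copied from the PR:

# Editor's sketch of the lazy-import pattern (a simplified stand-in for
# _BaseLazyModule; the internal details are an assumption, not this PR's code).
import importlib
import types
from typing import Any, Dict, List


class LazyModule(types.ModuleType):
    """Surfaces every name in `import_structure`, but defers each submodule
    import until one of its attributes is first requested."""

    def __init__(self, name: str, import_structure: Dict[str, List[str]]):
        super().__init__(name)
        self._import_structure = import_structure
        # Reverse map: public name -> submodule that defines it.
        self._name_to_module = {
            obj: mod for mod, objs in import_structure.items() for obj in objs
        }

    def __getattr__(self, name: str) -> Any:
        if name in self._import_structure:
            # e.g. `package.modeling_funnel`: import the submodule itself.
            return importlib.import_module("." + name, self.__name__)
        if name in self._name_to_module:
            # e.g. `package.FunnelModel`: import its submodule, then fetch it.
            module = importlib.import_module("." + self._name_to_module[name], self.__name__)
            return getattr(module, name)
        raise AttributeError(f"module {self.__name__!r} has no attribute {name!r}")

Each `__init__.py` then ends with the equivalent of `sys.modules[__name__] = LazyModule(__name__, _import_structure)`. Swapping the module object in `sys.modules` works on Python 3.6, whereas a module-level `__getattr__` (PEP 562) would require Python 3.7+, which is presumably why the class-based proxy was chosen.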
src/transformers/models/funnel/__init__.py
@@ -16,15 +16,55 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available


_import_structure = {
    "configuration_funnel": ["FUNNEL_PRETRAINED_CONFIG_ARCHIVE_MAP", "FunnelConfig"],
    "tokenization_funnel": ["FunnelTokenizer"],
}

if is_tokenizers_available():
    _import_structure["tokenization_funnel_fast"] = ["FunnelTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_funnel"] = [
        "FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST",
        "FunnelBaseModel",
        "FunnelForMaskedLM",
        "FunnelForMultipleChoice",
        "FunnelForPreTraining",
        "FunnelForQuestionAnswering",
        "FunnelForSequenceClassification",
        "FunnelForTokenClassification",
        "FunnelModel",
        "load_tf_weights_in_funnel",
    ]

if is_tf_available():
    _import_structure["modeling_tf_funnel"] = [
        "TF_FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFFunnelBaseModel",
        "TFFunnelForMaskedLM",
        "TFFunnelForMultipleChoice",
        "TFFunnelForPreTraining",
        "TFFunnelForQuestionAnswering",
        "TFFunnelForSequenceClassification",
        "TFFunnelForTokenClassification",
        "TFFunnelModel",
    ]


if TYPE_CHECKING:
    from .configuration_funnel import FUNNEL_PRETRAINED_CONFIG_ARCHIVE_MAP, FunnelConfig
    from .tokenization_funnel import FunnelTokenizer

    if is_tokenizers_available():
        from .tokenization_funnel_fast import FunnelTokenizerFast

    if is_torch_available():
        from .modeling_funnel import (
            FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST,
            FunnelBaseModel,
@@ -38,7 +78,7 @@ if is_torch_available():
            load_tf_weights_in_funnel,
        )

    if is_tf_available():
        from .modeling_tf_funnel import (
            TF_FUNNEL_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFFunnelBaseModel,
@@ -50,3 +90,21 @@ if is_tf_available():
            TFFunnelForTokenClassification,
            TFFunnelModel,
        )
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
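
Note how the two branches split the work: `TYPE_CHECKING` is `False` at runtime, so the eager imports in the first branch exist only for static type checkers and IDE completion, which cannot follow the dynamic `sys.modules` swap in the `else:` branch; defining `__file__` and `__path__` on the proxy keeps it usable as a package (the relative `importlib.import_module("." + module_name, ...)` calls depend on it). A quick standard-library demonstration of the constant, independent of this PR:

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only a static analyzer "executes" this; the interpreter never does.
    import json

print(TYPE_CHECKING)    # False at runtime
print("json" in dir())  # False: the guarded import never ran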
src/transformers/models/gpt2/__init__.py
@@ -16,15 +16,50 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available


_import_structure = {
    "configuration_gpt2": ["GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP", "GPT2Config"],
    "tokenization_gpt2": ["GPT2Tokenizer"],
}

if is_tokenizers_available():
    _import_structure["tokenization_gpt2_fast"] = ["GPT2TokenizerFast"]

if is_torch_available():
    _import_structure["modeling_gpt2"] = [
        "GPT2_PRETRAINED_MODEL_ARCHIVE_LIST",
        "GPT2DoubleHeadsModel",
        "GPT2ForSequenceClassification",
        "GPT2LMHeadModel",
        "GPT2Model",
        "GPT2PreTrainedModel",
        "load_tf_weights_in_gpt2",
    ]

if is_tf_available():
    _import_structure["modeling_tf_gpt2"] = [
        "TF_GPT2_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFGPT2DoubleHeadsModel",
        "TFGPT2ForSequenceClassification",
        "TFGPT2LMHeadModel",
        "TFGPT2MainLayer",
        "TFGPT2Model",
        "TFGPT2PreTrainedModel",
    ]


if TYPE_CHECKING:
    from .configuration_gpt2 import GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP, GPT2Config
    from .tokenization_gpt2 import GPT2Tokenizer

    if is_tokenizers_available():
        from .tokenization_gpt2_fast import GPT2TokenizerFast

    if is_torch_available():
        from .modeling_gpt2 import (
            GPT2_PRETRAINED_MODEL_ARCHIVE_LIST,
            GPT2DoubleHeadsModel,
@@ -35,7 +70,7 @@ if is_torch_available():
            load_tf_weights_in_gpt2,
        )

    if is_tf_available():
        from .modeling_tf_gpt2 import (
            TF_GPT2_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFGPT2DoubleHeadsModel,
@@ -45,3 +80,21 @@ if is_tf_available():
            TFGPT2Model,
            TFGPT2PreTrainedModel,
        )
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/herbert/__init__.py
@@ -16,9 +16,39 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_tokenizers_available


_import_structure = {
    "tokenization_herbert": ["HerbertTokenizer"],
}

if is_tokenizers_available():
    _import_structure["tokenization_herbert_fast"] = ["HerbertTokenizerFast"]


if TYPE_CHECKING:
    from .tokenization_herbert import HerbertTokenizer

    if is_tokenizers_available():
        from .tokenization_herbert_fast import HerbertTokenizerFast
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
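
herbert is the simplest case here: no torch or TF models at all, yet it gets the same treatment so that even `tokenizers` is not imported at package-import time. The `is_*_available()` helpers come from `file_utils.py` and are not shown in this diff; a typical implementation of such a probe (an assumption, not necessarily what `file_utils.py` does) checks for the installed distribution without importing it:

import importlib.util


def is_tokenizers_available() -> bool:
    # True if the `tokenizers` package is installed, without importing it.
    return importlib.util.find_spec("tokenizers") is not None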
src/transformers/models/layoutlm/__init__.py
@@ -16,18 +16,57 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_tokenizers_available, is_torch_available


_import_structure = {
    "configuration_layoutlm": ["LAYOUTLM_PRETRAINED_CONFIG_ARCHIVE_MAP", "LayoutLMConfig"],
    "tokenization_layoutlm": ["LayoutLMTokenizer"],
}

if is_tokenizers_available():
    _import_structure["tokenization_layoutlm_fast"] = ["LayoutLMTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_layoutlm"] = [
        "LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST",
        "LayoutLMForMaskedLM",
        "LayoutLMForTokenClassification",
        "LayoutLMModel",
    ]


if TYPE_CHECKING:
    from .configuration_layoutlm import LAYOUTLM_PRETRAINED_CONFIG_ARCHIVE_MAP, LayoutLMConfig
    from .tokenization_layoutlm import LayoutLMTokenizer

    if is_tokenizers_available():
        from .tokenization_layoutlm_fast import LayoutLMTokenizerFast

    if is_torch_available():
        from .modeling_layoutlm import (
            LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST,
            LayoutLMForMaskedLM,
            LayoutLMForTokenClassification,
            LayoutLMModel,
        )
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/led/__init__.py
@@ -15,15 +15,42 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available


_import_structure = {
    "configuration_led": ["LED_PRETRAINED_CONFIG_ARCHIVE_MAP", "LEDConfig"],
    "tokenization_led": ["LEDTokenizer"],
}

if is_tokenizers_available():
    _import_structure["tokenization_led_fast"] = ["LEDTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_led"] = [
        "LED_PRETRAINED_MODEL_ARCHIVE_LIST",
        "LEDForConditionalGeneration",
        "LEDForQuestionAnswering",
        "LEDForSequenceClassification",
        "LEDModel",
        "LEDPreTrainedModel",
    ]

if is_tf_available():
    _import_structure["modeling_tf_led"] = ["TFLEDForConditionalGeneration", "TFLEDModel", "TFLEDPreTrainedModel"]


if TYPE_CHECKING:
    from .configuration_led import LED_PRETRAINED_CONFIG_ARCHIVE_MAP, LEDConfig
    from .tokenization_led import LEDTokenizer

    if is_tokenizers_available():
        from .tokenization_led_fast import LEDTokenizerFast

    if is_torch_available():
        from .modeling_led import (
            LED_PRETRAINED_MODEL_ARCHIVE_LIST,
            LEDForConditionalGeneration,
@@ -33,6 +60,23 @@ if is_torch_available():
            LEDPreTrainedModel,
        )

    if is_tf_available():
        from .modeling_tf_led import TFLEDForConditionalGeneration, TFLEDModel, TFLEDPreTrainedModel
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/longformer/__init__.py
@@ -16,15 +16,52 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available


_import_structure = {
    "configuration_longformer": ["LONGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "LongformerConfig"],
    "tokenization_longformer": ["LongformerTokenizer"],
}

if is_tokenizers_available():
    _import_structure["tokenization_longformer_fast"] = ["LongformerTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_longformer"] = [
        "LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "LongformerForMaskedLM",
        "LongformerForMultipleChoice",
        "LongformerForQuestionAnswering",
        "LongformerForSequenceClassification",
        "LongformerForTokenClassification",
        "LongformerModel",
        "LongformerSelfAttention",
    ]

if is_tf_available():
    _import_structure["modeling_tf_longformer"] = [
        "TF_LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFLongformerForMaskedLM",
        "TFLongformerForMultipleChoice",
        "TFLongformerForQuestionAnswering",
        "TFLongformerForSequenceClassification",
        "TFLongformerForTokenClassification",
        "TFLongformerModel",
        "TFLongformerSelfAttention",
    ]


if TYPE_CHECKING:
    from .configuration_longformer import LONGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, LongformerConfig
    from .tokenization_longformer import LongformerTokenizer

    if is_tokenizers_available():
        from .tokenization_longformer_fast import LongformerTokenizerFast

    if is_torch_available():
        from .modeling_longformer import (
            LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            LongformerForMaskedLM,
@@ -36,7 +73,7 @@ if is_torch_available():
            LongformerSelfAttention,
        )

    if is_tf_available():
        from .modeling_tf_longformer import (
            TF_LONGFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFLongformerForMaskedLM,
@@ -47,3 +84,21 @@ if is_tf_available():
            TFLongformerModel,
            TFLongformerSelfAttention,
        )
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/lxmert/__init__.py
@@ -16,15 +16,49 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available


_import_structure = {
    "configuration_lxmert": ["LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "LxmertConfig"],
    "tokenization_lxmert": ["LxmertTokenizer"],
}

if is_tokenizers_available():
    _import_structure["tokenization_lxmert_fast"] = ["LxmertTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_lxmert"] = [
        "LxmertEncoder",
        "LxmertForPreTraining",
        "LxmertForQuestionAnswering",
        "LxmertModel",
        "LxmertPreTrainedModel",
        "LxmertVisualFeatureEncoder",
        "LxmertXLayer",
    ]

if is_tf_available():
    _import_structure["modeling_tf_lxmert"] = [
        "TF_LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFLxmertForPreTraining",
        "TFLxmertMainLayer",
        "TFLxmertModel",
        "TFLxmertPreTrainedModel",
        "TFLxmertVisualFeatureEncoder",
    ]


if TYPE_CHECKING:
    from .configuration_lxmert import LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP, LxmertConfig
    from .tokenization_lxmert import LxmertTokenizer

    if is_tokenizers_available():
        from .tokenization_lxmert_fast import LxmertTokenizerFast

    if is_torch_available():
        from .modeling_lxmert import (
            LxmertEncoder,
            LxmertForPreTraining,
@@ -35,7 +69,7 @@ if is_torch_available():
            LxmertXLayer,
        )

    if is_tf_available():
        from .modeling_tf_lxmert import (
            TF_LXMERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFLxmertForPreTraining,
@@ -44,3 +78,21 @@ if is_tf_available():
            TFLxmertPreTrainedModel,
            TFLxmertVisualFeatureEncoder,
        )
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/marian/__init__.py
@@ -15,14 +15,43 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import (
    _BaseLazyModule,
    is_sentencepiece_available,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


_import_structure = {
    "configuration_marian": ["MARIAN_PRETRAINED_CONFIG_ARCHIVE_MAP", "MarianConfig"],
}

if is_sentencepiece_available():
    _import_structure["tokenization_marian"] = ["MarianTokenizer"]

if is_torch_available():
    _import_structure["modeling_marian"] = [
        "MARIAN_PRETRAINED_MODEL_ARCHIVE_LIST",
        "MarianModel",
        "MarianMTModel",
        "MarianPreTrainedModel",
    ]

if is_tf_available():
    _import_structure["modeling_tf_marian"] = ["TFMarianMTModel"]


if TYPE_CHECKING:
    from .configuration_marian import MARIAN_PRETRAINED_CONFIG_ARCHIVE_MAP, MarianConfig

    if is_sentencepiece_available():
        from .tokenization_marian import MarianTokenizer

    if is_torch_available():
        from .modeling_marian import (
            MARIAN_PRETRAINED_MODEL_ARCHIVE_LIST,
            MarianModel,
@@ -30,5 +59,23 @@ if is_torch_available():
            MarianPreTrainedModel,
        )

    if is_tf_available():
        from .modeling_tf_marian import TFMarianMTModel
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/mbart/__init__.py
@@ -15,17 +15,51 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import (
    _BaseLazyModule,
    is_sentencepiece_available,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


_import_structure = {
    "configuration_mbart": ["MBART_PRETRAINED_CONFIG_ARCHIVE_MAP", "MBartConfig"],
}

if is_sentencepiece_available():
    _import_structure["tokenization_mbart"] = ["MBartTokenizer"]

if is_tokenizers_available():
    _import_structure["tokenization_mbart_fast"] = ["MBartTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_mbart"] = [
        "MBART_PRETRAINED_MODEL_ARCHIVE_LIST",
        "MBartForConditionalGeneration",
        "MBartForQuestionAnswering",
        "MBartForSequenceClassification",
        "MBartModel",
        "MBartPreTrainedModel",
    ]

if is_tf_available():
    _import_structure["modeling_tf_mbart"] = ["TFMBartForConditionalGeneration"]


if TYPE_CHECKING:
    from .configuration_mbart import MBART_PRETRAINED_CONFIG_ARCHIVE_MAP, MBartConfig

    if is_sentencepiece_available():
        from .tokenization_mbart import MBartTokenizer

    if is_tokenizers_available():
        from .tokenization_mbart_fast import MBartTokenizerFast

    if is_torch_available():
        from .modeling_mbart import (
            MBART_PRETRAINED_MODEL_ARCHIVE_LIST,
            MBartForConditionalGeneration,
@@ -35,5 +69,23 @@ if is_torch_available():
            MBartPreTrainedModel,
        )

    if is_tf_available():
        from .modeling_tf_mbart import TFMBartForConditionalGeneration
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/mmbt/__init__.py
@@ -16,9 +16,39 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_torch_available


_import_structure = {
    "configuration_mmbt": ["MMBTConfig"],
}

if is_torch_available():
    _import_structure["modeling_mmbt"] = ["MMBTForClassification", "MMBTModel", "ModalEmbeddings"]


if TYPE_CHECKING:
    from .configuration_mmbt import MMBTConfig

    if is_torch_available():
        from .modeling_mmbt import MMBTForClassification, MMBTModel, ModalEmbeddings
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/mobilebert/__init__.py
@@ -16,15 +16,59 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available


_import_structure = {
    "configuration_mobilebert": ["MOBILEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "MobileBertConfig"],
    "tokenization_mobilebert": ["MobileBertTokenizer"],
}

if is_tokenizers_available():
    _import_structure["tokenization_mobilebert_fast"] = ["MobileBertTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_mobilebert"] = [
        "MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "MobileBertForMaskedLM",
        "MobileBertForMultipleChoice",
        "MobileBertForNextSentencePrediction",
        "MobileBertForPreTraining",
        "MobileBertForQuestionAnswering",
        "MobileBertForSequenceClassification",
        "MobileBertForTokenClassification",
        "MobileBertLayer",
        "MobileBertModel",
        "MobileBertPreTrainedModel",
        "load_tf_weights_in_mobilebert",
    ]

if is_tf_available():
    _import_structure["modeling_tf_mobilebert"] = [
        "TF_MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFMobileBertForMaskedLM",
        "TFMobileBertForMultipleChoice",
        "TFMobileBertForNextSentencePrediction",
        "TFMobileBertForPreTraining",
        "TFMobileBertForQuestionAnswering",
        "TFMobileBertForSequenceClassification",
        "TFMobileBertForTokenClassification",
        "TFMobileBertMainLayer",
        "TFMobileBertModel",
        "TFMobileBertPreTrainedModel",
    ]


if TYPE_CHECKING:
    from .configuration_mobilebert import MOBILEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, MobileBertConfig
    from .tokenization_mobilebert import MobileBertTokenizer

    if is_tokenizers_available():
        from .tokenization_mobilebert_fast import MobileBertTokenizerFast

    if is_torch_available():
        from .modeling_mobilebert import (
            MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            MobileBertForMaskedLM,
@@ -40,7 +84,7 @@ if is_torch_available():
            load_tf_weights_in_mobilebert,
        )

    if is_tf_available():
        from .modeling_tf_mobilebert import (
            TF_MOBILEBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFMobileBertForMaskedLM,
@@ -54,3 +98,21 @@ if is_tf_available():
            TFMobileBertModel,
            TFMobileBertPreTrainedModel,
        )
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/mpnet/__init__.py
@@ -16,15 +16,61 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import (
    _BaseLazyModule,
    is_flax_available,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


_import_structure = {
    "configuration_mpnet": ["MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "MPNetConfig"],
    "tokenization_mpnet": ["MPNetTokenizer"],
}

if is_tokenizers_available():
    _import_structure["tokenization_mpnet_fast"] = ["MPNetTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_mpnet"] = [
        "MPNET_PRETRAINED_MODEL_ARCHIVE_LIST",
        "MPNetForMaskedLM",
        "MPNetForMultipleChoice",
        "MPNetForQuestionAnswering",
        "MPNetForSequenceClassification",
        "MPNetForTokenClassification",
        "MPNetLayer",
        "MPNetModel",
        "MPNetPreTrainedModel",
    ]

if is_tf_available():
    _import_structure["modeling_tf_mpnet"] = [
        "TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFMPNetEmbeddings",
        "TFMPNetForMaskedLM",
        "TFMPNetForMultipleChoice",
        "TFMPNetForQuestionAnswering",
        "TFMPNetForSequenceClassification",
        "TFMPNetForTokenClassification",
        "TFMPNetMainLayer",
        "TFMPNetModel",
        "TFMPNetPreTrainedModel",
    ]


if TYPE_CHECKING:
    from .configuration_mpnet import MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP, MPNetConfig
    from .tokenization_mpnet import MPNetTokenizer

    if is_tokenizers_available():
        from .tokenization_mpnet_fast import MPNetTokenizerFast

    if is_torch_available():
        from .modeling_mpnet import (
            MPNET_PRETRAINED_MODEL_ARCHIVE_LIST,
            MPNetForMaskedLM,
@@ -37,7 +83,7 @@ if is_torch_available():
            MPNetPreTrainedModel,
        )

    if is_tf_available():
        from .modeling_tf_mpnet import (
            TF_MPNET_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFMPNetEmbeddings,
@@ -50,3 +96,21 @@ if is_tf_available():
            TFMPNetModel,
            TFMPNetPreTrainedModel,
        )
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/mt5/__init__.py
@@ -16,8 +16,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import (
    _BaseLazyModule,
    is_sentencepiece_available,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


if is_sentencepiece_available():
@@ -30,8 +37,58 @@ if is_tokenizers_available():
    MT5TokenizerFast = T5TokenizerFast

_import_structure = {
    "configuration_mt5": ["MT5Config"],
}

if is_torch_available():
    _import_structure["modeling_mt5"] = ["MT5EncoderModel", "MT5ForConditionalGeneration", "MT5Model"]

if is_tf_available():
    _import_structure["modeling_tf_mt5"] = ["TFMT5EncoderModel", "TFMT5ForConditionalGeneration", "TFMT5Model"]


if TYPE_CHECKING:
    from .configuration_mt5 import MT5Config

    if is_sentencepiece_available():
        from ..t5.tokenization_t5 import T5Tokenizer

        MT5Tokenizer = T5Tokenizer

    if is_tokenizers_available():
        from ..t5.tokenization_t5_fast import T5TokenizerFast

        MT5TokenizerFast = T5TokenizerFast

    if is_torch_available():
        from .modeling_mt5 import MT5EncoderModel, MT5ForConditionalGeneration, MT5Model

    if is_tf_available():
        from .modeling_tf_mt5 import TFMT5EncoderModel, TFMT5ForConditionalGeneration, TFMT5Model
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)
        def __getattr__(self, name):
            if name == "MT5Tokenizer":
                return MT5Tokenizer
            elif name == "MT5TokenizerFast":
                return MT5TokenizerFast
            else:
                return super().__getattr__(name)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
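
mt5 is the one init here that overrides `__getattr__`: `MT5Tokenizer` and `MT5TokenizerFast` are module-level aliases of the T5 classes rather than names defined in an mt5 submodule, so `_import_structure` (which only maps submodules to the names they define) has no entry for them, and the proxy must serve them from the enclosing scope. A reduced, hypothetical version of that special case:

import types

T5Tokenizer = object        # stand-in for the class imported from ..t5 above
MT5Tokenizer = T5Tokenizer  # module-level alias: no mt5 submodule defines it


class _Proxy(types.ModuleType):
    def __getattr__(self, name):
        if name == "MT5Tokenizer":
            # Served straight from this file's globals, not via _import_structure.
            return MT5Tokenizer
        raise AttributeError(f"module {self.__name__!r} has no attribute {name!r}")


proxy = _Proxy("mt5_demo")
assert proxy.MT5Tokenizer is T5Tokenizer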
src/transformers/models/openai/__init__.py
@@ -16,15 +16,50 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available


_import_structure = {
    "configuration_openai": ["OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP", "OpenAIGPTConfig"],
    "tokenization_openai": ["OpenAIGPTTokenizer"],
}

if is_tokenizers_available():
    _import_structure["tokenization_openai_fast"] = ["OpenAIGPTTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_openai"] = [
        "OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "OpenAIGPTDoubleHeadsModel",
        "OpenAIGPTForSequenceClassification",
        "OpenAIGPTLMHeadModel",
        "OpenAIGPTModel",
        "OpenAIGPTPreTrainedModel",
        "load_tf_weights_in_openai_gpt",
    ]

if is_tf_available():
    _import_structure["modeling_tf_openai"] = [
        "TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFOpenAIGPTDoubleHeadsModel",
        "TFOpenAIGPTForSequenceClassification",
        "TFOpenAIGPTLMHeadModel",
        "TFOpenAIGPTMainLayer",
        "TFOpenAIGPTModel",
        "TFOpenAIGPTPreTrainedModel",
    ]


if TYPE_CHECKING:
    from .configuration_openai import OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP, OpenAIGPTConfig
    from .tokenization_openai import OpenAIGPTTokenizer

    if is_tokenizers_available():
        from .tokenization_openai_fast import OpenAIGPTTokenizerFast

    if is_torch_available():
        from .modeling_openai import (
            OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST,
            OpenAIGPTDoubleHeadsModel,
@@ -35,7 +70,7 @@ if is_torch_available():
            load_tf_weights_in_openai_gpt,
        )

    if is_tf_available():
        from .modeling_tf_openai import (
            TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFOpenAIGPTDoubleHeadsModel,
@@ -45,3 +80,21 @@ if is_tf_available():
            TFOpenAIGPTModel,
            TFOpenAIGPTPreTrainedModel,
        )
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/pegasus/__init__.py
@@ -15,17 +15,49 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import (
    _BaseLazyModule,
    is_sentencepiece_available,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


_import_structure = {
    "configuration_pegasus": ["PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP", "PegasusConfig"],
}

if is_sentencepiece_available():
    _import_structure["tokenization_pegasus"] = ["PegasusTokenizer"]

if is_tokenizers_available():
    _import_structure["tokenization_pegasus_fast"] = ["PegasusTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_pegasus"] = [
        "PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST",
        "PegasusForConditionalGeneration",
        "PegasusModel",
        "PegasusPreTrainedModel",
    ]

if is_tf_available():
    _import_structure["modeling_tf_pegasus"] = ["TFPegasusForConditionalGeneration"]


if TYPE_CHECKING:
    from .configuration_pegasus import PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP, PegasusConfig

    if is_sentencepiece_available():
        from .tokenization_pegasus import PegasusTokenizer

    if is_tokenizers_available():
        from .tokenization_pegasus_fast import PegasusTokenizerFast

    if is_torch_available():
        from .modeling_pegasus import (
            PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST,
            PegasusForConditionalGeneration,
@@ -33,5 +65,23 @@ if is_torch_available():
            PegasusPreTrainedModel,
        )

    if is_tf_available():
        from .modeling_tf_pegasus import TFPegasusForConditionalGeneration
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/phobert/__init__.py
@@ -16,4 +16,33 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule


_import_structure = {
    "tokenization_phobert": ["PhobertTokenizer"],
}


if TYPE_CHECKING:
    from .tokenization_phobert import PhobertTokenizer
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/prophetnet/__init__.py
@@ -16,12 +16,33 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_torch_available


_import_structure = {
    "configuration_prophetnet": ["PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP", "ProphetNetConfig"],
    "tokenization_prophetnet": ["ProphetNetTokenizer"],
}

if is_torch_available():
    _import_structure["modeling_prophetnet"] = [
        "PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST",
        "ProphetNetDecoder",
        "ProphetNetEncoder",
        "ProphetNetForCausalLM",
        "ProphetNetForConditionalGeneration",
        "ProphetNetModel",
        "ProphetNetPreTrainedModel",
    ]


if TYPE_CHECKING:
    from .configuration_prophetnet import PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP, ProphetNetConfig
    from .tokenization_prophetnet import ProphetNetTokenizer

    if is_torch_available():
        from .modeling_prophetnet import (
            PROPHETNET_PRETRAINED_MODEL_ARCHIVE_LIST,
            ProphetNetDecoder,
@@ -31,3 +52,21 @@ if is_torch_available():
            ProphetNetModel,
            ProphetNetPreTrainedModel,
        )
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/rag/__init__.py
@@ -16,11 +16,43 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_torch_available


_import_structure = {
    "configuration_rag": ["RagConfig"],
    "retrieval_rag": ["RagRetriever"],
    "tokenization_rag": ["RagTokenizer"],
}

if is_torch_available():
    _import_structure["modeling_rag"] = ["RagModel", "RagSequenceForGeneration", "RagTokenForGeneration"]


if TYPE_CHECKING:
    from .configuration_rag import RagConfig
    from .retrieval_rag import RagRetriever
    from .tokenization_rag import RagTokenizer

    if is_torch_available():
        from .modeling_rag import RagModel, RagSequenceForGeneration, RagTokenForGeneration
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/reformer/__init__.py
@@ -16,17 +16,44 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_sentencepiece_available, is_tokenizers_available, is_torch_available


_import_structure = {
    "configuration_reformer": ["REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "ReformerConfig"],
}

if is_sentencepiece_available():
    _import_structure["tokenization_reformer"] = ["ReformerTokenizer"]

if is_tokenizers_available():
    _import_structure["tokenization_reformer_fast"] = ["ReformerTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_reformer"] = [
        "REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "ReformerAttention",
        "ReformerForMaskedLM",
        "ReformerForQuestionAnswering",
        "ReformerForSequenceClassification",
        "ReformerLayer",
        "ReformerModel",
        "ReformerModelWithLMHead",
    ]


if TYPE_CHECKING:
    from .configuration_reformer import REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, ReformerConfig

    if is_sentencepiece_available():
        from .tokenization_reformer import ReformerTokenizer

    if is_tokenizers_available():
        from .tokenization_reformer_fast import ReformerTokenizerFast

    if is_torch_available():
        from .modeling_reformer import (
            REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            ReformerAttention,
@@ -37,3 +64,21 @@ if is_torch_available():
            ReformerModel,
            ReformerModelWithLMHead,
        )
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
src/transformers/models/retribert/__init__.py
@@ -16,13 +16,55 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ...file_utils import _BaseLazyModule, is_tokenizers_available, is_torch_available


_import_structure = {
    "configuration_retribert": ["RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "RetriBertConfig"],
    "tokenization_retribert": ["RetriBertTokenizer"],
}

if is_tokenizers_available():
    _import_structure["tokenization_retribert_fast"] = ["RetriBertTokenizerFast"]

if is_torch_available():
    _import_structure["modeling_retribert"] = [
        "RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "RetriBertModel",
        "RetriBertPreTrainedModel",
    ]


if TYPE_CHECKING:
    from .configuration_retribert import RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, RetriBertConfig
    from .tokenization_retribert import RetriBertTokenizer

    if is_tokenizers_available():
        from .tokenization_retribert_fast import RetriBertTokenizerFast

    if is_torch_available():
        from .modeling_retribert import (
            RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            RetriBertModel,
            RetriBertPreTrainedModel,
        )
else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            return importlib.import_module("." + module_name, self.__name__)

    sys.modules[__name__] = _LazyModule(__name__, _import_structure)