Unverified Commit 018d61fa authored by Andrés Felipe Cruz, committed by GitHub

Moving transformers package import statements to relative imports in some files (#5796)



* Moving "from transformers" import statements to relative imports in some files under src/

* Import order

Co-authored-by: Lysandre Debut <lysandre@huggingface.co>
parent 7214954d
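Every hunk below applies the same pattern: inside the src/ tree, absolute imports of the package's own name ("from transformers import ...") become relative imports from the sibling module that actually defines the symbol. A minimal before/after sketch of the pattern, using names taken from the first hunk:

# Before: the module imports from its own package name, which routes
# through transformers/__init__.py and can set up import cycles.
from transformers import AutoConfig

# After: import directly from the sibling module that defines AutoConfig.
from .configuration_auto import AutoConfig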
@@ -69,7 +69,7 @@ class EncoderDecoderConfig(PretrainedConfig):
         decoder_config = kwargs.pop("decoder")
         decoder_model_type = decoder_config.pop("model_type")

-        from transformers import AutoConfig
+        from .configuration_auto import AutoConfig

         self.encoder = AutoConfig.for_model(encoder_model_type, **encoder_config)
         self.decoder = AutoConfig.for_model(decoder_model_type, **decoder_config)
...
@@ -56,12 +56,12 @@ class EncoderDecoderModel(PreTrainedModel):
         super().__init__(config)

         if encoder is None:
-            from transformers import AutoModel
+            from .modeling_auto import AutoModel

             encoder = AutoModel.from_config(config.encoder)

         if decoder is None:
-            from transformers import AutoModelForCausalLM
+            from .modeling_auto import AutoModelForCausalLM

             decoder = AutoModelForCausalLM.from_config(config.decoder)
@@ -159,7 +159,7 @@ class EncoderDecoderModel(PreTrainedModel):
             from .modeling_auto import AutoModelForCausalLM

             if "config" not in kwargs_decoder:
-                from transformers import AutoConfig
+                from .configuration_auto import AutoConfig

                 decoder_config = AutoConfig.from_pretrained(decoder_pretrained_model_name_or_path)
                 if decoder_config.is_decoder is False:
...
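Note that the encoder-decoder hunks above keep the imports inside the method bodies rather than hoisting them to module level. A plausible reading (an assumption, not stated in the commit message) is that the import is deferred to avoid a circular dependency, since the auto-mapping modules in turn import the concrete model classes. A hypothetical sketch of this deferred-import pattern, with illustrative names:

# Hypothetical illustration only; _make_decoder is not a function in the
# library. The import is deferred into the function body so this module
# and .modeling_auto can depend on each other without a circular-import
# error when either one is loaded first.
def _make_decoder(decoder_config):
    from .modeling_auto import AutoModelForCausalLM

    return AutoModelForCausalLM.from_config(decoder_config)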
@@ -15,8 +15,8 @@
 """PyTorch MarianMTModel model, ported from the Marian C++ repo."""

-from transformers.configuration_marian import MarianConfig
-from transformers.modeling_bart import BartForConditionalGeneration
+from .configuration_marian import MarianConfig
+from .modeling_bart import BartForConditionalGeneration


 MARIAN_PRETRAINED_MODEL_ARCHIVE_LIST = [
...
@@ -32,8 +32,6 @@ import torch.nn.functional as F
 from torch import nn
 from torch.nn import CrossEntropyLoss, MSELoss

-from transformers.modeling_bert import BertIntermediate
-
 from .activations import gelu, gelu_new, swish
 from .configuration_mobilebert import MobileBertConfig
 from .file_utils import (
@@ -43,6 +41,7 @@ from .file_utils import (
     add_start_docstrings_to_callable,
     replace_return_docstrings,
 )
+from .modeling_bert import BertIntermediate
 from .modeling_outputs import (
     BaseModelOutput,
     BaseModelOutputWithPooling,
...
@@ -2,8 +2,7 @@ import logging

 import tensorflow as tf

-from transformers import ElectraConfig
-
+from .configuration_electra import ElectraConfig
 from .file_utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_callable
 from .modeling_tf_bert import ACT2FN, TFBertEncoder, TFBertPreTrainedModel
 from .modeling_tf_utils import (
...
@@ -2,7 +2,7 @@ import os
 import unittest
 from distutils.util import strtobool

-from transformers.file_utils import _tf_available, _torch_available, _torch_tpu_available
+from .file_utils import _tf_available, _torch_available, _torch_tpu_available


 SMALL_MODEL_IDENTIFIER = "julien-c/bert-xsmall-dummy"
...
@@ -18,8 +18,6 @@
 import logging
 from collections import OrderedDict

-from transformers.configuration_mobilebert import MobileBertConfig
-
 from .configuration_auto import (
     AlbertConfig,
     AutoConfig,
@@ -44,6 +42,7 @@ from .configuration_auto import (
     XLNetConfig,
 )
 from .configuration_marian import MarianConfig
+from .configuration_mobilebert import MobileBertConfig
 from .configuration_utils import PretrainedConfig
 from .tokenization_albert import AlbertTokenizer
 from .tokenization_bart import BartTokenizer, MBartTokenizer