"docs/source/vscode:/vscode.git/clone" did not exist on "331db8cc02359bb01d1dde539d7d4dae789cf82e"
Unverified Commit 018d61fa authored by Andrés Felipe Cruz, committed by GitHub

Moving transformers package import statements to relative imports in some files (#5796)



* Moving `from transformers` import statements to relative imports in some files under src/

* Import order
Co-authored-by: Lysandre Debut <lysandre@huggingface.co>
parent 7214954d
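
For readers skimming the hunks below: the change is purely mechanical. Inside the package's own source files, absolute imports such as `from transformers import AutoConfig` become relative imports such as `from .configuration_auto import AutoConfig`. A minimal sketch of why the two forms differ, using a hypothetical package `mypkg` (not taken from the diff):

# mypkg/config.py
class Config:
    """Toy stand-in for a *Config class."""


# mypkg/model.py
#
# Absolute form, as removed in this commit:
#
#   from mypkg import Config
#
# This resolves through whichever `mypkg` is found on sys.path (possibly an
# older installed copy rather than this source tree) and requires
# mypkg/__init__.py to have finished running, so it can fail with a circular
# import while the package is still initializing.
#
# Relative form, as introduced in this commit: the submodule is looked up on
# this package instance directly, without depending on __init__.py state.
from .config import Config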
@@ -69,7 +69,7 @@ class EncoderDecoderConfig(PretrainedConfig):
         decoder_config = kwargs.pop("decoder")
         decoder_model_type = decoder_config.pop("model_type")

-        from transformers import AutoConfig
+        from .configuration_auto import AutoConfig

         self.encoder = AutoConfig.for_model(encoder_model_type, **encoder_config)
         self.decoder = AutoConfig.for_model(decoder_model_type, **decoder_config)
@@ -56,12 +56,12 @@ class EncoderDecoderModel(PreTrainedModel):
         super().__init__(config)

         if encoder is None:
-            from transformers import AutoModel
+            from .modeling_auto import AutoModel

             encoder = AutoModel.from_config(config.encoder)

         if decoder is None:
-            from transformers import AutoModelForCausalLM
+            from .modeling_auto import AutoModelForCausalLM

             decoder = AutoModelForCausalLM.from_config(config.decoder)

@@ -159,7 +159,7 @@ class EncoderDecoderModel(PreTrainedModel):
             from .modeling_auto import AutoModelForCausalLM

         if "config" not in kwargs_decoder:
-            from transformers import AutoConfig
+            from .configuration_auto import AutoConfig

             decoder_config = AutoConfig.from_pretrained(decoder_pretrained_model_name_or_path)
         if decoder_config.is_decoder is False:
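
Note that in the encoder-decoder hunks above the `Auto*` imports stay inside the method bodies rather than moving to the top of the module; deferring an import to call time like this is a standard way to break an import cycle. A minimal sketch, again with hypothetical modules:

# mypkg/a.py
from .b import B  # safe: b.py has no top-level import of a.py


class A:
    pass


# mypkg/b.py
#
# A top-level `from .a import A` here would close the cycle a -> b -> a while
# a.py is still executing, raising ImportError. Deferring the import to call
# time breaks the cycle, because by then both modules are fully initialized.
class B:
    def make_a(self):
        from .a import A  # resolved only when make_a() runs
        return A()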
@@ -15,8 +15,8 @@
 """PyTorch MarianMTModel model, ported from the Marian C++ repo."""

-from transformers.configuration_marian import MarianConfig
-from transformers.modeling_bart import BartForConditionalGeneration
+from .configuration_marian import MarianConfig
+from .modeling_bart import BartForConditionalGeneration

 MARIAN_PRETRAINED_MODEL_ARCHIVE_LIST = [
@@ -32,8 +32,6 @@ import torch.nn.functional as F
 from torch import nn
 from torch.nn import CrossEntropyLoss, MSELoss

-from transformers.modeling_bert import BertIntermediate
-
 from .activations import gelu, gelu_new, swish
 from .configuration_mobilebert import MobileBertConfig
 from .file_utils import (
@@ -43,6 +41,7 @@ from .file_utils import (
     add_start_docstrings_to_callable,
     replace_return_docstrings,
 )
+from .modeling_bert import BertIntermediate
 from .modeling_outputs import (
     BaseModelOutput,
     BaseModelOutputWithPooling,
@@ -2,8 +2,7 @@ import logging

 import tensorflow as tf

-from transformers import ElectraConfig
-
+from .configuration_electra import ElectraConfig
 from .file_utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_callable
 from .modeling_tf_bert import ACT2FN, TFBertEncoder, TFBertPreTrainedModel
 from .modeling_tf_utils import (
@@ -2,7 +2,7 @@ import os
 import unittest
 from distutils.util import strtobool

-from transformers.file_utils import _tf_available, _torch_available, _torch_tpu_available
+from .file_utils import _tf_available, _torch_available, _torch_tpu_available

 SMALL_MODEL_IDENTIFIER = "julien-c/bert-xsmall-dummy"
@@ -18,8 +18,6 @@
 import logging
 from collections import OrderedDict

-from transformers.configuration_mobilebert import MobileBertConfig
-
 from .configuration_auto import (
     AlbertConfig,
     AutoConfig,
@@ -44,6 +42,7 @@ from .configuration_auto import (
     XLNetConfig,
 )
 from .configuration_marian import MarianConfig
+from .configuration_mobilebert import MobileBertConfig
 from .configuration_utils import PretrainedConfig
 from .tokenization_albert import AlbertTokenizer
 from .tokenization_bart import BartTokenizer, MBartTokenizer
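
A quick way to sanity-check an import refactor like this (a minimal smoke test under the assumption of a PyTorch install, not a substitute for the CI suite) is to import the package and a symbol from each touched module in a fresh interpreter:

# Fresh-interpreter smoke test: importing classes from the touched modules
# exercises the rewritten relative imports, and any mistake in them surfaces
# as an ImportError right here.
import transformers
from transformers import EncoderDecoderModel, MarianMTModel, MobileBertModel

print(transformers.__version__)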