Unverified Commit 053efc5d authored by Sylvain Gugger, committed by GitHub

Fix imports in conversion scripts (#9674)

parent 2390c16f
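
Why the change is needed: the conversion scripts live inside the `transformers` package but are meant to be run directly (e.g. `python convert_..._to_pytorch.py`), so package-relative imports such as `from . import ...` or `from ...utils import logging` fail with `ImportError: attempted relative import with no known parent package` when the file executes as `__main__`. Below is a minimal sketch of the failure mode and of the import style this commit switches to; the file name and model classes are illustrative, not taken from the diff.

```python
# sketch_convert_script.py -- illustrative only, not a file from this commit.
#
# Running a module that sits inside a package directly, e.g.
#     python sketch_convert_script.py
# leaves __package__ empty, so package-relative imports such as
#     from . import BertConfig       # ImportError: attempted relative import
#     from ...utils import logging   # with no known parent package
# cannot be resolved. Importing through the installed top-level package
# works whether the file is run as a script or imported as a module:
from transformers import BertConfig, BertForMaskedLM
from transformers.utils import logging

logging.set_verbosity_info()
logger = logging.get_logger(__name__)

if __name__ == "__main__":
    # Both names resolve here, which is what the absolute imports in this
    # commit restore for each conversion script.
    logger.info("Imports resolved: %s, %s", BertConfig.__name__, BertForMaskedLM.__name__)
```
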
@@ -19,6 +19,8 @@ import argparse
 import torch
+from transformers import ProphetNetForConditionalGeneration, XLMProphetNetForConditionalGeneration, logging
 # transformers_old should correspond to branch `save_old_prophetnet_model_structure` here
 # original prophetnet_checkpoints are saved under `patrickvonplaten/..._old` respectively
 from transformers_old.modeling_prophetnet import (
@@ -28,8 +30,6 @@ from transformers_old.modeling_xlm_prophetnet import (
     XLMProphetNetForConditionalGeneration as XLMProphetNetForConditionalGenerationOld,
 )
-from . import ProphetNetForConditionalGeneration, XLMProphetNetForConditionalGeneration, logging
 logger = logging.get_logger(__name__)
 logging.set_verbosity_info()
......
@@ -21,8 +21,8 @@ import pickle
 import numpy as np
 import torch
-from ...utils import logging
-from . import ReformerConfig, ReformerModelWithLMHead
+from transformers import ReformerConfig, ReformerModelWithLMHead
+from transformers.utils import logging
 logging.set_verbosity_info()
......
@@ -24,9 +24,15 @@ from fairseq.models.roberta import RobertaModel as FairseqRobertaModel
 from fairseq.modules import TransformerSentenceEncoderLayer
 from packaging import version
-from ...models.bert.modeling_bert import BertIntermediate, BertLayer, BertOutput, BertSelfAttention, BertSelfOutput
-from ...utils import logging
-from .modeling_roberta import RobertaConfig, RobertaForMaskedLM, RobertaForSequenceClassification
+from transformers import RobertaConfig, RobertaForMaskedLM, RobertaForSequenceClassification
+from transformers.models.bert.modeling_bert import (
+    BertIntermediate,
+    BertLayer,
+    BertOutput,
+    BertSelfAttention,
+    BertSelfOutput,
+)
+from transformers.utils import logging
 if version.parse(fairseq.__version__) < version.parse("0.9.0"):
......
@@ -17,8 +17,8 @@
 import argparse
-from ...utils import logging
-from . import T5Config, T5ForConditionalGeneration, load_tf_weights_in_t5
+from transformers import T5Config, T5ForConditionalGeneration, load_tf_weights_in_t5
+from transformers.utils import logging
 logging.set_verbosity_info()
......
@@ -17,8 +17,7 @@
 import argparse
-from ...utils import logging
-from . import (
+from transformers import (
     TapasConfig,
     TapasForMaskedLM,
     TapasForQuestionAnswering,
@@ -27,6 +26,7 @@ from . import (
     TapasTokenizer,
     load_tf_weights_in_tapas,
 )
+from transformers.utils import logging
 logging.set_verbosity_info()
......
@@ -22,11 +22,11 @@ import sys
 import torch
-from ...file_utils import CONFIG_NAME, WEIGHTS_NAME
-from ...utils import logging
-from . import TransfoXLConfig, TransfoXLLMHeadModel, load_tf_weights_in_transfo_xl
-from . import tokenization_transfo_xl as data_utils
-from .tokenization_transfo_xl import CORPUS_NAME, VOCAB_FILES_NAMES
+from transformers import TransfoXLConfig, TransfoXLLMHeadModel, load_tf_weights_in_transfo_xl
+from transformers.file_utils import CONFIG_NAME, WEIGHTS_NAME
+from transformers.models.transfo_xl import tokenization_transfo_xl as data_utils
+from transformers.models.transfo_xl.tokenization_transfo_xl import CORPUS_NAME, VOCAB_FILES_NAMES
+from transformers.utils import logging
 logging.set_verbosity_info()
......
@@ -21,9 +21,9 @@ import json
 import numpy
 import torch
-from ...file_utils import CONFIG_NAME, WEIGHTS_NAME
-from ...utils import logging
-from .tokenization_xlm import VOCAB_FILES_NAMES
+from transformers.file_utils import CONFIG_NAME, WEIGHTS_NAME
+from transformers.models.xlm.tokenization_xlm import VOCAB_FILES_NAMES
+from transformers.utils import logging
 logging.set_verbosity_info()
......
@@ -20,15 +20,15 @@ import os
 import torch
-from ...file_utils import CONFIG_NAME, WEIGHTS_NAME
-from ...utils import logging
-from . import (
+from transformers import (
     XLNetConfig,
     XLNetForQuestionAnswering,
     XLNetForSequenceClassification,
     XLNetLMHeadModel,
     load_tf_weights_in_xlnet,
 )
+from transformers.file_utils import CONFIG_NAME, WEIGHTS_NAME
+from transformers.utils import logging
 GLUE_TASKS_NUM_LABELS = {
......
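
A quick, hedged sanity check of the new import style (assuming a `transformers` version contemporary with this commit, before the TransfoXL modules were moved under `transformers.models.deprecated`): the absolute paths introduced above expose the same objects the old relative imports referenced.

```python
# Illustrative check, not part of the commit: the absolute module paths used in
# the diff resolve, and the aliased module and the direct import name the same
# constant that the old relative imports pointed at.
from transformers import TransfoXLConfig, load_tf_weights_in_transfo_xl
from transformers.models.transfo_xl import tokenization_transfo_xl as data_utils
from transformers.models.transfo_xl.tokenization_transfo_xl import CORPUS_NAME, VOCAB_FILES_NAMES

assert data_utils.CORPUS_NAME is CORPUS_NAME
print(TransfoXLConfig.model_type, CORPUS_NAME, VOCAB_FILES_NAMES)
```
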