"src/vscode:/vscode.git/clone" did not exist on "4ff8a2925275b177aa56875931699239aaeaf644"
__init__.py 5.37 KB
Newer Older
thomwolf's avatar
thomwolf committed
1
__version__ = "1.2.0"

# Work around to update TensorFlow's absl.logging threshold which alters the
# default Python logging output behavior when present.
# see: https://github.com/abseil/abseil-py/issues/99
# and: https://github.com/tensorflow/tensorflow/issues/26691#issuecomment-500369493
try:
    import absl.logging
    absl.logging.set_verbosity('info')
    absl.logging.set_stderrthreshold('info')
    absl.logging._warn_preinit_stderr = False
except:  # absl may be absent or behave differently across versions; any failure here is non-fatal
    pass

import logging

logger = logging.getLogger(__name__)  # pylint: disable=invalid-name

# Tokenizer
from .tokenization_utils import PreTrainedTokenizer
from .tokenization_auto import AutoTokenizer
from .tokenization_bert import BertTokenizer, BasicTokenizer, WordpieceTokenizer
from .tokenization_openai import OpenAIGPTTokenizer
from .tokenization_transfo_xl import (TransfoXLTokenizer, TransfoXLCorpus)
from .tokenization_gpt2 import GPT2Tokenizer
from .tokenization_xlnet import XLNetTokenizer, SPIECE_UNDERLINE
from .tokenization_xlm import XLMTokenizer
from .tokenization_roberta import RobertaTokenizer
from .tokenization_distilbert import DistilBertTokenizer
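
# Typical usage of the tokenizer classes above (a sketch, assuming the package is
# installed as pytorch_transformers and the 'bert-base-uncased' files are reachable):
#
#   from pytorch_transformers import BertTokenizer
#   tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
#   input_ids = tokenizer.encode("Hello, world!")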

# Configurations
from .configuration_utils import PretrainedConfig
from .configuration_auto import AutoConfig
from .configuration_bert import BertConfig, BERT_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_openai import OpenAIGPTConfig, OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_transfo_xl import TransfoXLConfig, TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_gpt2 import GPT2Config, GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_xlnet import XLNetConfig, XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_xlm import XLMConfig, XLM_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_roberta import RobertaConfig, ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_distilbert import DistilBertConfig, DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP
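
# The configuration classes above follow the same pattern (sketch, assuming the
# pretrained identifier is reachable):
#
#   config = BertConfig.from_pretrained('bert-base-uncased')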

# Modeling
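# PyTorch-dependent classes below are re-exported only when torch can be imported;
# the torch_available flag records the outcome of that check.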
try:
    import torch
    torch_available = True  # pylint: disable=invalid-name
except ImportError:
    torch_available = False  # pylint: disable=invalid-name

if torch_available:
    logger.info("PyTorch version {} available.".format(torch.__version__))

    from .modeling_utils import (PreTrainedModel, prune_layer, Conv1D)
    from .modeling_auto import (AutoModel, AutoModelForSequenceClassification, AutoModelForQuestionAnswering,
                                AutoModelWithLMHead)

    from .modeling_bert import (BertPreTrainedModel, BertModel, BertForPreTraining,
                                BertForMaskedLM, BertForNextSentencePrediction,
                                BertForSequenceClassification, BertForMultipleChoice,
                                BertForTokenClassification, BertForQuestionAnswering,
                                load_tf_weights_in_bert, BERT_PRETRAINED_MODEL_ARCHIVE_MAP)
    from .modeling_openai import (OpenAIGPTPreTrainedModel, OpenAIGPTModel,
                                OpenAIGPTLMHeadModel, OpenAIGPTDoubleHeadsModel,
                                load_tf_weights_in_openai_gpt, OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_MAP)
    from .modeling_transfo_xl import (TransfoXLPreTrainedModel, TransfoXLModel, TransfoXLLMHeadModel,
                                    load_tf_weights_in_transfo_xl, TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP)
    from .modeling_gpt2 import (GPT2PreTrainedModel, GPT2Model,
                                GPT2LMHeadModel, GPT2DoubleHeadsModel,
                                load_tf_weights_in_gpt2, GPT2_PRETRAINED_MODEL_ARCHIVE_MAP)
    from .modeling_xlnet import (XLNetPreTrainedModel, XLNetModel, XLNetLMHeadModel,
                                XLNetForSequenceClassification, XLNetForQuestionAnswering,
                                load_tf_weights_in_xlnet, XLNET_PRETRAINED_MODEL_ARCHIVE_MAP)
    from .modeling_xlm import (XLMPreTrainedModel, XLMModel,
                            XLMWithLMHeadModel, XLMForSequenceClassification,
                            XLMForQuestionAnswering, XLM_PRETRAINED_MODEL_ARCHIVE_MAP)
    from .modeling_roberta import (RobertaForMaskedLM, RobertaModel, RobertaForSequenceClassification,
                                ROBERTA_PRETRAINED_MODEL_ARCHIVE_MAP)
    from .modeling_distilbert import (DistilBertForMaskedLM, DistilBertModel,
                                DistilBertForSequenceClassification, DistilBertForQuestionAnswering,
                                DISTILBERT_PRETRAINED_MODEL_ARCHIVE_MAP)

    # Optimization
    from .optimization import (AdamW, ConstantLRSchedule, WarmupConstantSchedule, WarmupCosineSchedule,
                               WarmupCosineWithHardRestartsSchedule, WarmupLinearSchedule)
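
    # A minimal sketch of how the classes above are commonly wired together
    # (hyperparameter values below are placeholders, not recommendations):
    #
    #   model = BertModel.from_pretrained('bert-base-uncased')
    #   optimizer = AdamW(model.parameters(), lr=5e-5)
    #   scheduler = WarmupLinearSchedule(optimizer, warmup_steps=100, t_total=1000)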


# TensorFlow
try:
    import tensorflow as tf
    tf_available = True  # pylint: disable=invalid-name
except ImportError:
    tf_available = False  # pylint: disable=invalid-name

if tf_available:
    logger.info("TensorFlow version {} available.".format(tf.__version__))

    from .modeling_tf_utils import TFPreTrainedModel
    from .modeling_tf_bert import (TFBertPreTrainedModel, TFBertModel, TFBertForPreTraining,
                                TFBertForMaskedLM, TFBertForNextSentencePrediction, load_pt_weights_in_bert)
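
    # The TF classes above mirror the PyTorch BERT modules and, like them, are
    # re-exported only when the backing framework (here TensorFlow) is importable.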


# Files and general utilities
from .file_utils import (PYTORCH_TRANSFORMERS_CACHE, PYTORCH_PRETRAINED_BERT_CACHE,
                         cached_path, add_start_docstrings, add_end_docstrings,
                         WEIGHTS_NAME, TF_WEIGHTS_NAME, CONFIG_NAME)
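
# cached_path resolves a URL or local filename to a path on disk, downloading
# into the cache directory when needed. Sketch (the URL is illustrative only):
#
#   local_file = cached_path('https://example.com/vocab.txt')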