configuration_auto.py 32.1 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
# coding=utf-8
# Copyright 2018 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Sylvain Gugger's avatar
Sylvain Gugger committed
15
""" Auto Config class."""
16
import importlib
17
import re
18
import warnings
19
from collections import OrderedDict
20
from typing import List, Union
21

Sylvain Gugger's avatar
Sylvain Gugger committed
22
from ...configuration_utils import PretrainedConfig
23
from ...dynamic_module_utils import get_class_from_dynamic_module
24
from ...utils import CONFIG_NAME, logging
Aymeric Augustin's avatar
Aymeric Augustin committed
25

26

27
28
logger = logging.get_logger(__name__)

29
30
31
# Model type string (the value stored in `config.model_type`) -> name of the
# corresponding configuration class. The class objects themselves are imported
# lazily through `_LazyConfigMapping` below, so listing a name here does not
# trigger an import at module load time.
CONFIG_MAPPING_NAMES = OrderedDict(
    [
        # Add configs here
        ("albert", "AlbertConfig"),
        ("bart", "BartConfig"),
        ("beit", "BeitConfig"),
        ("bert", "BertConfig"),
        ("bert-generation", "BertGenerationConfig"),
        ("big_bird", "BigBirdConfig"),
        ("bigbird_pegasus", "BigBirdPegasusConfig"),
        ("blenderbot", "BlenderbotConfig"),
        ("blenderbot-small", "BlenderbotSmallConfig"),
        ("bloom", "BloomConfig"),
        ("camembert", "CamembertConfig"),
        ("canine", "CanineConfig"),
        ("clip", "CLIPConfig"),
        ("codegen", "CodeGenConfig"),
        ("convbert", "ConvBertConfig"),
        ("convnext", "ConvNextConfig"),
        ("ctrl", "CTRLConfig"),
        ("cvt", "CvtConfig"),
        ("data2vec-audio", "Data2VecAudioConfig"),
        ("data2vec-text", "Data2VecTextConfig"),
        ("data2vec-vision", "Data2VecVisionConfig"),
        ("deberta", "DebertaConfig"),
        ("deberta-v2", "DebertaV2Config"),
        ("decision_transformer", "DecisionTransformerConfig"),
        ("deit", "DeiTConfig"),
        ("detr", "DetrConfig"),
        ("distilbert", "DistilBertConfig"),
        ("dpr", "DPRConfig"),
        ("dpt", "DPTConfig"),
        ("electra", "ElectraConfig"),
        ("encoder-decoder", "EncoderDecoderConfig"),
        ("flaubert", "FlaubertConfig"),
        ("flava", "FlavaConfig"),
        ("fnet", "FNetConfig"),
        ("fsmt", "FSMTConfig"),
        ("funnel", "FunnelConfig"),
        ("glpn", "GLPNConfig"),
        ("gpt2", "GPT2Config"),
        ("gpt_neo", "GPTNeoConfig"),
        ("gpt_neox", "GPTNeoXConfig"),
        ("gptj", "GPTJConfig"),
        ("groupvit", "GroupViTConfig"),
        ("hubert", "HubertConfig"),
        ("ibert", "IBertConfig"),
        ("imagegpt", "ImageGPTConfig"),
        ("layoutlm", "LayoutLMConfig"),
        ("layoutlmv2", "LayoutLMv2Config"),
        ("layoutlmv3", "LayoutLMv3Config"),
        ("led", "LEDConfig"),
        ("levit", "LevitConfig"),
        ("longformer", "LongformerConfig"),
        ("longt5", "LongT5Config"),
        ("luke", "LukeConfig"),
        ("lxmert", "LxmertConfig"),
        ("m2m_100", "M2M100Config"),
        ("marian", "MarianConfig"),
        ("maskformer", "MaskFormerConfig"),
        ("mbart", "MBartConfig"),
        ("mctct", "MCTCTConfig"),
        ("megatron-bert", "MegatronBertConfig"),
        ("mobilebert", "MobileBertConfig"),
        ("mpnet", "MPNetConfig"),
        ("mt5", "MT5Config"),
        ("nezha", "NezhaConfig"),
        ("nystromformer", "NystromformerConfig"),
        ("openai-gpt", "OpenAIGPTConfig"),
        ("opt", "OPTConfig"),
        ("pegasus", "PegasusConfig"),
        ("perceiver", "PerceiverConfig"),
        ("plbart", "PLBartConfig"),
        ("poolformer", "PoolFormerConfig"),
        ("prophetnet", "ProphetNetConfig"),
        ("qdqbert", "QDQBertConfig"),
        ("rag", "RagConfig"),
        ("realm", "RealmConfig"),
        ("reformer", "ReformerConfig"),
        ("regnet", "RegNetConfig"),
        ("rembert", "RemBertConfig"),
        ("resnet", "ResNetConfig"),
        ("retribert", "RetriBertConfig"),
        ("roberta", "RobertaConfig"),
        ("roformer", "RoFormerConfig"),
        ("segformer", "SegformerConfig"),
        ("sew", "SEWConfig"),
        ("sew-d", "SEWDConfig"),
        ("speech-encoder-decoder", "SpeechEncoderDecoderConfig"),
        ("speech_to_text", "Speech2TextConfig"),
        ("speech_to_text_2", "Speech2Text2Config"),
        ("splinter", "SplinterConfig"),
        ("squeezebert", "SqueezeBertConfig"),
        ("swin", "SwinConfig"),
        ("t5", "T5Config"),
        ("tapas", "TapasConfig"),
        ("trajectory_transformer", "TrajectoryTransformerConfig"),
        ("transfo-xl", "TransfoXLConfig"),
        ("trocr", "TrOCRConfig"),
        ("unispeech", "UniSpeechConfig"),
        ("unispeech-sat", "UniSpeechSatConfig"),
        ("van", "VanConfig"),
        ("vilt", "ViltConfig"),
        ("vision-encoder-decoder", "VisionEncoderDecoderConfig"),
        ("vision-text-dual-encoder", "VisionTextDualEncoderConfig"),
        ("visual_bert", "VisualBertConfig"),
        ("vit", "ViTConfig"),
        ("vit_mae", "ViTMAEConfig"),
        ("wav2vec2", "Wav2Vec2Config"),
        ("wav2vec2-conformer", "Wav2Vec2ConformerConfig"),
        ("wavlm", "WavLMConfig"),
        ("xglm", "XGLMConfig"),
        ("xlm", "XLMConfig"),
        ("xlm-prophetnet", "XLMProphetNetConfig"),
        ("xlm-roberta", "XLMRobertaConfig"),
        ("xlm-roberta-xl", "XLMRobertaXLConfig"),
        ("xlnet", "XLNetConfig"),
        ("yolos", "YolosConfig"),
        ("yoso", "YosoConfig"),
    ]
)
150

151
# Model type string -> name of the per-model `*_PRETRAINED_CONFIG_ARCHIVE_MAP`
# constant. Only consumed by the deprecated `_LazyLoadAllMappings` below; the
# named constants are resolved lazily from `transformers.models.<module>`.
# NOTE(review): some model types present in CONFIG_MAPPING_NAMES (e.g. "rag",
# "marian") deliberately have no archive map entry here.
CONFIG_ARCHIVE_MAP_MAPPING_NAMES = OrderedDict(
    [
        # Add archive maps here)
        ("albert", "ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bart", "BART_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("beit", "BEIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bert", "BERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("big_bird", "BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bigbird_pegasus", "BIGBIRD_PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("blenderbot", "BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("blenderbot-small", "BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bloom", "BLOOM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("camembert", "CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("canine", "CANINE_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("clip", "CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("codegen", "CODEGEN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("convbert", "CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("convnext", "CONVNEXT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("ctrl", "CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("cvt", "CVT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("data2vec-audio", "DATA2VEC_AUDIO_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("data2vec-text", "DATA2VEC_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("data2vec-vision", "DATA2VEC_VISION_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("deberta", "DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("deberta-v2", "DEBERTA_V2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("deit", "DEIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("detr", "DETR_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("distilbert", "DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("dpr", "DPR_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("dpt", "DPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("electra", "ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("flaubert", "FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("flava", "FLAVA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("fnet", "FNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("fsmt", "FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("funnel", "FUNNEL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("glpn", "GLPN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gpt2", "GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gpt_neo", "GPT_NEO_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gpt_neox", "GPT_NEOX_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gptj", "GPTJ_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("groupvit", "GROUPVIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("hubert", "HUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("ibert", "IBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("imagegpt", "IMAGEGPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("layoutlm", "LAYOUTLM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("layoutlmv2", "LAYOUTLMV2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("layoutlmv3", "LAYOUTLMV3_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("led", "LED_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("levit", "LEVIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("longformer", "LONGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("longt5", "LONGT5_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("luke", "LUKE_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("lxmert", "LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("m2m_100", "M2M_100_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("maskformer", "MASKFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("mbart", "MBART_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("mctct", "MCTCT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("megatron-bert", "MEGATRON_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("mpnet", "MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("nezha", "NEZHA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("nystromformer", "NYSTROMFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("openai-gpt", "OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("opt", "OPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("pegasus", "PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("perceiver", "PERCEIVER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("plbart", "PLBART_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("poolformer", "POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("prophetnet", "PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("qdqbert", "QDQBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("realm", "REALM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("regnet", "REGNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("rembert", "REMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("resnet", "RESNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("retribert", "RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("roberta", "ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("roformer", "ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("segformer", "SEGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("sew", "SEW_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("sew-d", "SEW_D_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("speech_to_text", "SPEECH_TO_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("speech_to_text_2", "SPEECH_TO_TEXT_2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("splinter", "SPLINTER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("squeezebert", "SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("swin", "SWIN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("t5", "T5_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("tapas", "TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("transfo-xl", "TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("unispeech", "UNISPEECH_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("unispeech-sat", "UNISPEECH_SAT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("van", "VAN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("vilt", "VILT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("visual_bert", "VISUAL_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("vit", "VIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("vit_mae", "VIT_MAE_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("wav2vec2", "WAV_2_VEC_2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("wav2vec2-conformer", "WAV2VEC2_CONFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xglm", "XGLM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlm", "XLM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlm-prophetnet", "XLM_PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlm-roberta", "XLM_ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlnet", "XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("yolos", "YOLOS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("yoso", "YOSO_PRETRAINED_CONFIG_ARCHIVE_MAP"),
    ]
)

# Model type string -> human-readable (and properly cased) model name, used when
# rendering docstrings and error messages. Includes extra "documentation-only"
# entries (e.g. "dialogpt", "tapex") that have no config class of their own.
# Fix: "nystromformer" previously mapped to the mojibake "Nystr枚mformer" (the
# "ö" had been corrupted through a bad encoding round-trip); restored to the
# correct "Nyströmformer".
MODEL_NAMES_MAPPING = OrderedDict(
    [
        # Add full (and cased) model names here
        ("albert", "ALBERT"),
        ("bart", "BART"),
        ("barthez", "BARThez"),
        ("bartpho", "BARTpho"),
        ("beit", "BEiT"),
        ("bert", "BERT"),
        ("bert-generation", "Bert Generation"),
        ("bert-japanese", "BertJapanese"),
        ("bertweet", "BERTweet"),
        ("big_bird", "BigBird"),
        ("bigbird_pegasus", "BigBird-Pegasus"),
        ("blenderbot", "Blenderbot"),
        ("blenderbot-small", "BlenderbotSmall"),
        ("bloom", "BLOOM"),
        ("bort", "BORT"),
        ("byt5", "ByT5"),
        ("camembert", "CamemBERT"),
        ("canine", "CANINE"),
        ("clip", "CLIP"),
        ("codegen", "CodeGen"),
        ("convbert", "ConvBERT"),
        ("convnext", "ConvNeXT"),
        ("cpm", "CPM"),
        ("ctrl", "CTRL"),
        ("cvt", "CvT"),
        ("data2vec-audio", "Data2VecAudio"),
        ("data2vec-text", "Data2VecText"),
        ("data2vec-vision", "Data2VecVision"),
        ("deberta", "DeBERTa"),
        ("deberta-v2", "DeBERTa-v2"),
        ("decision_transformer", "Decision Transformer"),
        ("deit", "DeiT"),
        ("detr", "DETR"),
        ("dialogpt", "DialoGPT"),
        ("distilbert", "DistilBERT"),
        ("dit", "DiT"),
        ("dpr", "DPR"),
        ("dpt", "DPT"),
        ("electra", "ELECTRA"),
        ("encoder-decoder", "Encoder decoder"),
        ("flaubert", "FlauBERT"),
        ("flava", "FLAVA"),
        ("fnet", "FNet"),
        ("fsmt", "FairSeq Machine-Translation"),
        ("funnel", "Funnel Transformer"),
        ("glpn", "GLPN"),
        ("gpt2", "OpenAI GPT-2"),
        ("gpt_neo", "GPT Neo"),
        ("gpt_neox", "GPT NeoX"),
        ("gptj", "GPT-J"),
        ("groupvit", "GroupViT"),
        ("herbert", "HerBERT"),
        ("hubert", "Hubert"),
        ("ibert", "I-BERT"),
        ("imagegpt", "ImageGPT"),
        ("layoutlm", "LayoutLM"),
        ("layoutlmv2", "LayoutLMv2"),
        ("layoutlmv3", "LayoutLMv3"),
        ("layoutxlm", "LayoutXLM"),
        ("led", "LED"),
        ("levit", "LeViT"),
        ("longformer", "Longformer"),
        ("longt5", "LongT5"),
        ("luke", "LUKE"),
        ("lxmert", "LXMERT"),
        ("m2m_100", "M2M100"),
        ("marian", "Marian"),
        ("maskformer", "MaskFormer"),
        ("mbart", "mBART"),
        ("mbart50", "mBART-50"),
        ("mctct", "M-CTC-T"),
        ("megatron-bert", "Megatron-BERT"),
        ("megatron_gpt2", "Megatron-GPT2"),
        ("mluke", "mLUKE"),
        ("mobilebert", "MobileBERT"),
        ("mpnet", "MPNet"),
        ("mt5", "MT5"),
        ("nezha", "Nezha"),
        ("nystromformer", "Nyströmformer"),
        ("openai-gpt", "OpenAI GPT"),
        ("opt", "OPT"),
        ("pegasus", "Pegasus"),
        ("perceiver", "Perceiver"),
        ("phobert", "PhoBERT"),
        ("plbart", "PLBart"),
        ("poolformer", "PoolFormer"),
        ("prophetnet", "ProphetNet"),
        ("qdqbert", "QDQBert"),
        ("rag", "RAG"),
        ("realm", "REALM"),
        ("reformer", "Reformer"),
        ("regnet", "RegNet"),
        ("rembert", "RemBERT"),
        ("resnet", "ResNet"),
        ("retribert", "RetriBERT"),
        ("roberta", "RoBERTa"),
        ("roformer", "RoFormer"),
        ("segformer", "SegFormer"),
        ("sew", "SEW"),
        ("sew-d", "SEW-D"),
        ("speech-encoder-decoder", "Speech Encoder decoder"),
        ("speech_to_text", "Speech2Text"),
        ("speech_to_text_2", "Speech2Text2"),
        ("splinter", "Splinter"),
        ("squeezebert", "SqueezeBERT"),
        ("swin", "Swin Transformer"),
        ("t5", "T5"),
        ("t5v1.1", "T5v1.1"),
        ("tapas", "TAPAS"),
        ("tapex", "TAPEX"),
        ("trajectory_transformer", "Trajectory Transformer"),
        ("transfo-xl", "Transformer-XL"),
        ("trocr", "TrOCR"),
        ("ul2", "UL2"),
        ("unispeech", "UniSpeech"),
        ("unispeech-sat", "UniSpeechSat"),
        ("van", "VAN"),
        ("vilt", "ViLT"),
        ("vision-encoder-decoder", "Vision Encoder decoder"),
        ("vision-text-dual-encoder", "VisionTextDualEncoder"),
        ("visual_bert", "VisualBERT"),
        ("vit", "ViT"),
        ("vit_mae", "ViTMAE"),
        ("wav2vec2", "Wav2Vec2"),
        ("wav2vec2-conformer", "Wav2Vec2-Conformer"),
        ("wav2vec2_phoneme", "Wav2Vec2Phoneme"),
        ("wavlm", "WavLM"),
        ("xglm", "XGLM"),
        ("xlm", "XLM"),
        ("xlm-prophetnet", "XLM-ProphetNet"),
        ("xlm-roberta", "XLM-RoBERTa"),
        ("xlm-roberta-xl", "XLM-RoBERTa-XL"),
        ("xlnet", "XLNet"),
        ("xls_r", "XLS-R"),
        ("xlsr_wav2vec2", "XLSR-Wav2Vec2"),
        ("yolos", "YOLOS"),
        ("yoso", "YOSO"),
    ]
)

401
# Model types whose implementation module under `transformers.models` does not
# follow the default `key.replace("-", "_")` naming rule.
SPECIAL_MODEL_TYPE_TO_MODULE_NAME = OrderedDict(
    {
        "openai-gpt": "openai",
        "data2vec-audio": "data2vec",
        "data2vec-text": "data2vec",
        "data2vec-vision": "data2vec",
    }
)
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434


def model_type_to_module_name(key):
    """Converts a config key (model type) to the corresponding module name under `transformers.models`."""
    # A handful of model types map to a module whose name doesn't follow the
    # default rule; check those first.
    special = SPECIAL_MODEL_TYPE_TO_MODULE_NAME.get(key)
    if special is not None:
        return special
    # Default rule: dashes in the model type become underscores in the module name.
    return key.replace("-", "_")


def config_class_to_model_type(config):
    """Converts a config class name (e.g. "BertConfig") to the corresponding model type, or None if unknown."""
    matches = (key for key, cls in CONFIG_MAPPING_NAMES.items() if cls == config)
    return next(matches, None)


class _LazyConfigMapping(OrderedDict):
    """
    A dictionary that lazily load its values when they are requested.
    """

    def __init__(self, mapping):
        self._mapping = mapping
435
        self._extra_content = {}
436
437
438
        self._modules = {}

    def __getitem__(self, key):
439
440
        if key in self._extra_content:
            return self._extra_content[key]
441
442
443
444
445
446
        if key not in self._mapping:
            raise KeyError(key)
        value = self._mapping[key]
        module_name = model_type_to_module_name(key)
        if module_name not in self._modules:
            self._modules[module_name] = importlib.import_module(f".{module_name}", "transformers.models")
447
448
449
450
451
452
453
        if hasattr(self._modules[module_name], value):
            return getattr(self._modules[module_name], value)

        # Some of the mappings have entries model_type -> config of another model type. In that case we try to grab the
        # object at the top level.
        transformers_module = importlib.import_module("transformers")
        return getattr(transformers_module, value)
454
455

    def keys(self):
456
        return list(self._mapping.keys()) + list(self._extra_content.keys())
457
458

    def values(self):
459
        return [self[k] for k in self._mapping.keys()] + list(self._extra_content.values())
460

461
    def items(self):
462
        return [(k, self[k]) for k in self._mapping.keys()] + list(self._extra_content.items())
463
464

    def __iter__(self):
465
        return iter(list(self._mapping.keys()) + list(self._extra_content.keys()))
466
467

    def __contains__(self, item):
468
469
470
471
472
473
474
475
476
        return item in self._mapping or item in self._extra_content

    def register(self, key, value):
        """
        Register a new configuration in this mapping.
        """
        if key in self._mapping.keys():
            raise ValueError(f"'{key}' is already used by a Transformers config, pick another name.")
        self._extra_content[key] = value
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541


# Lazily-populated mapping model_type -> config class, backed by CONFIG_MAPPING_NAMES.
CONFIG_MAPPING = _LazyConfigMapping(CONFIG_MAPPING_NAMES)


class _LazyLoadAllMappings(OrderedDict):
    """
    A mapping that will load all pairs of key values at the first access (either by indexing, requestions keys, values,
    etc.)

    Args:
        mapping: The mapping to load.
    """

    def __init__(self, mapping):
        self._mapping = mapping
        self._initialized = False
        self._data = {}

    def _initialize(self):
        if self._initialized:
            return
        warnings.warn(
            "ALL_PRETRAINED_CONFIG_ARCHIVE_MAP is deprecated and will be removed in v5 of Transformers. "
            "It does not contain all available model checkpoints, far from it. Checkout hf.co/models for that.",
            FutureWarning,
        )

        for model_type, map_name in self._mapping.items():
            module_name = model_type_to_module_name(model_type)
            module = importlib.import_module(f".{module_name}", "transformers.models")
            mapping = getattr(module, map_name)
            self._data.update(mapping)

        self._initialized = True

    def __getitem__(self, key):
        self._initialize()
        return self._data[key]

    def keys(self):
        self._initialize()
        return self._data.keys()

    def values(self):
        self._initialize()
        return self._data.values()

    def items(self):
        self._initialize()
        return self._data.keys()

    def __iter__(self):
        self._initialize()
        return iter(self._data)

    def __contains__(self, item):
        self._initialize()
        return item in self._data


# Deprecated aggregate of every per-model archive map; warns on first access.
ALL_PRETRAINED_CONFIG_ARCHIVE_MAP = _LazyLoadAllMappings(CONFIG_ARCHIVE_MAP_MAPPING_NAMES)


def _get_class_name(model_class: Union[str, List[str]]):
542
    if isinstance(model_class, (list, tuple)):
Stas Bekman's avatar
Stas Bekman committed
543
544
        return " or ".join([f"[`{c}`]" for c in model_class if c is not None])
    return f"[`{model_class}`]"
545
546


547
548
549
550
551
def _list_model_options(indent, config_to_class=None, use_model_types=True):
    """
    Build the markdown bullet list of supported models that gets substituted for the `List options` placeholder
    in Auto* docstrings.

    Args:
        indent: Leading whitespace prepended to every bullet line.
        config_to_class: Optional mapping of model type (or config class name when `use_model_types=False`) to a
            model class (or list of classes).
        use_model_types: When True, bullets are keyed by model type; otherwise by configuration class name
            (requires `config_to_class`).
    """
    if config_to_class is None and not use_model_types:
        raise ValueError("Using `use_model_types=False` requires a `config_to_class` dictionary.")
    if use_model_types:
        # One bullet per model type, pointing at its config class (default) or
        # at the supplied model class(es).
        model_type_to_name = {}
        if config_to_class is None:
            for model_type, config in CONFIG_MAPPING_NAMES.items():
                model_type_to_name[model_type] = f"[`{config}`]"
        else:
            for model_type, model_class in config_to_class.items():
                if model_type in MODEL_NAMES_MAPPING:
                    model_type_to_name[model_type] = _get_class_name(model_class)
        lines = []
        for model_type in sorted(model_type_to_name):
            lines.append(
                f"{indent}- **{model_type}** -- {model_type_to_name[model_type]} ({MODEL_NAMES_MAPPING[model_type]} model)"
            )
    else:
        # One bullet per configuration class name.
        config_to_name = {}
        for model_type, model_class in config_to_class.items():
            if model_type in CONFIG_MAPPING_NAMES:
                config_to_name[CONFIG_MAPPING_NAMES[model_type]] = _get_class_name(model_class)
        config_to_model_name = {}
        for model_type, config in CONFIG_MAPPING_NAMES.items():
            config_to_model_name[config] = MODEL_NAMES_MAPPING[model_type]
        lines = []
        for config_name in sorted(config_to_name):
            lines.append(
                f"{indent}- [`{config_name}`] configuration class:"
                f" {config_to_name[config_name]} ({config_to_model_name[config_name]} model)"
            )
    return "\n".join(lines)


def replace_list_option_in_docstrings(config_to_class=None, use_model_types=True):
    """
    Decorator factory: replaces the first bare `List options` placeholder line in the decorated function's
    docstring with the generated list of supported models (see `_list_model_options`). Raises ValueError if the
    placeholder is missing.
    """
    placeholder = re.compile(r"^(\s*)List options\s*$")

    def docstring_decorator(fn):
        docstrings = fn.__doc__
        lines = docstrings.split("\n")
        for i, line in enumerate(lines):
            match = placeholder.search(line)
            if match is None:
                continue
            indent = match.groups()[0]
            if use_model_types:
                # Model-type bullets are nested one level deeper than the placeholder.
                indent = f"{indent}    "
            lines[i] = _list_model_options(indent, config_to_class=config_to_class, use_model_types=use_model_types)
            fn.__doc__ = "\n".join(lines)
            return fn
        raise ValueError(
            f"The function {fn} should have an empty 'List options' in its docstring as placeholder, current"
            f" docstring is:\n{docstrings}"
        )

    return docstring_decorator


Julien Chaumond's avatar
Julien Chaumond committed
604
class AutoConfig:
    r"""
    This is a generic configuration class that will be instantiated as one of the configuration classes of the library
    when created with the [`~AutoConfig.from_pretrained`] class method.

    This class cannot be instantiated directly using `__init__()` (throws an error).
    """

    def __init__(self):
        # Instantiation is forbidden: the class only exposes classmethod/staticmethod factories.
        raise EnvironmentError(
            "AutoConfig is designed to be instantiated "
            "using the `AutoConfig.from_pretrained(pretrained_model_name_or_path)` method."
        )

    @classmethod
    def for_model(cls, model_type: str, *args, **kwargs):
        """
        Instantiate a fresh (non-pretrained) configuration for the given `model_type`, forwarding `*args` and
        `**kwargs` to the configuration class constructor.

        Raises:
            ValueError: if `model_type` is not a key of `CONFIG_MAPPING`.
        """
        if model_type in CONFIG_MAPPING:
            config_class = CONFIG_MAPPING[model_type]
            return config_class(*args, **kwargs)
        raise ValueError(
            f"Unrecognized model identifier: {model_type}. Should contain one of {', '.join(CONFIG_MAPPING.keys())}"
        )

    @classmethod
    @replace_list_option_in_docstrings()
    def from_pretrained(cls, pretrained_model_name_or_path, **kwargs):
        r"""
        Instantiate one of the configuration classes of the library from a pretrained model configuration.

        The configuration class to instantiate is selected based on the `model_type` property of the config object that
        is loaded, or when it's missing, by falling back to using pattern matching on `pretrained_model_name_or_path`:

        List options

        Args:
            pretrained_model_name_or_path (`str` or `os.PathLike`):
                Can be either:

                    - A string, the *model id* of a pretrained model configuration hosted inside a model repo on
                      huggingface.co. Valid model ids can be located at the root-level, like `bert-base-uncased`, or
                      namespaced under a user or organization name, like `dbmdz/bert-base-german-cased`.
                    - A path to a *directory* containing a configuration file saved using the
                      [`~PretrainedConfig.save_pretrained`] method, or the [`~PreTrainedModel.save_pretrained`] method,
                      e.g., `./my_model_directory/`.
                    - A path or url to a saved configuration JSON *file*, e.g.,
                      `./my_model_directory/configuration.json`.
            cache_dir (`str` or `os.PathLike`, *optional*):
                Path to a directory in which a downloaded pretrained model configuration should be cached if the
                standard cache should not be used.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether or not to force the (re-)download the model weights and configuration files and override the
                cached versions if they exist.
            resume_download (`bool`, *optional*, defaults to `False`):
                Whether or not to delete incompletely received files. Will attempt to resume the download if such a
                file exists.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.
            revision (`str`, *optional*, defaults to `"main"`):
                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
                identifier allowed by git.
            return_unused_kwargs (`bool`, *optional*, defaults to `False`):
                If `False`, then this function returns just the final configuration object.

                If `True`, then this functions returns a `Tuple(config, unused_kwargs)` where *unused_kwargs* is a
                dictionary consisting of the key/value pairs whose keys are not configuration attributes: i.e., the
                part of `kwargs` which has not been used to update `config` and is otherwise ignored.
            trust_remote_code (`bool`, *optional*, defaults to `False`):
                Whether or not to allow for custom models defined on the Hub in their own modeling files. This option
                should only be set to `True` for repositories you trust and in which you have read the code, as it will
                execute code present on the Hub on your local machine.
            kwargs(additional keyword arguments, *optional*):
                The values in kwargs of any keys which are configuration attributes will be used to override the loaded
                values. Behavior concerning key/value pairs whose keys are *not* configuration attributes is controlled
                by the `return_unused_kwargs` keyword parameter.

        Examples:

        ```python
        >>> from transformers import AutoConfig

        >>> # Download configuration from huggingface.co and cache.
        >>> config = AutoConfig.from_pretrained("bert-base-uncased")

        >>> # Download configuration from huggingface.co (user-uploaded) and cache.
        >>> config = AutoConfig.from_pretrained("dbmdz/bert-base-german-cased")

        >>> # If configuration file is in a directory (e.g., was saved using *save_pretrained('./test/saved_model/')*).
        >>> config = AutoConfig.from_pretrained("./test/bert_saved_model/")

        >>> # Load a specific configuration file.
        >>> config = AutoConfig.from_pretrained("./test/bert_saved_model/my_configuration.json")

        >>> # Change some config attributes when loading a pretrained config.
        >>> config = AutoConfig.from_pretrained("bert-base-uncased", output_attentions=True, foo=False)
        >>> config.output_attentions
        True

        >>> config, unused_kwargs = AutoConfig.from_pretrained(
        ...     "bert-base-uncased", output_attentions=True, foo=False, return_unused_kwargs=True
        ... )
        >>> config.output_attentions
        True

        >>> unused_kwargs
        {'foo': False}
        ```"""
        # Mark the call as coming from an Auto class so downstream loaders can adapt their behavior/telemetry.
        kwargs["_from_auto"] = True
        kwargs["name_or_path"] = pretrained_model_name_or_path
        trust_remote_code = kwargs.pop("trust_remote_code", False)
        config_dict, _ = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs)
        if "auto_map" in config_dict and "AutoConfig" in config_dict["auto_map"]:
            # The config declares a custom configuration class living in the model repo itself; only load and
            # execute that remote code when the user has explicitly opted in.
            if not trust_remote_code:
                raise ValueError(
                    f"Loading {pretrained_model_name_or_path} requires you to execute the configuration file in that"
                    " repo on your local machine. Make sure you have read the code there to avoid malicious use, then"
                    " set the option `trust_remote_code=True` to remove this error."
                )
            if kwargs.get("revision", None) is None:
                logger.warning(
                    "Explicitly passing a `revision` is encouraged when loading a configuration with custom code to "
                    "ensure no malicious code has been contributed in a newer revision."
                )
            # `auto_map` entries look like "module_file.ClassName".
            class_ref = config_dict["auto_map"]["AutoConfig"]
            module_file, class_name = class_ref.split(".")
            config_class = get_class_from_dynamic_module(
                pretrained_model_name_or_path, module_file + ".py", class_name, **kwargs
            )
            return config_class.from_pretrained(pretrained_model_name_or_path, **kwargs)
        elif "model_type" in config_dict:
            config_class = CONFIG_MAPPING[config_dict["model_type"]]
            return config_class.from_dict(config_dict, **kwargs)
        else:
            # Fallback: use pattern matching on the string.
            # We go from longer names to shorter names to catch roberta before bert (for instance)
            for pattern in sorted(CONFIG_MAPPING.keys(), key=len, reverse=True):
                if pattern in str(pretrained_model_name_or_path):
                    return CONFIG_MAPPING[pattern].from_dict(config_dict, **kwargs)

        raise ValueError(
            f"Unrecognized model in {pretrained_model_name_or_path}. "
            f"Should have a `model_type` key in its {CONFIG_NAME}, or contain one of the following strings "
            f"in its name: {', '.join(CONFIG_MAPPING.keys())}"
        )

    @staticmethod
    def register(model_type, config):
        """
        Register a new configuration for this class.

        Args:
            model_type (`str`): The model type like "bert" or "gpt".
            config ([`PretrainedConfig`]): The config to register.

        Raises:
            ValueError: if `config` declares a `model_type` that differs from the `model_type` being registered.
        """
        if issubclass(config, PretrainedConfig) and config.model_type != model_type:
            raise ValueError(
                "The config you are passing has a `model_type` attribute that is not consistent with the model type "
                f"you passed (config has {config.model_type} and you passed {model_type}). Fix one of those so they "
                "match!"
            )
        CONFIG_MAPPING.register(model_type, config)