configuration_auto.py 31.6 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
# coding=utf-8
# Copyright 2018 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Sylvain Gugger's avatar
Sylvain Gugger committed
15
""" Auto Config class."""
16
import importlib
17
import re
18
import warnings
19
from collections import OrderedDict
20
from typing import List, Union
21

Sylvain Gugger's avatar
Sylvain Gugger committed
22
from ...configuration_utils import PretrainedConfig
23
from ...dynamic_module_utils import get_class_from_dynamic_module
24
from ...utils import CONFIG_NAME, logging
Aymeric Augustin's avatar
Aymeric Augustin committed
25

26

27
28
logger = logging.get_logger(__name__)

29
30
31
# Model type (short lowercase key) -> name of the corresponding config class.
# Classes are resolved lazily through `_LazyConfigMapping` below.
CONFIG_MAPPING_NAMES = OrderedDict(
    [
        # Add configs here
        ("albert", "AlbertConfig"),
        ("bart", "BartConfig"),
        ("beit", "BeitConfig"),
        ("bert", "BertConfig"),
        ("bert-generation", "BertGenerationConfig"),
        ("big_bird", "BigBirdConfig"),
        ("bigbird_pegasus", "BigBirdPegasusConfig"),
        ("blenderbot", "BlenderbotConfig"),
        ("blenderbot-small", "BlenderbotSmallConfig"),
        ("bloom", "BloomConfig"),
        ("camembert", "CamembertConfig"),
        ("canine", "CanineConfig"),
        ("clip", "CLIPConfig"),
        ("convbert", "ConvBertConfig"),
        ("convnext", "ConvNextConfig"),
        ("ctrl", "CTRLConfig"),
        ("cvt", "CvtConfig"),
        ("data2vec-audio", "Data2VecAudioConfig"),
        ("data2vec-text", "Data2VecTextConfig"),
        ("data2vec-vision", "Data2VecVisionConfig"),
        ("deberta", "DebertaConfig"),
        ("deberta-v2", "DebertaV2Config"),
        ("decision_transformer", "DecisionTransformerConfig"),
        ("deit", "DeiTConfig"),
        ("detr", "DetrConfig"),
        ("distilbert", "DistilBertConfig"),
        ("dpr", "DPRConfig"),
        ("dpt", "DPTConfig"),
        ("electra", "ElectraConfig"),
        ("encoder-decoder", "EncoderDecoderConfig"),
        ("flaubert", "FlaubertConfig"),
        ("flava", "FlavaConfig"),
        ("fnet", "FNetConfig"),
        ("fsmt", "FSMTConfig"),
        ("funnel", "FunnelConfig"),
        ("glpn", "GLPNConfig"),
        ("gpt2", "GPT2Config"),
        ("gpt_neo", "GPTNeoConfig"),
        ("gpt_neox", "GPTNeoXConfig"),
        ("gptj", "GPTJConfig"),
        ("hubert", "HubertConfig"),
        ("ibert", "IBertConfig"),
        ("imagegpt", "ImageGPTConfig"),
        ("layoutlm", "LayoutLMConfig"),
        ("layoutlmv2", "LayoutLMv2Config"),
        ("layoutlmv3", "LayoutLMv3Config"),
        ("led", "LEDConfig"),
        ("levit", "LevitConfig"),
        ("longformer", "LongformerConfig"),
        ("luke", "LukeConfig"),
        ("lxmert", "LxmertConfig"),
        ("m2m_100", "M2M100Config"),
        ("marian", "MarianConfig"),
        ("maskformer", "MaskFormerConfig"),
        ("mbart", "MBartConfig"),
        ("mctct", "MCTCTConfig"),
        ("megatron-bert", "MegatronBertConfig"),
        ("mobilebert", "MobileBertConfig"),
        ("mpnet", "MPNetConfig"),
        ("mt5", "MT5Config"),
        ("nystromformer", "NystromformerConfig"),
        ("openai-gpt", "OpenAIGPTConfig"),
        ("opt", "OPTConfig"),
        ("pegasus", "PegasusConfig"),
        ("perceiver", "PerceiverConfig"),
        ("plbart", "PLBartConfig"),
        ("poolformer", "PoolFormerConfig"),
        ("prophetnet", "ProphetNetConfig"),
        ("qdqbert", "QDQBertConfig"),
        ("rag", "RagConfig"),
        ("realm", "RealmConfig"),
        ("reformer", "ReformerConfig"),
        ("regnet", "RegNetConfig"),
        ("rembert", "RemBertConfig"),
        ("resnet", "ResNetConfig"),
        ("retribert", "RetriBertConfig"),
        ("roberta", "RobertaConfig"),
        ("roformer", "RoFormerConfig"),
        ("segformer", "SegformerConfig"),
        ("sew", "SEWConfig"),
        ("sew-d", "SEWDConfig"),
        ("speech-encoder-decoder", "SpeechEncoderDecoderConfig"),
        ("speech_to_text", "Speech2TextConfig"),
        ("speech_to_text_2", "Speech2Text2Config"),
        ("splinter", "SplinterConfig"),
        ("squeezebert", "SqueezeBertConfig"),
        ("swin", "SwinConfig"),
        ("t5", "T5Config"),
        ("tapas", "TapasConfig"),
        ("trajectory_transformer", "TrajectoryTransformerConfig"),
        ("transfo-xl", "TransfoXLConfig"),
        ("trocr", "TrOCRConfig"),
        ("unispeech", "UniSpeechConfig"),
        ("unispeech-sat", "UniSpeechSatConfig"),
        ("van", "VanConfig"),
        ("vilt", "ViltConfig"),
        ("vision-encoder-decoder", "VisionEncoderDecoderConfig"),
        ("vision-text-dual-encoder", "VisionTextDualEncoderConfig"),
        ("visual_bert", "VisualBertConfig"),
        ("vit", "ViTConfig"),
        ("vit_mae", "ViTMAEConfig"),
        ("wav2vec2", "Wav2Vec2Config"),
        ("wav2vec2-conformer", "Wav2Vec2ConformerConfig"),
        ("wavlm", "WavLMConfig"),
        ("xglm", "XGLMConfig"),
        ("xlm", "XLMConfig"),
        ("xlm-prophetnet", "XLMProphetNetConfig"),
        ("xlm-roberta", "XLMRobertaConfig"),
        ("xlm-roberta-xl", "XLMRobertaXLConfig"),
        ("xlnet", "XLNetConfig"),
        ("yolos", "YolosConfig"),
        ("yoso", "YosoConfig"),
    ]
)
146

147
# Model type -> name of the module-level `*_PRETRAINED_CONFIG_ARCHIVE_MAP` constant.
# Loaded lazily by `_LazyLoadAllMappings` below.
CONFIG_ARCHIVE_MAP_MAPPING_NAMES = OrderedDict(
    [
        # Add archive maps here
        ("albert", "ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bart", "BART_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("beit", "BEIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bert", "BERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("big_bird", "BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bigbird_pegasus", "BIGBIRD_PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("blenderbot", "BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("blenderbot-small", "BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bloom", "BLOOM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("camembert", "CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("canine", "CANINE_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("clip", "CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("convbert", "CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("convnext", "CONVNEXT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("ctrl", "CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("cvt", "CVT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("data2vec-audio", "DATA2VEC_AUDIO_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("data2vec-text", "DATA2VEC_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("data2vec-vision", "DATA2VEC_VISION_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("deberta", "DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("deberta-v2", "DEBERTA_V2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("deit", "DEIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("detr", "DETR_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("distilbert", "DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("dpr", "DPR_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("dpt", "DPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("electra", "ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("flaubert", "FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("flava", "FLAVA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("fnet", "FNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("fsmt", "FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("funnel", "FUNNEL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("glpn", "GLPN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gpt2", "GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gpt_neo", "GPT_NEO_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gpt_neox", "GPT_NEOX_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gptj", "GPTJ_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("hubert", "HUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("ibert", "IBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("imagegpt", "IMAGEGPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("layoutlm", "LAYOUTLM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("layoutlmv2", "LAYOUTLMV2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("layoutlmv3", "LAYOUTLMV3_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("led", "LED_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("levit", "LEVIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("longformer", "LONGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("luke", "LUKE_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("lxmert", "LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("m2m_100", "M2M_100_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("maskformer", "MASKFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("mbart", "MBART_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("mctct", "MCTCT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("megatron-bert", "MEGATRON_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("mpnet", "MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("nystromformer", "NYSTROMFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("openai-gpt", "OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("opt", "OPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("pegasus", "PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("perceiver", "PERCEIVER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("plbart", "PLBART_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("poolformer", "POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("prophetnet", "PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("qdqbert", "QDQBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("realm", "REALM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("regnet", "REGNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("rembert", "REMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("resnet", "RESNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("retribert", "RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("roberta", "ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("roformer", "ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("segformer", "SEGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("sew", "SEW_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("sew-d", "SEW_D_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("speech_to_text", "SPEECH_TO_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("speech_to_text_2", "SPEECH_TO_TEXT_2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("splinter", "SPLINTER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("squeezebert", "SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("swin", "SWIN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("t5", "T5_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("tapas", "TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("transfo-xl", "TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("unispeech", "UNISPEECH_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("unispeech-sat", "UNISPEECH_SAT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("van", "VAN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("vilt", "VILT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("visual_bert", "VISUAL_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("vit", "VIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("vit_mae", "VIT_MAE_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("wav2vec2", "WAV_2_VEC_2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("wav2vec2-conformer", "WAV2VEC2_CONFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xglm", "XGLM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlm", "XLM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlm-prophetnet", "XLM_PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlm-roberta", "XLM_ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlnet", "XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("yolos", "YOLOS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("yoso", "YOSO_PRETRAINED_CONFIG_ARCHIVE_MAP"),
    ]
)

# Model type -> human-readable (cased) model name, used when generating docstrings.
# Fix: "Nyströmformer" was mojibake ("Nystr枚mformer", UTF-8 "ö" mis-decoded).
MODEL_NAMES_MAPPING = OrderedDict(
    [
        # Add full (and cased) model names here
        ("albert", "ALBERT"),
        ("bart", "BART"),
        ("barthez", "BARThez"),
        ("bartpho", "BARTpho"),
        ("beit", "BEiT"),
        ("bert", "BERT"),
        ("bert-generation", "Bert Generation"),
        ("bert-japanese", "BertJapanese"),
        ("bertweet", "BERTweet"),
        ("big_bird", "BigBird"),
        ("bigbird_pegasus", "BigBird-Pegasus"),
        ("blenderbot", "Blenderbot"),
        ("blenderbot-small", "BlenderbotSmall"),
        ("bloom", "BLOOM"),
        ("bort", "BORT"),
        ("byt5", "ByT5"),
        ("camembert", "CamemBERT"),
        ("canine", "CANINE"),
        ("clip", "CLIP"),
        ("convbert", "ConvBERT"),
        ("convnext", "ConvNeXT"),
        ("cpm", "CPM"),
        ("ctrl", "CTRL"),
        ("cvt", "CvT"),
        ("data2vec-audio", "Data2VecAudio"),
        ("data2vec-text", "Data2VecText"),
        ("data2vec-vision", "Data2VecVision"),
        ("deberta", "DeBERTa"),
        ("deberta-v2", "DeBERTa-v2"),
        ("decision_transformer", "Decision Transformer"),
        ("deit", "DeiT"),
        ("detr", "DETR"),
        ("dialogpt", "DialoGPT"),
        ("distilbert", "DistilBERT"),
        ("dit", "DiT"),
        ("dpr", "DPR"),
        ("dpt", "DPT"),
        ("electra", "ELECTRA"),
        ("encoder-decoder", "Encoder decoder"),
        ("flaubert", "FlauBERT"),
        ("flava", "FLAVA"),
        ("fnet", "FNet"),
        ("fsmt", "FairSeq Machine-Translation"),
        ("funnel", "Funnel Transformer"),
        ("glpn", "GLPN"),
        ("gpt2", "OpenAI GPT-2"),
        ("gpt_neo", "GPT Neo"),
        ("gpt_neox", "GPT NeoX"),
        ("gptj", "GPT-J"),
        ("herbert", "HerBERT"),
        ("hubert", "Hubert"),
        ("ibert", "I-BERT"),
        ("imagegpt", "ImageGPT"),
        ("layoutlm", "LayoutLM"),
        ("layoutlmv2", "LayoutLMv2"),
        ("layoutlmv3", "LayoutLMv3"),
        ("layoutxlm", "LayoutXLM"),
        ("led", "LED"),
        ("levit", "LeViT"),
        ("longformer", "Longformer"),
        ("luke", "LUKE"),
        ("lxmert", "LXMERT"),
        ("m2m_100", "M2M100"),
        ("marian", "Marian"),
        ("maskformer", "MaskFormer"),
        ("mbart", "mBART"),
        ("mbart50", "mBART-50"),
        ("mctct", "M-CTC-T"),
        ("megatron-bert", "Megatron-BERT"),
        ("megatron_gpt2", "Megatron-GPT2"),
        ("mluke", "mLUKE"),
        ("mobilebert", "MobileBERT"),
        ("mpnet", "MPNet"),
        ("mt5", "MT5"),
        ("nystromformer", "Nyströmformer"),
        ("openai-gpt", "OpenAI GPT"),
        ("opt", "OPT"),
        ("pegasus", "Pegasus"),
        ("perceiver", "Perceiver"),
        ("phobert", "PhoBERT"),
        ("plbart", "PLBart"),
        ("poolformer", "PoolFormer"),
        ("prophetnet", "ProphetNet"),
        ("qdqbert", "QDQBert"),
        ("rag", "RAG"),
        ("realm", "REALM"),
        ("reformer", "Reformer"),
        ("regnet", "RegNet"),
        ("rembert", "RemBERT"),
        ("resnet", "ResNet"),
        ("retribert", "RetriBERT"),
        ("roberta", "RoBERTa"),
        ("roformer", "RoFormer"),
        ("segformer", "SegFormer"),
        ("sew", "SEW"),
        ("sew-d", "SEW-D"),
        ("speech-encoder-decoder", "Speech Encoder decoder"),
        ("speech_to_text", "Speech2Text"),
        ("speech_to_text_2", "Speech2Text2"),
        ("splinter", "Splinter"),
        ("squeezebert", "SqueezeBERT"),
        ("swin", "Swin Transformer"),
        ("t5", "T5"),
        ("t5v1.1", "T5v1.1"),
        ("tapas", "TAPAS"),
        ("tapex", "TAPEX"),
        ("trajectory_transformer", "Trajectory Transformer"),
        ("transfo-xl", "Transformer-XL"),
        ("trocr", "TrOCR"),
        ("unispeech", "UniSpeech"),
        ("unispeech-sat", "UniSpeechSat"),
        ("van", "VAN"),
        ("vilt", "ViLT"),
        ("vision-encoder-decoder", "Vision Encoder decoder"),
        ("vision-text-dual-encoder", "VisionTextDualEncoder"),
        ("visual_bert", "VisualBERT"),
        ("vit", "ViT"),
        ("vit_mae", "ViTMAE"),
        ("wav2vec2", "Wav2Vec2"),
        ("wav2vec2-conformer", "Wav2Vec2-Conformer"),
        ("wav2vec2_phoneme", "Wav2Vec2Phoneme"),
        ("wavlm", "WavLM"),
        ("xglm", "XGLM"),
        ("xlm", "XLM"),
        ("xlm-prophetnet", "XLM-ProphetNet"),
        ("xlm-roberta", "XLM-RoBERTa"),
        ("xlm-roberta-xl", "XLM-RoBERTa-XL"),
        ("xlnet", "XLNet"),
        ("xls_r", "XLS-R"),
        ("xlsr_wav2vec2", "XLSR-Wav2Vec2"),
        ("yolos", "YOLOS"),
        ("yoso", "YOSO"),
    ]
)

388
# Model types whose implementation module does not simply mirror the key
# (e.g. "openai-gpt" lives in `transformers.models.openai`).
SPECIAL_MODEL_TYPE_TO_MODULE_NAME = OrderedDict(
    [
        ("openai-gpt", "openai"),
        ("data2vec-audio", "data2vec"),
        ("data2vec-text", "data2vec"),
        ("data2vec-vision", "data2vec"),
    ]
)


def model_type_to_module_name(key):
    """Converts a config key to the corresponding module."""
    # Check the exception table first, then fall back to the mechanical rename.
    special = SPECIAL_MODEL_TYPE_TO_MODULE_NAME.get(key)
    if special is not None:
        return special
    return key.replace("-", "_")


def config_class_to_model_type(config):
    """Converts a config class name to the corresponding model type (or None when unknown)."""
    return next(
        (model_type for model_type, cls_name in CONFIG_MAPPING_NAMES.items() if cls_name == config),
        None,
    )


class _LazyConfigMapping(OrderedDict):
    """
    A dictionary that lazily load its values when they are requested.
    """

    def __init__(self, mapping):
        self._mapping = mapping
422
        self._extra_content = {}
423
424
425
        self._modules = {}

    def __getitem__(self, key):
426
427
        if key in self._extra_content:
            return self._extra_content[key]
428
429
430
431
432
433
        if key not in self._mapping:
            raise KeyError(key)
        value = self._mapping[key]
        module_name = model_type_to_module_name(key)
        if module_name not in self._modules:
            self._modules[module_name] = importlib.import_module(f".{module_name}", "transformers.models")
434
435
436
437
438
439
440
        if hasattr(self._modules[module_name], value):
            return getattr(self._modules[module_name], value)

        # Some of the mappings have entries model_type -> config of another model type. In that case we try to grab the
        # object at the top level.
        transformers_module = importlib.import_module("transformers")
        return getattr(transformers_module, value)
441
442

    def keys(self):
443
        return list(self._mapping.keys()) + list(self._extra_content.keys())
444
445

    def values(self):
446
        return [self[k] for k in self._mapping.keys()] + list(self._extra_content.values())
447

448
    def items(self):
449
        return [(k, self[k]) for k in self._mapping.keys()] + list(self._extra_content.items())
450
451

    def __iter__(self):
452
        return iter(list(self._mapping.keys()) + list(self._extra_content.keys()))
453
454

    def __contains__(self, item):
455
456
457
458
459
460
461
462
463
        return item in self._mapping or item in self._extra_content

    def register(self, key, value):
        """
        Register a new configuration in this mapping.
        """
        if key in self._mapping.keys():
            raise ValueError(f"'{key}' is already used by a Transformers config, pick another name.")
        self._extra_content[key] = value
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528


# Lazy mapping: model type -> config class, importing each class only on first access.
CONFIG_MAPPING = _LazyConfigMapping(CONFIG_MAPPING_NAMES)


class _LazyLoadAllMappings(OrderedDict):
    """
    A mapping that will load all pairs of key values at the first access (either by indexing, requestions keys, values,
    etc.)

    Args:
        mapping: The mapping to load.
    """

    def __init__(self, mapping):
        self._mapping = mapping
        self._initialized = False
        self._data = {}

    def _initialize(self):
        if self._initialized:
            return
        warnings.warn(
            "ALL_PRETRAINED_CONFIG_ARCHIVE_MAP is deprecated and will be removed in v5 of Transformers. "
            "It does not contain all available model checkpoints, far from it. Checkout hf.co/models for that.",
            FutureWarning,
        )

        for model_type, map_name in self._mapping.items():
            module_name = model_type_to_module_name(model_type)
            module = importlib.import_module(f".{module_name}", "transformers.models")
            mapping = getattr(module, map_name)
            self._data.update(mapping)

        self._initialized = True

    def __getitem__(self, key):
        self._initialize()
        return self._data[key]

    def keys(self):
        self._initialize()
        return self._data.keys()

    def values(self):
        self._initialize()
        return self._data.values()

    def items(self):
        self._initialize()
        return self._data.keys()

    def __iter__(self):
        self._initialize()
        return iter(self._data)

    def __contains__(self, item):
        self._initialize()
        return item in self._data


# Deprecated aggregate of all per-model `*_PRETRAINED_CONFIG_ARCHIVE_MAP` constants; populated lazily on first access.
ALL_PRETRAINED_CONFIG_ARCHIVE_MAP = _LazyLoadAllMappings(CONFIG_ARCHIVE_MAP_MAPPING_NAMES)


def _get_class_name(model_class: Union[str, List[str]]):
529
    if isinstance(model_class, (list, tuple)):
Stas Bekman's avatar
Stas Bekman committed
530
531
        return " or ".join([f"[`{c}`]" for c in model_class if c is not None])
    return f"[`{model_class}`]"
532
533


534
535
536
537
538
def _list_model_options(indent, config_to_class=None, use_model_types=True):
    if config_to_class is None and not use_model_types:
        raise ValueError("Using `use_model_types=False` requires a `config_to_class` dictionary.")
    if use_model_types:
        if config_to_class is None:
Stas Bekman's avatar
Stas Bekman committed
539
            model_type_to_name = {model_type: f"[`{config}`]" for model_type, config in CONFIG_MAPPING_NAMES.items()}
540
541
        else:
            model_type_to_name = {
542
543
544
                model_type: _get_class_name(model_class)
                for model_type, model_class in config_to_class.items()
                if model_type in MODEL_NAMES_MAPPING
545
546
            }
        lines = [
547
            f"{indent}- **{model_type}** -- {model_type_to_name[model_type]} ({MODEL_NAMES_MAPPING[model_type]} model)"
548
            for model_type in sorted(model_type_to_name.keys())
549
550
        ]
    else:
551
552
553
554
555
        config_to_name = {
            CONFIG_MAPPING_NAMES[config]: _get_class_name(clas)
            for config, clas in config_to_class.items()
            if config in CONFIG_MAPPING_NAMES
        }
556
        config_to_model_name = {
557
            config: MODEL_NAMES_MAPPING[model_type] for model_type, config in CONFIG_MAPPING_NAMES.items()
558
559
        }
        lines = [
Sylvain Gugger's avatar
Sylvain Gugger committed
560
561
            f"{indent}- [`{config_name}`] configuration class:"
            f" {config_to_name[config_name]} ({config_to_model_name[config_name]} model)"
562
            for config_name in sorted(config_to_name.keys())
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
        ]
    return "\n".join(lines)


def replace_list_option_in_docstrings(config_to_class=None, use_model_types=True):
    """Decorator factory replacing a bare `List options` docstring line with the generated model list."""

    def docstring_decorator(fn):
        docstrings = fn.__doc__
        lines = docstrings.split("\n")
        placeholder = re.compile(r"^(\s*)List options\s*$")
        i = 0
        while i < len(lines) and placeholder.search(lines[i]) is None:
            i += 1
        if i >= len(lines):
            raise ValueError(
                f"The function {fn} should have an empty 'List options' in its docstring as placeholder, current"
                f" docstring is:\n{docstrings}"
            )
        indent = placeholder.search(lines[i]).groups()[0]
        if use_model_types:
            indent = f"{indent}    "
        lines[i] = _list_model_options(indent, config_to_class=config_to_class, use_model_types=use_model_types)
        fn.__doc__ = "\n".join(lines)
        return fn

    return docstring_decorator


Julien Chaumond's avatar
Julien Chaumond committed
591
class AutoConfig:
    r"""
    This is a generic configuration class that will be instantiated as one of the configuration classes of the library
    when created with the [`~AutoConfig.from_pretrained`] class method.

    This class cannot be instantiated directly using `__init__()` (throws an error).
    """

    def __init__(self):
        # AutoConfig is a pure dispatcher: direct instantiation is always an error.
        raise EnvironmentError(
            "AutoConfig is designed to be instantiated "
            "using the `AutoConfig.from_pretrained(pretrained_model_name_or_path)` method."
        )

    @classmethod
    def for_model(cls, model_type: str, *args, **kwargs):
        """
        Instantiate a fresh (untrained) configuration of class matching `model_type`.

        Args:
            model_type (`str`): A key of `CONFIG_MAPPING`, like `"bert"` or `"gpt2"`.

        Raises:
            ValueError: If `model_type` is not a registered model type.
        """
        if model_type in CONFIG_MAPPING:
            config_class = CONFIG_MAPPING[model_type]
            return config_class(*args, **kwargs)
        raise ValueError(
            f"Unrecognized model identifier: {model_type}. Should contain one of {', '.join(CONFIG_MAPPING.keys())}"
        )

    @classmethod
    @replace_list_option_in_docstrings()
    def from_pretrained(cls, pretrained_model_name_or_path, **kwargs):
        r"""
        Instantiate one of the configuration classes of the library from a pretrained model configuration.

        The configuration class to instantiate is selected based on the `model_type` property of the config object that
        is loaded, or when it's missing, by falling back to using pattern matching on `pretrained_model_name_or_path`:

        List options

        Args:
            pretrained_model_name_or_path (`str` or `os.PathLike`):
                Can be either:

                    - A string, the *model id* of a pretrained model configuration hosted inside a model repo on
                      huggingface.co. Valid model ids can be located at the root-level, like `bert-base-uncased`, or
                      namespaced under a user or organization name, like `dbmdz/bert-base-german-cased`.
                    - A path to a *directory* containing a configuration file saved using the
                      [`~PretrainedConfig.save_pretrained`] method, or the [`~PreTrainedModel.save_pretrained`] method,
                      e.g., `./my_model_directory/`.
                    - A path or url to a saved configuration JSON *file*, e.g.,
                      `./my_model_directory/configuration.json`.
            cache_dir (`str` or `os.PathLike`, *optional*):
                Path to a directory in which a downloaded pretrained model configuration should be cached if the
                standard cache should not be used.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether or not to force the (re-)download the model weights and configuration files and override the
                cached versions if they exist.
            resume_download (`bool`, *optional*, defaults to `False`):
                Whether or not to delete incompletely received files. Will attempt to resume the download if such a
                file exists.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.
            revision (`str`, *optional*, defaults to `"main"`):
                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
                identifier allowed by git.
            return_unused_kwargs (`bool`, *optional*, defaults to `False`):
                If `False`, then this function returns just the final configuration object.

                If `True`, then this functions returns a `Tuple(config, unused_kwargs)` where *unused_kwargs* is a
                dictionary consisting of the key/value pairs whose keys are not configuration attributes: i.e., the
                part of `kwargs` which has not been used to update `config` and is otherwise ignored.
            trust_remote_code (`bool`, *optional*, defaults to `False`):
                Whether or not to allow for custom models defined on the Hub in their own modeling files. This option
                should only be set to `True` for repositories you trust and in which you have read the code, as it will
                execute code present on the Hub on your local machine.
            kwargs(additional keyword arguments, *optional*):
                The values in kwargs of any keys which are configuration attributes will be used to override the loaded
                values. Behavior concerning key/value pairs whose keys are *not* configuration attributes is controlled
                by the `return_unused_kwargs` keyword parameter.

        Examples:

        ```python
        >>> from transformers import AutoConfig

        >>> # Download configuration from huggingface.co and cache.
        >>> config = AutoConfig.from_pretrained("bert-base-uncased")

        >>> # Download configuration from huggingface.co (user-uploaded) and cache.
        >>> config = AutoConfig.from_pretrained("dbmdz/bert-base-german-cased")

        >>> # If configuration file is in a directory (e.g., was saved using *save_pretrained('./test/saved_model/')*).
        >>> config = AutoConfig.from_pretrained("./test/bert_saved_model/")

        >>> # Load a specific configuration file.
        >>> config = AutoConfig.from_pretrained("./test/bert_saved_model/my_configuration.json")

        >>> # Change some config attributes when loading a pretrained config.
        >>> config = AutoConfig.from_pretrained("bert-base-uncased", output_attentions=True, foo=False)
        >>> config.output_attentions
        True

        >>> config, unused_kwargs = AutoConfig.from_pretrained(
        ...     "bert-base-uncased", output_attentions=True, foo=False, return_unused_kwargs=True
        ... )
        >>> config.output_attentions
        True

        >>> unused_kwargs
        {'foo': False}
        ```"""
        kwargs["_from_auto"] = True
        kwargs["name_or_path"] = pretrained_model_name_or_path
        trust_remote_code = kwargs.pop("trust_remote_code", False)
        config_dict, _ = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs)
        if "auto_map" in config_dict and "AutoConfig" in config_dict["auto_map"]:
            # The config points to a custom configuration class living in the model repo: only load it when the
            # user has explicitly opted in, since this executes code downloaded from the Hub.
            if not trust_remote_code:
                raise ValueError(
                    f"Loading {pretrained_model_name_or_path} requires you to execute the configuration file in that"
                    " repo on your local machine. Make sure you have read the code there to avoid malicious use, then"
                    " set the option `trust_remote_code=True` to remove this error."
                )
            if kwargs.get("revision", None) is None:
                logger.warning(
                    "Explicitly passing a `revision` is encouraged when loading a configuration with custom code to "
                    "ensure no malicious code has been contributed in a newer revision."
                )
            class_ref = config_dict["auto_map"]["AutoConfig"]
            # `class_ref` is of the form "module_file.ClassName".
            module_file, class_name = class_ref.split(".")
            config_class = get_class_from_dynamic_module(
                pretrained_model_name_or_path, module_file + ".py", class_name, **kwargs
            )
            return config_class.from_pretrained(pretrained_model_name_or_path, **kwargs)
        elif "model_type" in config_dict:
            config_class = CONFIG_MAPPING[config_dict["model_type"]]
            return config_class.from_dict(config_dict, **kwargs)
        else:
            # Fallback: use pattern matching on the string.
            # We go from longer names to shorter names to catch roberta before bert (for instance)
            for pattern in sorted(CONFIG_MAPPING.keys(), key=len, reverse=True):
                if pattern in str(pretrained_model_name_or_path):
                    return CONFIG_MAPPING[pattern].from_dict(config_dict, **kwargs)

        raise ValueError(
            f"Unrecognized model in {pretrained_model_name_or_path}. "
            f"Should have a `model_type` key in its {CONFIG_NAME}, or contain one of the following strings "
            f"in its name: {', '.join(CONFIG_MAPPING.keys())}"
        )

    @staticmethod
    def register(model_type, config):
        """
        Register a new configuration for this class.

        Args:
            model_type (`str`): The model type like "bert" or "gpt".
            config ([`PretrainedConfig`]): The config to register.

        Raises:
            ValueError: If `config` declares a `model_type` different from `model_type`.
        """
        if issubclass(config, PretrainedConfig) and config.model_type != model_type:
            # Fix: the original error message had an unbalanced parenthesis ("(config has ... you passed X. Fix").
            raise ValueError(
                "The config you are passing has a `model_type` attribute that is not consistent with the model type "
                f"you passed (config has {config.model_type} and you passed {model_type}). Fix one of those so they "
                "match!"
            )
        CONFIG_MAPPING.register(model_type, config)