"vscode:/vscode.git/clone" did not exist on "6950f70b38e76657c0d640fa7061394a7af198b0"
configuration_auto.py 33 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
# coding=utf-8
# Copyright 2018 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Sylvain Gugger's avatar
Sylvain Gugger committed
15
""" Auto Config class."""
16
import importlib
17
import re
18
import warnings
19
from collections import OrderedDict
20
from typing import List, Union
21

Sylvain Gugger's avatar
Sylvain Gugger committed
22
from ...configuration_utils import PretrainedConfig
23
from ...dynamic_module_utils import get_class_from_dynamic_module
24
from ...utils import CONFIG_NAME, logging
Aymeric Augustin's avatar
Aymeric Augustin committed
25

26

27
28
logger = logging.get_logger(__name__)

29
30
31
# Mapping: model type string (as stored in a config's `model_type` field) -> config class name.
# Class names are kept as strings and resolved lazily via `_LazyConfigMapping` below, so that
# importing this module does not import every model's configuration module.
CONFIG_MAPPING_NAMES = OrderedDict(
    [
        # Add configs here
        ("albert", "AlbertConfig"),
        ("bart", "BartConfig"),
        ("beit", "BeitConfig"),
        ("bert", "BertConfig"),
        ("bert-generation", "BertGenerationConfig"),
        ("big_bird", "BigBirdConfig"),
        ("bigbird_pegasus", "BigBirdPegasusConfig"),
        ("blenderbot", "BlenderbotConfig"),
        ("blenderbot-small", "BlenderbotSmallConfig"),
        ("bloom", "BloomConfig"),
        ("camembert", "CamembertConfig"),
        ("canine", "CanineConfig"),
        ("clip", "CLIPConfig"),
        ("codegen", "CodeGenConfig"),
        ("convbert", "ConvBertConfig"),
        ("convnext", "ConvNextConfig"),
        ("ctrl", "CTRLConfig"),
        ("cvt", "CvtConfig"),
        ("data2vec-audio", "Data2VecAudioConfig"),
        ("data2vec-text", "Data2VecTextConfig"),
        ("data2vec-vision", "Data2VecVisionConfig"),
        ("deberta", "DebertaConfig"),
        ("deberta-v2", "DebertaV2Config"),
        ("decision_transformer", "DecisionTransformerConfig"),
        ("deit", "DeiTConfig"),
        ("detr", "DetrConfig"),
        ("distilbert", "DistilBertConfig"),
        ("donut-swin", "DonutSwinConfig"),
        ("dpr", "DPRConfig"),
        ("dpt", "DPTConfig"),
        ("electra", "ElectraConfig"),
        ("encoder-decoder", "EncoderDecoderConfig"),
        ("flaubert", "FlaubertConfig"),
        ("flava", "FlavaConfig"),
        ("fnet", "FNetConfig"),
        ("fsmt", "FSMTConfig"),
        ("funnel", "FunnelConfig"),
        ("glpn", "GLPNConfig"),
        ("gpt2", "GPT2Config"),
        ("gpt_neo", "GPTNeoConfig"),
        ("gpt_neox", "GPTNeoXConfig"),
        ("gptj", "GPTJConfig"),
        ("groupvit", "GroupViTConfig"),
        ("hubert", "HubertConfig"),
        ("ibert", "IBertConfig"),
        ("imagegpt", "ImageGPTConfig"),
        ("layoutlm", "LayoutLMConfig"),
        ("layoutlmv2", "LayoutLMv2Config"),
        ("layoutlmv3", "LayoutLMv3Config"),
        ("led", "LEDConfig"),
        ("levit", "LevitConfig"),
        ("longformer", "LongformerConfig"),
        ("longt5", "LongT5Config"),
        ("luke", "LukeConfig"),
        ("lxmert", "LxmertConfig"),
        ("m2m_100", "M2M100Config"),
        ("marian", "MarianConfig"),
        ("maskformer", "MaskFormerConfig"),
        ("mbart", "MBartConfig"),
        ("mctct", "MCTCTConfig"),
        ("megatron-bert", "MegatronBertConfig"),
        ("mobilebert", "MobileBertConfig"),
        ("mobilevit", "MobileViTConfig"),
        ("mpnet", "MPNetConfig"),
        ("mt5", "MT5Config"),
        ("mvp", "MvpConfig"),
        ("nezha", "NezhaConfig"),
        ("nystromformer", "NystromformerConfig"),
        ("openai-gpt", "OpenAIGPTConfig"),
        ("opt", "OPTConfig"),
        ("owlvit", "OwlViTConfig"),
        ("pegasus", "PegasusConfig"),
        ("perceiver", "PerceiverConfig"),
        ("plbart", "PLBartConfig"),
        ("poolformer", "PoolFormerConfig"),
        ("prophetnet", "ProphetNetConfig"),
        ("qdqbert", "QDQBertConfig"),
        ("rag", "RagConfig"),
        ("realm", "RealmConfig"),
        ("reformer", "ReformerConfig"),
        ("regnet", "RegNetConfig"),
        ("rembert", "RemBertConfig"),
        ("resnet", "ResNetConfig"),
        ("retribert", "RetriBertConfig"),
        ("roberta", "RobertaConfig"),
        ("roformer", "RoFormerConfig"),
        ("segformer", "SegformerConfig"),
        ("sew", "SEWConfig"),
        ("sew-d", "SEWDConfig"),
        ("speech-encoder-decoder", "SpeechEncoderDecoderConfig"),
        ("speech_to_text", "Speech2TextConfig"),
        ("speech_to_text_2", "Speech2Text2Config"),
        ("splinter", "SplinterConfig"),
        ("squeezebert", "SqueezeBertConfig"),
        ("swin", "SwinConfig"),
        ("swinv2", "Swinv2Config"),
        ("t5", "T5Config"),
        ("tapas", "TapasConfig"),
        ("trajectory_transformer", "TrajectoryTransformerConfig"),
        ("transfo-xl", "TransfoXLConfig"),
        ("trocr", "TrOCRConfig"),
        ("unispeech", "UniSpeechConfig"),
        ("unispeech-sat", "UniSpeechSatConfig"),
        ("van", "VanConfig"),
        ("videomae", "VideoMAEConfig"),
        ("vilt", "ViltConfig"),
        ("vision-encoder-decoder", "VisionEncoderDecoderConfig"),
        ("vision-text-dual-encoder", "VisionTextDualEncoderConfig"),
        ("visual_bert", "VisualBertConfig"),
        ("vit", "ViTConfig"),
        ("vit_mae", "ViTMAEConfig"),
        ("wav2vec2", "Wav2Vec2Config"),
        ("wav2vec2-conformer", "Wav2Vec2ConformerConfig"),
        ("wavlm", "WavLMConfig"),
        ("xglm", "XGLMConfig"),
        ("xlm", "XLMConfig"),
        ("xlm-prophetnet", "XLMProphetNetConfig"),
        ("xlm-roberta", "XLMRobertaConfig"),
        ("xlm-roberta-xl", "XLMRobertaXLConfig"),
        ("xlnet", "XLNetConfig"),
        ("yolos", "YolosConfig"),
        ("yoso", "YosoConfig"),
    ]
)
156

157
# Mapping: model type -> name of the `*_PRETRAINED_CONFIG_ARCHIVE_MAP` constant in the model's
# module. Resolved lazily by `_LazyLoadAllMappings` below. Not every model type in
# `CONFIG_MAPPING_NAMES` has an entry here.
CONFIG_ARCHIVE_MAP_MAPPING_NAMES = OrderedDict(
    [
        # Add archive maps here
        ("albert", "ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bart", "BART_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("beit", "BEIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bert", "BERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("big_bird", "BIG_BIRD_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bigbird_pegasus", "BIGBIRD_PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("blenderbot", "BLENDERBOT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("blenderbot-small", "BLENDERBOT_SMALL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("bloom", "BLOOM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("camembert", "CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("canine", "CANINE_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("clip", "CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("codegen", "CODEGEN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("convbert", "CONVBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("convnext", "CONVNEXT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("ctrl", "CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("cvt", "CVT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("data2vec-audio", "DATA2VEC_AUDIO_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("data2vec-text", "DATA2VEC_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("data2vec-vision", "DATA2VEC_VISION_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("deberta", "DEBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("deberta-v2", "DEBERTA_V2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("deit", "DEIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("detr", "DETR_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("distilbert", "DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("donut-swin", "DONUT_SWIN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("dpr", "DPR_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("dpt", "DPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("electra", "ELECTRA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("flaubert", "FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("flava", "FLAVA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("fnet", "FNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("fsmt", "FSMT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("funnel", "FUNNEL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("glpn", "GLPN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gpt2", "GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gpt_neo", "GPT_NEO_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gpt_neox", "GPT_NEOX_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("gptj", "GPTJ_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("groupvit", "GROUPVIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("hubert", "HUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("ibert", "IBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("imagegpt", "IMAGEGPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("layoutlm", "LAYOUTLM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("layoutlmv2", "LAYOUTLMV2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("layoutlmv3", "LAYOUTLMV3_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("led", "LED_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("levit", "LEVIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("longformer", "LONGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("longt5", "LONGT5_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("luke", "LUKE_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("lxmert", "LXMERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("m2m_100", "M2M_100_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("maskformer", "MASKFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("mbart", "MBART_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("mctct", "MCTCT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("megatron-bert", "MEGATRON_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("mobilevit", "MOBILEVIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("mpnet", "MPNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("mvp", "MVP_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("nezha", "NEZHA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("nystromformer", "NYSTROMFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("openai-gpt", "OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("opt", "OPT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("owlvit", "OWLVIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("pegasus", "PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("perceiver", "PERCEIVER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("plbart", "PLBART_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("poolformer", "POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("prophetnet", "PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("qdqbert", "QDQBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("realm", "REALM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("regnet", "REGNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("rembert", "REMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("resnet", "RESNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("retribert", "RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("roberta", "ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("roformer", "ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("segformer", "SEGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("sew", "SEW_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("sew-d", "SEW_D_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("speech_to_text", "SPEECH_TO_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("speech_to_text_2", "SPEECH_TO_TEXT_2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("splinter", "SPLINTER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("squeezebert", "SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("swin", "SWIN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("swinv2", "SWINV2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("t5", "T5_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("tapas", "TAPAS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("transfo-xl", "TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("unispeech", "UNISPEECH_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("unispeech-sat", "UNISPEECH_SAT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("van", "VAN_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("videomae", "VIDEOMAE_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("vilt", "VILT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("visual_bert", "VISUAL_BERT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("vit", "VIT_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("vit_mae", "VIT_MAE_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("wav2vec2", "WAV_2_VEC_2_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("wav2vec2-conformer", "WAV2VEC2_CONFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xglm", "XGLM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlm", "XLM_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlm-prophetnet", "XLM_PROPHETNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlm-roberta", "XLM_ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("xlnet", "XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("yolos", "YOLOS_PRETRAINED_CONFIG_ARCHIVE_MAP"),
        ("yoso", "YOSO_PRETRAINED_CONFIG_ARCHIVE_MAP"),
    ]
)

# Mapping: model type -> human-readable (cased) model name, used when generating documentation.
# Unlike `CONFIG_MAPPING_NAMES`, this also lists checkpoint/tokenizer-only entries (e.g. "byt5",
# "phobert") that have docs pages but no dedicated config class.
MODEL_NAMES_MAPPING = OrderedDict(
    [
        # Add full (and cased) model names here
        ("albert", "ALBERT"),
        ("bart", "BART"),
        ("barthez", "BARThez"),
        ("bartpho", "BARTpho"),
        ("beit", "BEiT"),
        ("bert", "BERT"),
        ("bert-generation", "Bert Generation"),
        ("bert-japanese", "BertJapanese"),
        ("bertweet", "BERTweet"),
        ("big_bird", "BigBird"),
        ("bigbird_pegasus", "BigBird-Pegasus"),
        ("blenderbot", "Blenderbot"),
        ("blenderbot-small", "BlenderbotSmall"),
        ("bloom", "BLOOM"),
        ("bort", "BORT"),
        ("byt5", "ByT5"),
        ("camembert", "CamemBERT"),
        ("canine", "CANINE"),
        ("clip", "CLIP"),
        ("codegen", "CodeGen"),
        ("convbert", "ConvBERT"),
        ("convnext", "ConvNeXT"),
        ("cpm", "CPM"),
        ("ctrl", "CTRL"),
        ("cvt", "CvT"),
        ("data2vec-audio", "Data2VecAudio"),
        ("data2vec-text", "Data2VecText"),
        ("data2vec-vision", "Data2VecVision"),
        ("deberta", "DeBERTa"),
        ("deberta-v2", "DeBERTa-v2"),
        ("decision_transformer", "Decision Transformer"),
        ("deit", "DeiT"),
        ("detr", "DETR"),
        ("dialogpt", "DialoGPT"),
        ("distilbert", "DistilBERT"),
        ("dit", "DiT"),
        ("donut", "Donut"),
        ("donut-swin", "DonutSwin"),
        ("dpr", "DPR"),
        ("dpt", "DPT"),
        ("electra", "ELECTRA"),
        ("encoder-decoder", "Encoder decoder"),
        ("flaubert", "FlauBERT"),
        ("flava", "FLAVA"),
        ("fnet", "FNet"),
        ("fsmt", "FairSeq Machine-Translation"),
        ("funnel", "Funnel Transformer"),
        ("glpn", "GLPN"),
        ("gpt2", "OpenAI GPT-2"),
        ("gpt_neo", "GPT Neo"),
        ("gpt_neox", "GPT NeoX"),
        ("gptj", "GPT-J"),
        ("groupvit", "GroupViT"),
        ("herbert", "HerBERT"),
        ("hubert", "Hubert"),
        ("ibert", "I-BERT"),
        ("imagegpt", "ImageGPT"),
        ("layoutlm", "LayoutLM"),
        ("layoutlmv2", "LayoutLMv2"),
        ("layoutlmv3", "LayoutLMv3"),
        ("layoutxlm", "LayoutXLM"),
        ("led", "LED"),
        ("levit", "LeViT"),
        ("longformer", "Longformer"),
        ("longt5", "LongT5"),
        ("luke", "LUKE"),
        ("lxmert", "LXMERT"),
        ("m2m_100", "M2M100"),
        ("marian", "Marian"),
        ("maskformer", "MaskFormer"),
        ("mbart", "mBART"),
        ("mbart50", "mBART-50"),
        ("mctct", "M-CTC-T"),
        ("megatron-bert", "Megatron-BERT"),
        ("megatron_gpt2", "Megatron-GPT2"),
        ("mluke", "mLUKE"),
        ("mobilebert", "MobileBERT"),
        ("mobilevit", "MobileViT"),
        ("mpnet", "MPNet"),
        ("mt5", "MT5"),
        ("mvp", "MVP"),
        ("nezha", "Nezha"),
        ("nllb", "NLLB"),
        # Fixed: previous value was mojibake ("Nystr枚mformer") from a bad UTF-8 round-trip.
        ("nystromformer", "Nyströmformer"),
        ("openai-gpt", "OpenAI GPT"),
        ("opt", "OPT"),
        ("owlvit", "OWL-ViT"),
        ("pegasus", "Pegasus"),
        ("perceiver", "Perceiver"),
        ("phobert", "PhoBERT"),
        ("plbart", "PLBart"),
        ("poolformer", "PoolFormer"),
        ("prophetnet", "ProphetNet"),
        ("qdqbert", "QDQBert"),
        ("rag", "RAG"),
        ("realm", "REALM"),
        ("reformer", "Reformer"),
        ("regnet", "RegNet"),
        ("rembert", "RemBERT"),
        ("resnet", "ResNet"),
        ("retribert", "RetriBERT"),
        ("roberta", "RoBERTa"),
        ("roformer", "RoFormer"),
        ("segformer", "SegFormer"),
        ("sew", "SEW"),
        ("sew-d", "SEW-D"),
        ("speech-encoder-decoder", "Speech Encoder decoder"),
        ("speech_to_text", "Speech2Text"),
        ("speech_to_text_2", "Speech2Text2"),
        ("splinter", "Splinter"),
        ("squeezebert", "SqueezeBERT"),
        ("swin", "Swin Transformer"),
        ("swinv2", "Swin Transformer V2"),
        ("t5", "T5"),
        ("t5v1.1", "T5v1.1"),
        ("tapas", "TAPAS"),
        ("tapex", "TAPEX"),
        ("trajectory_transformer", "Trajectory Transformer"),
        ("transfo-xl", "Transformer-XL"),
        ("trocr", "TrOCR"),
        ("ul2", "UL2"),
        ("unispeech", "UniSpeech"),
        ("unispeech-sat", "UniSpeechSat"),
        ("van", "VAN"),
        ("videomae", "VideoMAE"),
        ("vilt", "ViLT"),
        ("vision-encoder-decoder", "Vision Encoder decoder"),
        ("vision-text-dual-encoder", "VisionTextDualEncoder"),
        ("visual_bert", "VisualBERT"),
        ("vit", "ViT"),
        ("vit_mae", "ViTMAE"),
        ("wav2vec2", "Wav2Vec2"),
        ("wav2vec2-conformer", "Wav2Vec2-Conformer"),
        ("wav2vec2_phoneme", "Wav2Vec2Phoneme"),
        ("wavlm", "WavLM"),
        ("xglm", "XGLM"),
        ("xlm", "XLM"),
        ("xlm-prophetnet", "XLM-ProphetNet"),
        ("xlm-roberta", "XLM-RoBERTa"),
        ("xlm-roberta-xl", "XLM-RoBERTa-XL"),
        ("xlnet", "XLNet"),
        ("xls_r", "XLS-R"),
        ("xlsr_wav2vec2", "XLSR-Wav2Vec2"),
        ("yolos", "YOLOS"),
        ("yoso", "YOSO"),
    ]
)

421
# Model types whose code lives in a module whose name differs from the default
# `key.replace("-", "_")` rule used by `model_type_to_module_name` (e.g. all three
# data2vec variants share the `data2vec` module).
SPECIAL_MODEL_TYPE_TO_MODULE_NAME = OrderedDict(
    [
        ("openai-gpt", "openai"),
        ("data2vec-audio", "data2vec"),
        ("data2vec-text", "data2vec"),
        ("data2vec-vision", "data2vec"),
        ("donut-swin", "donut"),
    ]
)
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455


def model_type_to_module_name(key):
    """Converts a config key to the corresponding module."""
    # A few model types map to a module whose name does not follow the
    # default dash-to-underscore convention; check those first.
    special_name = SPECIAL_MODEL_TYPE_TO_MODULE_NAME.get(key)
    if special_name is not None:
        return special_name

    return key.replace("-", "_")


def config_class_to_model_type(config):
    """Converts a config class name to the corresponding model type"""
    # Reverse lookup over CONFIG_MAPPING_NAMES; returns None when no entry matches.
    return next(
        (model_type for model_type, config_name in CONFIG_MAPPING_NAMES.items() if config_name == config),
        None,
    )


class _LazyConfigMapping(OrderedDict):
    """
    A dictionary that lazily load its values when they are requested.

    Keys are model types; values (stored as class-name strings in `_mapping`) are imported from
    `transformers.models.<module>` only on first access. `register` lets users add extra
    config classes at runtime without touching the static mapping.
    """

    def __init__(self, mapping):
        # _mapping: model type -> config class *name* (string), resolved lazily.
        self._mapping = mapping
        # _extra_content: user-registered model type -> config *class* (already resolved).
        self._extra_content = {}
        # _modules: cache of already-imported `transformers.models.<name>` modules.
        self._modules = {}

    def __getitem__(self, key):
        # User-registered entries take precedence and need no import.
        if key in self._extra_content:
            return self._extra_content[key]
        if key not in self._mapping:
            raise KeyError(key)
        value = self._mapping[key]
        module_name = model_type_to_module_name(key)
        # Import the model's module once and cache it.
        if module_name not in self._modules:
            self._modules[module_name] = importlib.import_module(f".{module_name}", "transformers.models")
        if hasattr(self._modules[module_name], value):
            return getattr(self._modules[module_name], value)

        # Some of the mappings have entries model_type -> config of another model type. In that case we try to grab the
        # object at the top level.
        transformers_module = importlib.import_module("transformers")
        return getattr(transformers_module, value)

    def keys(self):
        # Static keys first, then user-registered ones (same order used by __iter__).
        return list(self._mapping.keys()) + list(self._extra_content.keys())

    def values(self):
        # Note: resolves (imports) every static entry via self[k].
        return [self[k] for k in self._mapping.keys()] + list(self._extra_content.values())

    def items(self):
        # Note: resolves (imports) every static entry via self[k].
        return [(k, self[k]) for k in self._mapping.keys()] + list(self._extra_content.items())

    def __iter__(self):
        return iter(list(self._mapping.keys()) + list(self._extra_content.keys()))

    def __contains__(self, item):
        return item in self._mapping or item in self._extra_content

    def register(self, key, value):
        """
        Register a new configuration in this mapping.
        """
        # Refuse to shadow a built-in model type; extra keys live in _extra_content only.
        if key in self._mapping.keys():
            raise ValueError(f"'{key}' is already used by a Transformers config, pick another name.")
        self._extra_content[key] = value
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562


# Public lazy mapping: model type -> config class, importing each class on first access.
CONFIG_MAPPING = _LazyConfigMapping(CONFIG_MAPPING_NAMES)


class _LazyLoadAllMappings(OrderedDict):
    """
    A mapping that will load all pairs of key values at the first access (either by indexing, requestions keys, values,
    etc.)

    Args:
        mapping: The mapping to load.
    """

    def __init__(self, mapping):
        self._mapping = mapping
        self._initialized = False
        self._data = {}

    def _initialize(self):
        if self._initialized:
            return
        warnings.warn(
            "ALL_PRETRAINED_CONFIG_ARCHIVE_MAP is deprecated and will be removed in v5 of Transformers. "
            "It does not contain all available model checkpoints, far from it. Checkout hf.co/models for that.",
            FutureWarning,
        )

        for model_type, map_name in self._mapping.items():
            module_name = model_type_to_module_name(model_type)
            module = importlib.import_module(f".{module_name}", "transformers.models")
            mapping = getattr(module, map_name)
            self._data.update(mapping)

        self._initialized = True

    def __getitem__(self, key):
        self._initialize()
        return self._data[key]

    def keys(self):
        self._initialize()
        return self._data.keys()

    def values(self):
        self._initialize()
        return self._data.values()

    def items(self):
        self._initialize()
        return self._data.keys()

    def __iter__(self):
        self._initialize()
        return iter(self._data)

    def __contains__(self, item):
        self._initialize()
        return item in self._data


# Deprecated aggregate of all per-model archive maps; emits a FutureWarning on first access.
ALL_PRETRAINED_CONFIG_ARCHIVE_MAP = _LazyLoadAllMappings(CONFIG_ARCHIVE_MAP_MAPPING_NAMES)


def _get_class_name(model_class: Union[str, List[str]]):
563
    if isinstance(model_class, (list, tuple)):
Stas Bekman's avatar
Stas Bekman committed
564
565
        return " or ".join([f"[`{c}`]" for c in model_class if c is not None])
    return f"[`{model_class}`]"
566
567


568
569
570
571
572
def _list_model_options(indent, config_to_class=None, use_model_types=True):
    """
    Build the markdown bullet list injected in place of the "List options" docstring placeholder.

    Args:
        indent: Leading whitespace to prefix every generated line with.
        config_to_class: Optional mapping of model type (or config name) to model class(es).
        use_model_types: If True, list by model type; if False, list by configuration class name
            (then `config_to_class` is required).
    """
    if config_to_class is None and not use_model_types:
        raise ValueError("Using `use_model_types=False` requires a `config_to_class` dictionary.")
    if use_model_types:
        if config_to_class is None:
            # No class mapping given: list every known config class by model type.
            model_type_to_name = {model_type: f"[`{config}`]" for model_type, config in CONFIG_MAPPING_NAMES.items()}
        else:
            # Only keep model types that have a display name to show.
            model_type_to_name = {
                model_type: _get_class_name(model_class)
                for model_type, model_class in config_to_class.items()
                if model_type in MODEL_NAMES_MAPPING
            }
        lines = [
            f"{indent}- **{model_type}** -- {model_type_to_name[model_type]} ({MODEL_NAMES_MAPPING[model_type]} model)"
            for model_type in sorted(model_type_to_name.keys())
        ]
    else:
        # Key the listing by configuration class name instead of model type.
        config_to_name = {
            CONFIG_MAPPING_NAMES[config]: _get_class_name(clas)
            for config, clas in config_to_class.items()
            if config in CONFIG_MAPPING_NAMES
        }
        # Config class name -> human-readable model name, for the parenthetical.
        config_to_model_name = {
            config: MODEL_NAMES_MAPPING[model_type] for model_type, config in CONFIG_MAPPING_NAMES.items()
        }
        lines = [
            f"{indent}- [`{config_name}`] configuration class:"
            f" {config_to_name[config_name]} ({config_to_model_name[config_name]} model)"
            for config_name in sorted(config_to_name.keys())
        ]
    return "\n".join(lines)


def replace_list_option_in_docstrings(config_to_class=None, use_model_types=True):
    """
    Decorator factory: replaces the "List options" placeholder line in the decorated function's
    docstring with the generated model/config listing from `_list_model_options`.

    Raises:
        ValueError: if the decorated function's docstring has no "List options" line.
    """

    def docstring_decorator(fn):
        docstrings = fn.__doc__
        lines = docstrings.split("\n")
        i = 0
        # Find the (whitespace-only-indented) "List options" placeholder line.
        while i < len(lines) and re.search(r"^(\s*)List options\s*$", lines[i]) is None:
            i += 1
        if i < len(lines):
            # Reuse the placeholder's indentation; model-type listings are nested one level deeper.
            indent = re.search(r"^(\s*)List options\s*$", lines[i]).groups()[0]
            if use_model_types:
                indent = f"{indent}    "
            lines[i] = _list_model_options(indent, config_to_class=config_to_class, use_model_types=use_model_types)
            docstrings = "\n".join(lines)
        else:
            raise ValueError(
                f"The function {fn} should have an empty 'List options' in its docstring as placeholder, current"
                f" docstring is:\n{docstrings}"
            )
        fn.__doc__ = docstrings
        return fn

    return docstring_decorator


Julien Chaumond's avatar
Julien Chaumond committed
625
class AutoConfig:
    r"""
    This is a generic configuration class that will be instantiated as one of the configuration classes of the library
    when created with the [`~AutoConfig.from_pretrained`] class method.

    This class cannot be instantiated directly using `__init__()` (throws an error).
    """

    def __init__(self):
        raise EnvironmentError(
            "AutoConfig is designed to be instantiated "
            "using the `AutoConfig.from_pretrained(pretrained_model_name_or_path)` method."
        )

    @classmethod
    def for_model(cls, model_type: str, *args, **kwargs):
        """
        Instantiate a fresh (randomly initialized) configuration for a given model type.

        Args:
            model_type (`str`): The model type, e.g. `"bert"`; must be a key of `CONFIG_MAPPING`.
            *args, **kwargs: Forwarded unchanged to the configuration class constructor.

        Raises:
            ValueError: If `model_type` is not a registered model type.
        """
        if model_type in CONFIG_MAPPING:
            config_class = CONFIG_MAPPING[model_type]
            return config_class(*args, **kwargs)
        raise ValueError(
            f"Unrecognized model identifier: {model_type}. Should contain one of {', '.join(CONFIG_MAPPING.keys())}"
        )

    @classmethod
    @replace_list_option_in_docstrings()
    def from_pretrained(cls, pretrained_model_name_or_path, **kwargs):
        r"""
        Instantiate one of the configuration classes of the library from a pretrained model configuration.

        The configuration class to instantiate is selected based on the `model_type` property of the config object that
        is loaded, or when it's missing, by falling back to using pattern matching on `pretrained_model_name_or_path`:

        List options

        Args:
            pretrained_model_name_or_path (`str` or `os.PathLike`):
                Can be either:

                    - A string, the *model id* of a pretrained model configuration hosted inside a model repo on
                      huggingface.co. Valid model ids can be located at the root-level, like `bert-base-uncased`, or
                      namespaced under a user or organization name, like `dbmdz/bert-base-german-cased`.
                    - A path to a *directory* containing a configuration file saved using the
                      [`~PretrainedConfig.save_pretrained`] method, or the [`~PreTrainedModel.save_pretrained`] method,
                      e.g., `./my_model_directory/`.
                    - A path or url to a saved configuration JSON *file*, e.g.,
                      `./my_model_directory/configuration.json`.
            cache_dir (`str` or `os.PathLike`, *optional*):
                Path to a directory in which a downloaded pretrained model configuration should be cached if the
                standard cache should not be used.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether or not to force the (re-)download the model weights and configuration files and override the
                cached versions if they exist.
            resume_download (`bool`, *optional*, defaults to `False`):
                Whether or not to delete incompletely received files. Will attempt to resume the download if such a
                file exists.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.
            revision (`str`, *optional*, defaults to `"main"`):
                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
                identifier allowed by git.
            return_unused_kwargs (`bool`, *optional*, defaults to `False`):
                If `False`, then this function returns just the final configuration object.

                If `True`, then this functions returns a `Tuple(config, unused_kwargs)` where *unused_kwargs* is a
                dictionary consisting of the key/value pairs whose keys are not configuration attributes: i.e., the
                part of `kwargs` which has not been used to update `config` and is otherwise ignored.
            trust_remote_code (`bool`, *optional*, defaults to `False`):
                Whether or not to allow for custom models defined on the Hub in their own modeling files. This option
                should only be set to `True` for repositories you trust and in which you have read the code, as it will
                execute code present on the Hub on your local machine.
            kwargs(additional keyword arguments, *optional*):
                The values in kwargs of any keys which are configuration attributes will be used to override the loaded
                values. Behavior concerning key/value pairs whose keys are *not* configuration attributes is controlled
                by the `return_unused_kwargs` keyword parameter.

        Examples:

        ```python
        >>> from transformers import AutoConfig

        >>> # Download configuration from huggingface.co and cache.
        >>> config = AutoConfig.from_pretrained("bert-base-uncased")

        >>> # Download configuration from huggingface.co (user-uploaded) and cache.
        >>> config = AutoConfig.from_pretrained("dbmdz/bert-base-german-cased")

        >>> # If configuration file is in a directory (e.g., was saved using *save_pretrained('./test/saved_model/')*).
        >>> config = AutoConfig.from_pretrained("./test/bert_saved_model/")

        >>> # Load a specific configuration file.
        >>> config = AutoConfig.from_pretrained("./test/bert_saved_model/my_configuration.json")

        >>> # Change some config attributes when loading a pretrained config.
        >>> config = AutoConfig.from_pretrained("bert-base-uncased", output_attentions=True, foo=False)
        >>> config.output_attentions
        True

        >>> config, unused_kwargs = AutoConfig.from_pretrained(
        ...     "bert-base-uncased", output_attentions=True, foo=False, return_unused_kwargs=True
        ... )
        >>> config.output_attentions
        True

        >>> unused_kwargs
        {'foo': False}
        ```"""
        kwargs["_from_auto"] = True
        kwargs["name_or_path"] = pretrained_model_name_or_path
        trust_remote_code = kwargs.pop("trust_remote_code", False)
        config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs)
        if "auto_map" in config_dict and "AutoConfig" in config_dict["auto_map"]:
            # The repo ships its own configuration class: only execute that remote code when the user
            # explicitly opted in via `trust_remote_code=True`.
            if not trust_remote_code:
                raise ValueError(
                    f"Loading {pretrained_model_name_or_path} requires you to execute the configuration file in that"
                    " repo on your local machine. Make sure you have read the code there to avoid malicious use, then"
                    " set the option `trust_remote_code=True` to remove this error."
                )
            if kwargs.get("revision", None) is None:
                logger.warning(
                    "Explicitly passing a `revision` is encouraged when loading a configuration with custom code to "
                    "ensure no malicious code has been contributed in a newer revision."
                )
            class_ref = config_dict["auto_map"]["AutoConfig"]
            # `class_ref` is of the form "module_file.ClassName".
            module_file, class_name = class_ref.split(".")
            config_class = get_class_from_dynamic_module(
                pretrained_model_name_or_path, module_file + ".py", class_name, **kwargs
            )
            return config_class.from_pretrained(pretrained_model_name_or_path, **kwargs)
        elif "model_type" in config_dict:
            config_class = CONFIG_MAPPING[config_dict["model_type"]]
            return config_class.from_dict(config_dict, **unused_kwargs)
        else:
            # Fallback: use pattern matching on the string.
            # We go from longer names to shorter names to catch roberta before bert (for instance)
            for pattern in sorted(CONFIG_MAPPING.keys(), key=len, reverse=True):
                if pattern in str(pretrained_model_name_or_path):
                    return CONFIG_MAPPING[pattern].from_dict(config_dict, **unused_kwargs)

        raise ValueError(
            f"Unrecognized model in {pretrained_model_name_or_path}. "
            f"Should have a `model_type` key in its {CONFIG_NAME}, or contain one of the following strings "
            f"in its name: {', '.join(CONFIG_MAPPING.keys())}"
        )

    @staticmethod
    def register(model_type, config):
        """
        Register a new configuration for this class.

        Args:
            model_type (`str`): The model type like "bert" or "gpt".
            config ([`PretrainedConfig`]): The config to register.

        Raises:
            ValueError: If `config` is a `PretrainedConfig` subclass whose `model_type` attribute disagrees with
                the `model_type` argument.
        """
        if issubclass(config, PretrainedConfig) and config.model_type != model_type:
            raise ValueError(
                "The config you are passing has a `model_type` attribute that is not consistent with the model type "
                f"you passed (config has {config.model_type} and you passed {model_type}). Fix one of those so they "
                "match!"
            )
        CONFIG_MAPPING.register(model_type, config)