# coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import inspect
import os
import re
import warnings
from collections import OrderedDict
from difflib import get_close_matches
from pathlib import Path

from transformers import is_flax_available, is_tf_available, is_torch_available
from transformers.models.auto import get_values
from transformers.models.auto.configuration_auto import CONFIG_MAPPING_NAMES
from transformers.models.auto.feature_extraction_auto import FEATURE_EXTRACTOR_MAPPING_NAMES
from transformers.models.auto.image_processing_auto import IMAGE_PROCESSOR_MAPPING_NAMES
from transformers.models.auto.processing_auto import PROCESSOR_MAPPING_NAMES
from transformers.models.auto.tokenization_auto import TOKENIZER_MAPPING_NAMES
from transformers.utils import ENV_VARS_TRUE_VALUES, direct_transformers_import

# All paths are set with the intent that you should run this script from the root of the repo with the command
# python utils/check_repo.py
PATH_TO_TRANSFORMERS = "src/transformers"
PATH_TO_TESTS = "tests"
PATH_TO_DOC = "docs/source/en"

# Update this list with models that are supposed to be private.
PRIVATE_MODELS = [
    "AltRobertaModel",
    "DPRSpanPredictor",
    "LongT5Stack",
    "RealmBertModel",
    "T5Stack",
    "MT5Stack",
    "SwitchTransformersStack",
    "TFDPRSpanPredictor",
    "MaskFormerSwinModel",
    "MaskFormerSwinPreTrainedModel",
    "BridgeTowerTextModel",
    "BridgeTowerVisionModel",
]

# Update this list with models that are not tested, adding a comment explaining the reason they should not be.
# Being in this list is an exception and should **not** be the rule.
IGNORE_NON_TESTED = PRIVATE_MODELS.copy() + [
    # models to ignore for not tested
    "InstructBlipQFormerModel",  # Building part of bigger (tested) model.
    "NllbMoeDecoder",  # Building part of bigger (tested) model.
    "NllbMoeEncoder",  # Building part of bigger (tested) model.
    "LlamaDecoder",  # Building part of bigger (tested) model.
    "Blip2QFormerModel",  # Building part of bigger (tested) model.
    "DetaEncoder",  # Building part of bigger (tested) model.
    "DetaDecoder",  # Building part of bigger (tested) model.
    "ErnieMForInformationExtraction",
    "GraphormerEncoder",  # Building part of bigger (tested) model.
    "GraphormerDecoderHead",  # Building part of bigger (tested) model.
    "CLIPSegDecoder",  # Building part of bigger (tested) model.
    "TableTransformerEncoder",  # Building part of bigger (tested) model.
    "TableTransformerDecoder",  # Building part of bigger (tested) model.
    "TimeSeriesTransformerEncoder",  # Building part of bigger (tested) model.
    "TimeSeriesTransformerDecoder",  # Building part of bigger (tested) model.
    "InformerEncoder",  # Building part of bigger (tested) model.
    "InformerDecoder",  # Building part of bigger (tested) model.
    "AutoformerEncoder",  # Building part of bigger (tested) model.
    "AutoformerDecoder",  # Building part of bigger (tested) model.
    "JukeboxVQVAE",  # Building part of bigger (tested) model.
    "JukeboxPrior",  # Building part of bigger (tested) model.
    "DeformableDetrEncoder",  # Building part of bigger (tested) model.
    "DeformableDetrDecoder",  # Building part of bigger (tested) model.
    "OPTDecoder",  # Building part of bigger (tested) model.
    "FlaxWhisperDecoder",  # Building part of bigger (tested) model.
    "FlaxWhisperEncoder",  # Building part of bigger (tested) model.
    "WhisperDecoder",  # Building part of bigger (tested) model.
    "WhisperEncoder",  # Building part of bigger (tested) model.
    "DecisionTransformerGPT2Model",  # Building part of bigger (tested) model.
    "SegformerDecodeHead",  # Building part of bigger (tested) model.
    "PLBartEncoder",  # Building part of bigger (tested) model.
    "PLBartDecoder",  # Building part of bigger (tested) model.
    "PLBartDecoderWrapper",  # Building part of bigger (tested) model.
    "BigBirdPegasusEncoder",  # Building part of bigger (tested) model.
    "BigBirdPegasusDecoder",  # Building part of bigger (tested) model.
    "BigBirdPegasusDecoderWrapper",  # Building part of bigger (tested) model.
    "DetrEncoder",  # Building part of bigger (tested) model.
    "DetrDecoder",  # Building part of bigger (tested) model.
    "DetrDecoderWrapper",  # Building part of bigger (tested) model.
    "ConditionalDetrEncoder",  # Building part of bigger (tested) model.
    "ConditionalDetrDecoder",  # Building part of bigger (tested) model.
    "M2M100Encoder",  # Building part of bigger (tested) model.
    "M2M100Decoder",  # Building part of bigger (tested) model.
    "MCTCTEncoder",  # Building part of bigger (tested) model.
    "MgpstrModel",  # Building part of bigger (tested) model.
    "Speech2TextEncoder",  # Building part of bigger (tested) model.
    "Speech2TextDecoder",  # Building part of bigger (tested) model.
    "LEDEncoder",  # Building part of bigger (tested) model.
    "LEDDecoder",  # Building part of bigger (tested) model.
    "BartDecoderWrapper",  # Building part of bigger (tested) model.
    "BartEncoder",  # Building part of bigger (tested) model.
    "BertLMHeadModel",  # Needs to be set up as decoder.
    "BlenderbotSmallEncoder",  # Building part of bigger (tested) model.
    "BlenderbotSmallDecoderWrapper",  # Building part of bigger (tested) model.
    "BlenderbotEncoder",  # Building part of bigger (tested) model.
    "BlenderbotDecoderWrapper",  # Building part of bigger (tested) model.
    "MBartEncoder",  # Building part of bigger (tested) model.
    "MBartDecoderWrapper",  # Building part of bigger (tested) model.
    "MegatronBertLMHeadModel",  # Building part of bigger (tested) model.
    "MegatronBertEncoder",  # Building part of bigger (tested) model.
    "MegatronBertDecoder",  # Building part of bigger (tested) model.
    "MegatronBertDecoderWrapper",  # Building part of bigger (tested) model.
    "MusicgenDecoder",  # Building part of bigger (tested) model.
    "MvpDecoderWrapper",  # Building part of bigger (tested) model.
    "MvpEncoder",  # Building part of bigger (tested) model.
    "PegasusEncoder",  # Building part of bigger (tested) model.
    "PegasusDecoderWrapper",  # Building part of bigger (tested) model.
    "PegasusXEncoder",  # Building part of bigger (tested) model.
    "PegasusXDecoder",  # Building part of bigger (tested) model.
    "PegasusXDecoderWrapper",  # Building part of bigger (tested) model.
    "DPREncoder",  # Building part of bigger (tested) model.
    "ProphetNetDecoderWrapper",  # Building part of bigger (tested) model.
    "RealmBertModel",  # Building part of bigger (tested) model.
    "RealmReader",  # Not regular model.
    "RealmScorer",  # Not regular model.
    "RealmForOpenQA",  # Not regular model.
    "ReformerForMaskedLM",  # Needs to be set up as decoder.
    "Speech2Text2DecoderWrapper",  # Building part of bigger (tested) model.
    "TFDPREncoder",  # Building part of bigger (tested) model.
    "TFElectraMainLayer",  # Building part of bigger (tested) model (should it be a TFPreTrainedModel?)
    "TFRobertaForMultipleChoice",  # TODO: fix
    "TFRobertaPreLayerNormForMultipleChoice",  # TODO: fix
    "TrOCRDecoderWrapper",  # Building part of bigger (tested) model.
    "TFWhisperEncoder",  # Building part of bigger (tested) model.
    "TFWhisperDecoder",  # Building part of bigger (tested) model.
    "SeparableConv1D",  # Building part of bigger (tested) model.
    "FlaxBartForCausalLM",  # Building part of bigger (tested) model.
    "FlaxBertForCausalLM",  # Building part of bigger (tested) model. Tested implicitly through FlaxRobertaForCausalLM.
    "OPTDecoderWrapper",  # Building part of bigger (tested) model.
    "TFSegformerDecodeHead",  # Not a regular model.
    "AltRobertaModel",  # Building part of bigger (tested) model.
    "BlipTextLMHeadModel",  # No need to test it as it is tested by BlipTextVision models
    "TFBlipTextLMHeadModel",  # No need to test it as it is tested by BlipTextVision models
    "BridgeTowerTextModel",  # No need to test it as it is tested by BridgeTowerModel model.
    "BridgeTowerVisionModel",  # No need to test it as it is tested by BridgeTowerModel model.
    "SpeechT5Decoder",  # Building part of bigger (tested) model.
    "SpeechT5DecoderWithoutPrenet",  # Building part of bigger (tested) model.
    "SpeechT5DecoderWithSpeechPrenet",  # Building part of bigger (tested) model.
    "SpeechT5DecoderWithTextPrenet",  # Building part of bigger (tested) model.
    "SpeechT5Encoder",  # Building part of bigger (tested) model.
    "SpeechT5EncoderWithoutPrenet",  # Building part of bigger (tested) model.
    "SpeechT5EncoderWithSpeechPrenet",  # Building part of bigger (tested) model.
    "SpeechT5EncoderWithTextPrenet",  # Building part of bigger (tested) model.
    "SpeechT5SpeechDecoder",  # Building part of bigger (tested) model.
    "SpeechT5SpeechEncoder",  # Building part of bigger (tested) model.
    "SpeechT5TextDecoder",  # Building part of bigger (tested) model.
    "SpeechT5TextEncoder",  # Building part of bigger (tested) model.
]

# Update this list with test files that don't have a tester with an `all_model_classes` variable and which don't
# trigger the common tests.
TEST_FILES_WITH_NO_COMMON_TESTS = [
    "models/decision_transformer/test_modeling_decision_transformer.py",
    "models/camembert/test_modeling_camembert.py",
    "models/mt5/test_modeling_flax_mt5.py",
    "models/mbart/test_modeling_mbart.py",
    "models/mt5/test_modeling_mt5.py",
    "models/pegasus/test_modeling_pegasus.py",
    "models/camembert/test_modeling_tf_camembert.py",
    "models/mt5/test_modeling_tf_mt5.py",
    "models/xlm_roberta/test_modeling_tf_xlm_roberta.py",
    "models/xlm_roberta/test_modeling_flax_xlm_roberta.py",
    "models/xlm_prophetnet/test_modeling_xlm_prophetnet.py",
    "models/xlm_roberta/test_modeling_xlm_roberta.py",
    "models/vision_text_dual_encoder/test_modeling_vision_text_dual_encoder.py",
    "models/vision_text_dual_encoder/test_modeling_tf_vision_text_dual_encoder.py",
    "models/vision_text_dual_encoder/test_modeling_flax_vision_text_dual_encoder.py",
]

# Update this list for models that are not in any of the auto MODEL_XXX_MAPPING. Being in this list is an exception and
# should **not** be the rule.
IGNORE_NON_AUTO_CONFIGURED = PRIVATE_MODELS.copy() + [
    # models to ignore for model xxx mapping
    "AlignTextModel",
    "AlignVisionModel",
    "ClapTextModel",
    "ClapTextModelWithProjection",
    "ClapAudioModel",
    "ClapAudioModelWithProjection",
    "Blip2ForConditionalGeneration",
    "Blip2QFormerModel",
    "Blip2VisionModel",
    "ErnieMForInformationExtraction",
    "GitVisionModel",
    "GraphormerModel",
    "GraphormerForGraphClassification",
    "BlipForConditionalGeneration",
    "BlipForImageTextRetrieval",
    "BlipForQuestionAnswering",
    "BlipVisionModel",
    "BlipTextLMHeadModel",
    "BlipTextModel",
    "TFBlipForConditionalGeneration",
    "TFBlipForImageTextRetrieval",
    "TFBlipForQuestionAnswering",
    "TFBlipVisionModel",
    "TFBlipTextLMHeadModel",
    "TFBlipTextModel",
    "Swin2SRForImageSuperResolution",
    "BridgeTowerForImageAndTextRetrieval",
    "BridgeTowerForMaskedLM",
    "BridgeTowerForContrastiveLearning",
    "CLIPSegForImageSegmentation",
    "CLIPSegVisionModel",
    "CLIPSegTextModel",
    "EsmForProteinFolding",
    "GPTSanJapaneseModel",
    "TimeSeriesTransformerForPrediction",
    "InformerForPrediction",
    "AutoformerForPrediction",
    "JukeboxVQVAE",
    "JukeboxPrior",
    "PegasusXEncoder",
    "PegasusXDecoder",
    "PegasusXDecoderWrapper",
    "SamModel",
    "DPTForDepthEstimation",
    "DecisionTransformerGPT2Model",
    "GLPNForDepthEstimation",
    "ViltForImagesAndTextClassification",
    "ViltForImageAndTextRetrieval",
    "ViltForTokenClassification",
    "ViltForMaskedLM",
    "XGLMEncoder",
    "XGLMDecoder",
    "XGLMDecoderWrapper",
    "PerceiverForMultimodalAutoencoding",
    "PerceiverForOpticalFlow",
    "SegformerDecodeHead",
    "TFSegformerDecodeHead",
    "FlaxBeitForMaskedImageModeling",
    "PLBartEncoder",
    "PLBartDecoder",
    "PLBartDecoderWrapper",
    "BeitForMaskedImageModeling",
    "ChineseCLIPTextModel",
    "ChineseCLIPVisionModel",
    "CLIPTextModel",
    "CLIPTextModelWithProjection",
    "CLIPVisionModel",
    "CLIPVisionModelWithProjection",
    "GroupViTTextModel",
    "GroupViTVisionModel",
    "TFCLIPTextModel",
    "TFCLIPVisionModel",
    "TFGroupViTTextModel",
    "TFGroupViTVisionModel",
    "FlaxCLIPTextModel",
    "FlaxCLIPVisionModel",
    "FlaxWav2Vec2ForCTC",
    "DetrForSegmentation",
    "Pix2StructVisionModel",
    "Pix2StructTextModel",
    "Pix2StructForConditionalGeneration",
    "ConditionalDetrForSegmentation",
    "DPRReader",
    "FlaubertForQuestionAnswering",
    "FlavaImageCodebook",
    "FlavaTextModel",
    "FlavaImageModel",
    "FlavaMultimodalModel",
    "GPT2DoubleHeadsModel",
    "GPTSw3DoubleHeadsModel",
    "InstructBlipVisionModel",
    "InstructBlipQFormerModel",
    "LayoutLMForQuestionAnswering",
    "LukeForMaskedLM",
    "LukeForEntityClassification",
    "LukeForEntityPairClassification",
    "LukeForEntitySpanClassification",
    "MgpstrModel",
    "OpenAIGPTDoubleHeadsModel",
    "OwlViTTextModel",
    "OwlViTVisionModel",
    "OwlViTForObjectDetection",
    "RagModel",
    "RagSequenceForGeneration",
    "RagTokenForGeneration",
    "RealmEmbedder",
    "RealmForOpenQA",
    "RealmScorer",
    "RealmReader",
    "TFDPRReader",
    "TFGPT2DoubleHeadsModel",
    "TFLayoutLMForQuestionAnswering",
    "TFOpenAIGPTDoubleHeadsModel",
    "TFRagModel",
    "TFRagSequenceForGeneration",
    "TFRagTokenForGeneration",
    "Wav2Vec2ForCTC",
    "HubertForCTC",
    "SEWForCTC",
    "SEWDForCTC",
    "XLMForQuestionAnswering",
    "XLNetForQuestionAnswering",
    "SeparableConv1D",
    "VisualBertForRegionToPhraseAlignment",
    "VisualBertForVisualReasoning",
    "VisualBertForQuestionAnswering",
    "VisualBertForMultipleChoice",
    "TFWav2Vec2ForCTC",
    "TFHubertForCTC",
    "XCLIPVisionModel",
    "XCLIPTextModel",
    "AltCLIPTextModel",
    "AltCLIPVisionModel",
    "AltRobertaModel",
    "TvltForAudioVisualClassification",
    "SpeechT5ForSpeechToSpeech",
    "SpeechT5ForTextToSpeech",
    "SpeechT5HifiGan",
    "MusicgenModel",
    "MusicgenForConditionalGeneration",
]

# Update this list for models that have multiple model types for the same
# model doc.
MODEL_TYPE_TO_DOC_MAPPING = OrderedDict(
    [
        ("data2vec-text", "data2vec"),
        ("data2vec-audio", "data2vec"),
        ("data2vec-vision", "data2vec"),
        ("donut-swin", "donut"),
    ]
)

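# A quick illustration of how this mapping is consumed in `check_model_type_doc_match` below:
# model types sharing a doc page are remapped before comparing against the doc folder, e.g.
#
#     MODEL_TYPE_TO_DOC_MAPPING.get("data2vec-text", "data2vec-text")  # -> "data2vec"
#     MODEL_TYPE_TO_DOC_MAPPING.get("bert", "bert")                    # -> "bert" (no remapping)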

# This is to make sure the transformers module imported is the one in the repo.
transformers = direct_transformers_import(PATH_TO_TRANSFORMERS)


def check_missing_backends():
    missing_backends = []
    if not is_torch_available():
        missing_backends.append("PyTorch")
    if not is_tf_available():
        missing_backends.append("TensorFlow")
    if not is_flax_available():
        missing_backends.append("Flax")
    if len(missing_backends) > 0:
        missing = ", ".join(missing_backends)
        if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
            raise Exception(
                "Full repo consistency checks require all backends to be installed (with `pip install -e .[dev]` in "
                f"the Transformers repo). The following are missing: {missing}."
            )
        else:
            warnings.warn(
                "Full repo consistency checks require all backends to be installed (with `pip install -e .[dev]` in "
                f"the Transformers repo). The following are missing: {missing}. While it's probably fine as long as "
                "you didn't make any change in one of those backends' modeling files, you should probably execute "
                "the command above to be on the safe side."
            )

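# Note: with e.g. `TRANSFORMERS_IS_CI=1` (any value in `ENV_VARS_TRUE_VALUES`), a missing
# backend makes `check_missing_backends` raise instead of only warning.
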

def check_model_list():
    """Check the model list inside the transformers library."""
    # Get the models from the directory structure of `src/transformers/models/`
    models_dir = os.path.join(PATH_TO_TRANSFORMERS, "models")
    _models = []
    for model in os.listdir(models_dir):
        model_dir = os.path.join(models_dir, model)
        if os.path.isdir(model_dir) and "__init__.py" in os.listdir(model_dir):
            _models.append(model)

    # Get the models declared in `transformers.models` (i.e. exposed in the main init)
    models = [model for model in dir(transformers.models) if not model.startswith("__")]

    missing_models = sorted(set(_models).difference(models))
    if missing_models:
        raise Exception(
            f"The following models should be included in {models_dir}/__init__.py: {','.join(missing_models)}."
        )


# If some modeling modules should be ignored for all checks, they should be added in the nested list
# _ignore_modules of this function.
def get_model_modules():
    """Get the model modules inside the transformers library."""
    _ignore_modules = [
        "modeling_auto",
        "modeling_encoder_decoder",
        "modeling_marian",
        "modeling_mmbt",
        "modeling_outputs",
        "modeling_retribert",
        "modeling_utils",
        "modeling_flax_auto",
        "modeling_flax_encoder_decoder",
        "modeling_flax_utils",
        "modeling_speech_encoder_decoder",
        "modeling_flax_speech_encoder_decoder",
        "modeling_flax_vision_encoder_decoder",
        "modeling_timm_backbone",
        "modeling_transfo_xl_utilities",
        "modeling_tf_auto",
        "modeling_tf_encoder_decoder",
        "modeling_tf_outputs",
        "modeling_tf_pytorch_utils",
        "modeling_tf_utils",
        "modeling_tf_transfo_xl_utilities",
        "modeling_tf_vision_encoder_decoder",
        "modeling_vision_encoder_decoder",
    ]
    modules = []
    for model in dir(transformers.models):
        # There are some magic dunder attributes in the dir, we ignore them
        if not model.startswith("__"):
            model_module = getattr(transformers.models, model)
            for submodule in dir(model_module):
                if submodule.startswith("modeling") and submodule not in _ignore_modules:
                    modeling_module = getattr(model_module, submodule)
                    if inspect.ismodule(modeling_module):
                        modules.append(modeling_module)
    return modules


def get_models(module, include_pretrained=False):
    """Get the objects in module that are models."""
    models = []
    model_classes = (transformers.PreTrainedModel, transformers.TFPreTrainedModel, transformers.FlaxPreTrainedModel)
    for attr_name in dir(module):
        if not include_pretrained and ("Pretrained" in attr_name or "PreTrained" in attr_name):
            continue
        attr = getattr(module, attr_name)
        if isinstance(attr, type) and issubclass(attr, model_classes) and attr.__module__ == module.__name__:
            models.append((attr_name, attr))
    return models

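# A minimal sketch of what `get_models` returns, using the BERT modeling module as an example:
#
#     from transformers.models.bert import modeling_bert
#     get_models(modeling_bert)
#     # -> [("BertForMaskedLM", <class ...>), ("BertModel", <class ...>), ...]
#
# i.e. `(name, class)` pairs for every model class defined (not just imported) in the module.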

def is_a_private_model(model):
    """Returns True if the model should not be in the main init."""
    if model in PRIVATE_MODELS:
        return True

    # Wrapper, Encoder and Decoder are all private
    if model.endswith("Wrapper"):
        return True
    if model.endswith("Encoder"):
        return True
    if model.endswith("Decoder"):
        return True
    if model.endswith("Prenet"):
        return True
    return False

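# For example, under the rules above (a sketch):
#
#     is_a_private_model("BartEncoder")  # True: ends with "Encoder"
#     is_a_private_model("T5Stack")      # True: listed in PRIVATE_MODELS
#     is_a_private_model("BertModel")    # False: must be exposed in the main init
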

def check_models_are_in_init():
    """Checks all models defined in the library are in the main init."""
    models_not_in_init = []
    dir_transformers = dir(transformers)
    for module in get_model_modules():
        models_not_in_init += [
            model[0] for model in get_models(module, include_pretrained=True) if model[0] not in dir_transformers
        ]

    # Remove private models
    models_not_in_init = [model for model in models_not_in_init if not is_a_private_model(model)]
    if len(models_not_in_init) > 0:
        raise Exception(f"The following models should be in the main init: {','.join(models_not_in_init)}.")


# If some test_modeling files should be ignored when checking models are all tested, they should be added in the
# nested list _ignore_files of this function.
def get_model_test_files():
    """Get the model test files.

    The returned files do NOT contain the `tests` prefix (i.e. `PATH_TO_TESTS` defined in this script): they are
    paths relative to `tests`. A caller has to use `os.path.join(PATH_TO_TESTS, ...)` to access the files.
    """

    _ignore_files = [
        "test_modeling_common",
        "test_modeling_encoder_decoder",
        "test_modeling_flax_encoder_decoder",
        "test_modeling_flax_speech_encoder_decoder",
        "test_modeling_marian",
        "test_modeling_tf_common",
        "test_modeling_tf_encoder_decoder",
    ]
    test_files = []
    # Check both `PATH_TO_TESTS` and `PATH_TO_TESTS/models`
    model_test_root = os.path.join(PATH_TO_TESTS, "models")
    model_test_dirs = []
    for x in os.listdir(model_test_root):
        x = os.path.join(model_test_root, x)
        if os.path.isdir(x):
            model_test_dirs.append(x)

    for target_dir in [PATH_TO_TESTS] + model_test_dirs:
        for file_or_dir in os.listdir(target_dir):
            path = os.path.join(target_dir, file_or_dir)
            if os.path.isfile(path):
                filename = os.path.split(path)[-1]
                if "test_modeling" in filename and os.path.splitext(filename)[0] not in _ignore_files:
                    file = os.path.join(*path.split(os.sep)[1:])
                    test_files.append(file)

    return test_files

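# The returned paths thus look like "models/bert/test_modeling_bert.py" and must be joined
# with `PATH_TO_TESTS` before being opened (as `find_tested_models` does below).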

# This is a bit hacky but I didn't find a way to import the test_file as a module and read inside the tester class
# for the all_model_classes variable.
def find_tested_models(test_file):
    """Parse the content of test_file to detect what's in all_model_classes"""
    with open(os.path.join(PATH_TO_TESTS, test_file), "r", encoding="utf-8", newline="\n") as f:
        content = f.read()
    all_models = re.findall(r"all_model_classes\s+=\s+\(\s*\(([^\)]*)\)", content)
    # Check with one less parenthesis as well
    all_models += re.findall(r"all_model_classes\s+=\s+\(([^\)]*)\)", content)
    if len(all_models) > 0:
        model_tested = []
        for entry in all_models:
            for line in entry.split(","):
                name = line.strip()
                if len(name) > 0:
                    model_tested.append(name)
        return model_tested

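# A sketch of what the two regexes above pick up from a test file:
#
#     all_model_classes = (BertModel, BertForMaskedLM) if is_torch_available() else ()
#
# yields ["BertModel", "BertForMaskedLM"]. The first, double-parenthesis variant catches
# declarations like `all_model_classes = ((BertModel,) if is_torch_available() else ())`.
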

def check_models_are_tested(module, test_file):
    """Check models defined in module are tested in test_file."""
    # XxxPreTrainedModel are not tested
    defined_models = get_models(module)
    tested_models = find_tested_models(test_file)
    if tested_models is None:
        if test_file.replace(os.path.sep, "/") in TEST_FILES_WITH_NO_COMMON_TESTS:
            return
        return [
            f"{test_file} should define `all_model_classes` to apply common tests to the models it tests. "
            + "If this intentional, add the test filename to `TEST_FILES_WITH_NO_COMMON_TESTS` in the file "
            + "`utils/check_repo.py`."
        ]
    failures = []
    for model_name, _ in defined_models:
        if model_name not in tested_models and model_name not in IGNORE_NON_TESTED:
            failures.append(
                f"{model_name} is defined in {module.__name__} but is not tested in "
                + f"{os.path.join(PATH_TO_TESTS, test_file)}. Add it to the all_model_classes in that file."
                + "If common tests should not applied to that model, add its name to `IGNORE_NON_TESTED`"
                + "in the file `utils/check_repo.py`."
            )
    return failures


def check_all_models_are_tested():
    """Check all models are properly tested."""
    modules = get_model_modules()
    test_files = get_model_test_files()
    failures = []
    for module in modules:
        test_file = [file for file in test_files if f"test_{module.__name__.split('.')[-1]}.py" in file]
        if len(test_file) == 0:
            failures.append(
                f"{module.__name__} does not have its corresponding test file "
                f"test_{module.__name__.split('.')[-1]}.py."
            )
        elif len(test_file) > 1:
            failures.append(f"{module.__name__} has several test files: {test_file}.")
        else:
            test_file = test_file[0]
            new_failures = check_models_are_tested(module, test_file)
            if new_failures is not None:
                failures += new_failures
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


def get_all_auto_configured_models():
    """Return the list of all models in at least one auto class."""
    result = set()  # To avoid duplicates we concatenate all model classes in a set.
    if is_torch_available():
        for attr_name in dir(transformers.models.auto.modeling_auto):
            if attr_name.startswith("MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_auto, attr_name)))
    if is_tf_available():
        for attr_name in dir(transformers.models.auto.modeling_tf_auto):
            if attr_name.startswith("TF_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_tf_auto, attr_name)))
    if is_flax_available():
        for attr_name in dir(transformers.models.auto.modeling_flax_auto):
            if attr_name.startswith("FLAX_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_flax_auto, attr_name)))
    return list(result)

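# For reference, the `*_MAPPING_NAMES` constants walked above look like (a sketch):
#
#     MODEL_FOR_MASKED_LM_MAPPING_NAMES = OrderedDict([("bert", "BertForMaskedLM"), ...])
#
# and `get_values` flattens the values (including tuple values) into a flat list of names.
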

def ignore_unautoclassed(model_name):
    """Rules to determine if `model_name` should be exempt from being in an auto class."""
    # Special whitelist
    if model_name in IGNORE_NON_AUTO_CONFIGURED:
        return True
    # Encoder and Decoder should be ignored
    if "Encoder" in model_name or "Decoder" in model_name:
        return True
    return False


def check_models_are_auto_configured(module, all_auto_models):
    """Check models defined in module are each in an auto class."""
    defined_models = get_models(module)
    failures = []
    for model_name, _ in defined_models:
        if model_name not in all_auto_models and not ignore_unautoclassed(model_name):
            failures.append(
                f"{model_name} is defined in {module.__name__} but is not present in any of the auto mappings. "
                "If that is intended behavior, add its name to `IGNORE_NON_AUTO_CONFIGURED` in the file "
                "`utils/check_repo.py`."
            )
    return failures


def check_all_models_are_auto_configured():
    """Check all models are each in an auto class."""
    check_missing_backends()
    modules = get_model_modules()
    all_auto_models = get_all_auto_configured_models()
    failures = []
    for module in modules:
        new_failures = check_models_are_auto_configured(module, all_auto_models)
        if new_failures is not None:
            failures += new_failures
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


def check_all_auto_object_names_being_defined():
    """Check all names defined in auto (name) mappings exist in the library."""
    check_missing_backends()

    failures = []
    mappings_to_check = {
        "TOKENIZER_MAPPING_NAMES": TOKENIZER_MAPPING_NAMES,
        "IMAGE_PROCESSOR_MAPPING_NAMES": IMAGE_PROCESSOR_MAPPING_NAMES,
        "FEATURE_EXTRACTOR_MAPPING_NAMES": FEATURE_EXTRACTOR_MAPPING_NAMES,
        "PROCESSOR_MAPPING_NAMES": PROCESSOR_MAPPING_NAMES,
    }

    # Each auto modeling file contains multiple mappings. Let's get them in a dynamic way.
    for module_name in ["modeling_auto", "modeling_tf_auto", "modeling_flax_auto"]:
        module = getattr(transformers.models.auto, module_name, None)
        if module is None:
            continue
        # all mappings in a single auto modeling file
        mapping_names = [x for x in dir(module) if x.endswith("_MAPPING_NAMES")]
        mappings_to_check.update({name: getattr(module, name) for name in mapping_names})

    for name, mapping in mappings_to_check.items():
        for model_type, class_names in mapping.items():
            if not isinstance(class_names, tuple):
                class_names = (class_names,)
            for class_name in class_names:
                if class_name is None:
                    continue
                # A dummy object (for a missing backend) is accepted here, so `hasattr` is enough.
                if not hasattr(transformers, class_name):
                    # Private models defined in this file are not in the main init on purpose, so skip them.
                    if name.endswith("MODEL_MAPPING_NAMES") and is_a_private_model(class_name):
                        continue
                    failures.append(
                        f"`{class_name}` appears in the mapping `{name}` but it is not defined in the library."
                    )
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


def check_all_auto_mapping_names_in_config_mapping_names():
    """Check all keys defined in auto mappings (mappings of names) appear in `CONFIG_MAPPING_NAMES`."""
    check_missing_backends()

    failures = []
    # `TOKENIZER_MAPPING_NAMES` and `AutoTokenizer` are special, and don't need to follow the rule.
    mappings_to_check = {
        "IMAGE_PROCESSOR_MAPPING_NAMES": IMAGE_PROCESSOR_MAPPING_NAMES,
        "FEATURE_EXTRACTOR_MAPPING_NAMES": FEATURE_EXTRACTOR_MAPPING_NAMES,
        "PROCESSOR_MAPPING_NAMES": PROCESSOR_MAPPING_NAMES,
    }

    # Each auto modeling file contains multiple mappings. Let's get them in a dynamic way.
    for module_name in ["modeling_auto", "modeling_tf_auto", "modeling_flax_auto"]:
        module = getattr(transformers.models.auto, module_name, None)
        if module is None:
            continue
        # all mappings in a single auto modeling file
        mapping_names = [x for x in dir(module) if x.endswith("_MAPPING_NAMES")]
        mappings_to_check.update({name: getattr(module, name) for name in mapping_names})

    for name, mapping in mappings_to_check.items():
        for model_type, class_names in mapping.items():
            if model_type not in CONFIG_MAPPING_NAMES:
                failures.append(
                    f"`{model_type}` appears in the mapping `{name}` but it is not defined in the keys of "
                    "`CONFIG_MAPPING_NAMES`."
                )
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


def check_all_auto_mappings_importable():
    """Check all auto mappings could be imported."""
    check_missing_backends()

    failures = []
    mappings_to_check = {}
    # Each auto modeling file contains multiple mappings. Let's get them in a dynamic way.
    for module_name in ["modeling_auto", "modeling_tf_auto", "modeling_flax_auto"]:
        module = getattr(transformers.models.auto, module_name, None)
        if module is None:
            continue
        # all mappings in a single auto modeling file
        mapping_names = [x for x in dir(module) if x.endswith("_MAPPING_NAMES")]
        mappings_to_check.update({name: getattr(module, name) for name in mapping_names})

    for name, _ in mappings_to_check.items():
        name = name.replace("_MAPPING_NAMES", "_MAPPING")
        if not hasattr(transformers, name):
            failures.append(f"`{name}` should be defined in the main `__init__` file.")
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


_re_decorator = re.compile(r"^\s*@(\S+)\s+$")

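# For example, `_re_decorator` matches a line holding a single decorator, such as
# "    @slow\n" (capturing "slow").
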

def check_decorator_order(filename):
    """Check that in the test file `filename` the `parameterized` decorator (and variants) always comes first."""
    with open(filename, "r", encoding="utf-8", newline="\n") as f:
        lines = f.readlines()
    decorator_before = None
    errors = []
    for i, line in enumerate(lines):
        search = _re_decorator.search(line)
        if search is not None:
            decorator_name = search.groups()[0]
            if decorator_before is not None and decorator_name.startswith("parameterized"):
                errors.append(i)
            decorator_before = decorator_name
        elif decorator_before is not None:
            decorator_before = None
    return errors

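# A sketch of the ordering that `check_decorator_order` flags: `parameterized` must come
# first, so the following is reported as an error.
#
#     @slow
#     @parameterized.expand([(1,), (2,)])  # error: `parameterized` after another decorator
#     def test_addition(self, value):
#         ...
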

def check_all_decorator_order():
    """Check that in all test files, the `parameterized` decorator (and variants) always comes first."""
    errors = []
    for fname in os.listdir(PATH_TO_TESTS):
        if fname.endswith(".py"):
            filename = os.path.join(PATH_TO_TESTS, fname)
            new_errors = check_decorator_order(filename)
            errors += [f"- {filename}, line {i}" for i in new_errors]
    if len(errors) > 0:
        msg = "\n".join(errors)
        raise ValueError(
            "The parameterized decorator (and its variants) should always be first, but this is not the case in the"
            f" following files:\n{msg}"
        )


def find_all_documented_objects():
    """Parse the content of all doc files to detect which classes and functions they document."""
    documented_obj = []
    for doc_file in Path(PATH_TO_DOC).glob("**/*.rst"):
        with open(doc_file, "r", encoding="utf-8", newline="\n") as f:
            content = f.read()
        raw_doc_objs = re.findall(r"(?:autoclass|autofunction):: transformers.(\S+)\s+", content)
        documented_obj += [obj.split(".")[-1] for obj in raw_doc_objs]
    for doc_file in Path(PATH_TO_DOC).glob("**/*.md"):
        with open(doc_file, "r", encoding="utf-8", newline="\n") as f:
            content = f.read()
        raw_doc_objs = re.findall(r"\[\[autodoc\]\]\s+(\S+)\s+", content)
        documented_obj += [obj.split(".")[-1] for obj in raw_doc_objs]
    return documented_obj

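# For reference, the two doc syntaxes detected above:
#
#     .. autoclass:: transformers.BertModel    (rst files)
#     [[autodoc]] BertModel                    (md files)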

# One good reason for not being documented is to be deprecated. Put in this list deprecated objects.
DEPRECATED_OBJECTS = [
    "AutoModelWithLMHead",
    "BartPretrainedModel",
    "DataCollator",
    "DataCollatorForSOP",
    "GlueDataset",
    "GlueDataTrainingArguments",
    "LineByLineTextDataset",
    "LineByLineWithRefDataset",
    "LineByLineWithSOPTextDataset",
    "PretrainedBartModel",
    "PretrainedFSMTModel",
    "SingleSentenceClassificationProcessor",
    "SquadDataTrainingArguments",
    "SquadDataset",
    "SquadExample",
    "SquadFeatures",
    "SquadV1Processor",
    "SquadV2Processor",
    "TFAutoModelWithLMHead",
    "TFBartPretrainedModel",
    "TextDataset",
    "TextDatasetForNextSentencePrediction",
    "Wav2Vec2ForMaskedLM",
    "Wav2Vec2Tokenizer",
    "glue_compute_metrics",
    "glue_convert_examples_to_features",
    "glue_output_modes",
    "glue_processors",
    "glue_tasks_num_labels",
    "squad_convert_examples_to_features",
    "xnli_compute_metrics",
    "xnli_output_modes",
    "xnli_processors",
    "xnli_tasks_num_labels",
    "TFTrainer",
    "TFTrainingArguments",
]

# Exceptionally, some objects should not be documented after all rules passed.
# ONLY PUT SOMETHING IN THIS LIST AS A LAST RESORT!
UNDOCUMENTED_OBJECTS = [
    "AddedToken",  # This is a tokenizers class.
    "BasicTokenizer",  # Internal, should never have been in the main init.
    "CharacterTokenizer",  # Internal, should never have been in the main init.
    "DPRPretrainedReader",  # Like an Encoder.
    "DummyObject",  # Just picked by mistake sometimes.
    "MecabTokenizer",  # Internal, should never have been in the main init.
    "ModelCard",  # Internal type.
    "SqueezeBertModule",  # Internal building block (should have been called SqueezeBertLayer)
    "TFDPRPretrainedReader",  # Like an Encoder.
    "TransfoXLCorpus",  # Internal type.
    "WordpieceTokenizer",  # Internal, should never have been in the main init.
    "absl",  # External module
    "add_end_docstrings",  # Internal, should never have been in the main init.
    "add_start_docstrings",  # Internal, should never have been in the main init.
    "convert_tf_weight_name_to_pt_weight_name",  # Internal used to convert model weights
    "logger",  # Internal logger
    "logging",  # External module
    "requires_backends",  # Internal function
    "AltRobertaModel",  # Internal module
]

# This list should be empty. Objects in it should get their own doc page.
SHOULD_HAVE_THEIR_OWN_PAGE = [
    # Benchmarks
    "PyTorchBenchmark",
    "PyTorchBenchmarkArguments",
    "TensorFlowBenchmark",
    "TensorFlowBenchmarkArguments",
    "AutoBackbone",
    "BitBackbone",
    "ConvNextBackbone",
    "ConvNextV2Backbone",
    "DinatBackbone",
    "FocalNetBackbone",
    "MaskFormerSwinBackbone",
    "MaskFormerSwinConfig",
    "MaskFormerSwinModel",
    "NatBackbone",
    "ResNetBackbone",
    "SwinBackbone",
    "TimmBackbone",
    "TimmBackboneConfig",
]


def ignore_undocumented(name):
    """Rules to determine if `name` should be undocumented."""
    # NOT DOCUMENTED ON PURPOSE.
    # Constants uppercase are not documented.
    if name.isupper():
        return True
    # PreTrainedModels / Encoders / Decoders / Layers / Embeddings / Attention are not documented.
    if (
        name.endswith("PreTrainedModel")
        or name.endswith("Decoder")
        or name.endswith("Encoder")
        or name.endswith("Layer")
        or name.endswith("Embeddings")
        or name.endswith("Attention")
    ):
        return True
    # Submodules are not documented.
    if os.path.isdir(os.path.join(PATH_TO_TRANSFORMERS, name)) or os.path.isfile(
        os.path.join(PATH_TO_TRANSFORMERS, f"{name}.py")
    ):
        return True
    # All load functions are not documented.
    if name.startswith("load_tf") or name.startswith("load_pytorch"):
        return True
    # is_xxx_available functions are not documented.
    if name.startswith("is_") and name.endswith("_available"):
        return True
    # Deprecated objects are not documented.
    if name in DEPRECATED_OBJECTS or name in UNDOCUMENTED_OBJECTS:
        return True
    # MMBT model does not really work.
    if name.startswith("MMBT"):
        return True
    if name in SHOULD_HAVE_THEIR_OWN_PAGE:
        return True
    return False

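# For example (a sketch): `ignore_undocumented("BertEncoder")` is True (ends with "Encoder"),
# while `ignore_undocumented("BertModel")` is False, so `BertModel` must appear in the docs.
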

def check_all_objects_are_documented():
    """Check all public objects are properly documented."""
    documented_objs = find_all_documented_objects()
    modules = transformers._modules
    objects = [c for c in dir(transformers) if c not in modules and not c.startswith("_")]
    undocumented_objs = [c for c in objects if c not in documented_objs and not ignore_undocumented(c)]
    if len(undocumented_objs) > 0:
        raise Exception(
            "The following objects are in the public init so should be documented:\n - "
            + "\n - ".join(undocumented_objs)
        )
    check_docstrings_are_in_md()
    check_model_type_doc_match()


def check_model_type_doc_match():
    """Check all doc pages have a corresponding model type."""
    model_doc_folder = Path(PATH_TO_DOC) / "model_doc"
    model_docs = [m.stem for m in model_doc_folder.glob("*.md")]

    model_types = list(transformers.models.auto.configuration_auto.MODEL_NAMES_MAPPING.keys())
    model_types = [MODEL_TYPE_TO_DOC_MAPPING[m] if m in MODEL_TYPE_TO_DOC_MAPPING else m for m in model_types]

    errors = []
    for m in model_docs:
        if m not in model_types and m != "auto":
            close_matches = get_close_matches(m, model_types)
            error_message = f"{m} is not a proper model identifier."
            if len(close_matches) > 0:
                close_matches = "/".join(close_matches)
                error_message += f" Did you mean {close_matches}?"
            errors.append(error_message)

    if len(errors) > 0:
        raise ValueError(
            "Some model doc pages do not match any existing model type:\n"
            + "\n".join(errors)
            + "\nYou can add any missing model type to the `MODEL_NAMES_MAPPING` constant in "
            "models/auto/configuration_auto.py."
        )


# Re pattern to catch :obj:`xx`, :class:`xx`, :func:`xx` or :meth:`xx`.
_re_rst_special_words = re.compile(r":(?:obj|func|class|meth):`([^`]+)`")
# Re pattern to catch things between double backquotes.
_re_double_backquotes = re.compile(r"(^|[^`])``([^`]+)``([^`]|$)")
# Re pattern to catch example introduction.
_re_rst_example = re.compile(r"^\s*Example.*::\s*$", flags=re.MULTILINE)

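# For example, an rst docstring fragment like ":obj:`int`" or "``attention_mask``" is
# flagged by `is_rst_docstring` below; the Markdown equivalent uses single backquotes.
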

def is_rst_docstring(docstring):
    """
    Returns `True` if `docstring` is written in rst.
    """
    if _re_rst_special_words.search(docstring) is not None:
        return True
    if _re_double_backquotes.search(docstring) is not None:
        return True
    if _re_rst_example.search(docstring) is not None:
        return True
    return False


def check_docstrings_are_in_md():
    """Check all docstrings are in md"""
    files_with_rst = []
    for file in Path(PATH_TO_TRANSFORMERS).glob("**/*.py"):
        with open(file, encoding="utf-8") as f:
            code = f.read()
        docstrings = code.split('"""')

        for idx, docstring in enumerate(docstrings):
            if idx % 2 == 0 or not is_rst_docstring(docstring):
                continue
            files_with_rst.append(file)
            break

    if len(files_with_rst) > 0:
        raise ValueError(
            "The following files have docstrings written in rst:\n"
            + "\n".join([f"- {f}" for f in files_with_rst])
            + "\nTo fix this run `doc-builder convert path_to_py_file` after installing `doc-builder`\n"
            "(`pip install git+https://github.com/huggingface/doc-builder`)"
        )


def check_repo_quality():
    """Check all models are properly tested and documented."""
    print("Checking all models are included.")
    check_model_list()
    print("Checking all models are public.")
    check_models_are_in_init()
    print("Checking all models are properly tested.")
    check_all_decorator_order()
    check_all_models_are_tested()
    print("Checking all objects are properly documented.")
    check_all_objects_are_documented()
    print("Checking all models are in at least one auto class.")
    check_all_models_are_auto_configured()
    print("Checking all names in auto name mappings are defined.")
    check_all_auto_object_names_being_defined()
    print("Checking all keys in auto name mappings are defined in `CONFIG_MAPPING_NAMES`.")
    check_all_auto_mapping_names_in_config_mapping_names()
    print("Checking all auto mappings could be imported.")
    check_all_auto_mappings_importable()


if __name__ == "__main__":
    check_repo_quality()