# coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import inspect
import os
import re
import warnings
from collections import OrderedDict
from difflib import get_close_matches
from pathlib import Path

from transformers import is_flax_available, is_tf_available, is_torch_available
from transformers.models.auto import get_values
from transformers.models.auto.configuration_auto import CONFIG_MAPPING_NAMES
from transformers.models.auto.feature_extraction_auto import FEATURE_EXTRACTOR_MAPPING_NAMES
from transformers.models.auto.image_processing_auto import IMAGE_PROCESSOR_MAPPING_NAMES
from transformers.models.auto.processing_auto import PROCESSOR_MAPPING_NAMES
from transformers.models.auto.tokenization_auto import TOKENIZER_MAPPING_NAMES
from transformers.utils import ENV_VARS_TRUE_VALUES, direct_transformers_import


# All paths are set with the intent that you should run this script from the root of the repo with the command
# python utils/check_repo.py
PATH_TO_TRANSFORMERS = "src/transformers"
PATH_TO_TESTS = "tests"
PATH_TO_DOC = "docs/source/en"

# Update this list with models that are supposed to be private.
PRIVATE_MODELS = [
    "AltRobertaModel",
    "DPRSpanPredictor",
    "LongT5Stack",
    "RealmBertModel",
    "T5Stack",
    "MT5Stack",
    "SwitchTransformersStack",
    "TFDPRSpanPredictor",
    "MaskFormerSwinModel",
    "MaskFormerSwinPreTrainedModel",
    "BridgeTowerTextModel",
    "BridgeTowerVisionModel",
]

# Update this list for models that are not tested, with a comment explaining the reason they should not be.
# Being in this list is an exception and should **not** be the rule.
IGNORE_NON_TESTED = PRIVATE_MODELS.copy() + [
    # models to ignore for not tested
    "NllbMoeDecoder",
    "NllbMoeEncoder",
    "LlamaDecoder",  # Building part of bigger (tested) model.
    "Blip2QFormerModel",  # Building part of bigger (tested) model.
    "DetaEncoder",  # Building part of bigger (tested) model.
    "DetaDecoder",  # Building part of bigger (tested) model.
    "ErnieMForInformationExtraction",
    "GraphormerEncoder",  # Building part of bigger (tested) model.
    "GraphormerDecoderHead",  # Building part of bigger (tested) model.
    "CLIPSegDecoder",  # Building part of bigger (tested) model.
    "TableTransformerEncoder",  # Building part of bigger (tested) model.
    "TableTransformerDecoder",  # Building part of bigger (tested) model.
    "TimeSeriesTransformerEncoder",  # Building part of bigger (tested) model.
    "TimeSeriesTransformerDecoder",  # Building part of bigger (tested) model.
    "InformerEncoder",  # Building part of bigger (tested) model.
    "InformerDecoder",  # Building part of bigger (tested) model.
    "JukeboxVQVAE",  # Building part of bigger (tested) model.
    "JukeboxPrior",  # Building part of bigger (tested) model.
    "DeformableDetrEncoder",  # Building part of bigger (tested) model.
    "DeformableDetrDecoder",  # Building part of bigger (tested) model.
    "OPTDecoder",  # Building part of bigger (tested) model.
    "FlaxWhisperDecoder",  # Building part of bigger (tested) model.
    "FlaxWhisperEncoder",  # Building part of bigger (tested) model.
    "WhisperDecoder",  # Building part of bigger (tested) model.
    "WhisperEncoder",  # Building part of bigger (tested) model.
    "DecisionTransformerGPT2Model",  # Building part of bigger (tested) model.
    "SegformerDecodeHead",  # Building part of bigger (tested) model.
    "PLBartEncoder",  # Building part of bigger (tested) model.
    "PLBartDecoder",  # Building part of bigger (tested) model.
    "PLBartDecoderWrapper",  # Building part of bigger (tested) model.
    "BigBirdPegasusEncoder",  # Building part of bigger (tested) model.
    "BigBirdPegasusDecoder",  # Building part of bigger (tested) model.
    "BigBirdPegasusDecoderWrapper",  # Building part of bigger (tested) model.
    "DetrEncoder",  # Building part of bigger (tested) model.
    "DetrDecoder",  # Building part of bigger (tested) model.
    "DetrDecoderWrapper",  # Building part of bigger (tested) model.
    "ConditionalDetrEncoder",  # Building part of bigger (tested) model.
    "ConditionalDetrDecoder",  # Building part of bigger (tested) model.
    "M2M100Encoder",  # Building part of bigger (tested) model.
    "M2M100Decoder",  # Building part of bigger (tested) model.
    "MCTCTEncoder",  # Building part of bigger (tested) model.
    "MgpstrModel",  # Building part of bigger (tested) model.
    "Speech2TextEncoder",  # Building part of bigger (tested) model.
    "Speech2TextDecoder",  # Building part of bigger (tested) model.
    "LEDEncoder",  # Building part of bigger (tested) model.
    "LEDDecoder",  # Building part of bigger (tested) model.
    "BartDecoderWrapper",  # Building part of bigger (tested) model.
    "BartEncoder",  # Building part of bigger (tested) model.
    "BertLMHeadModel",  # Needs to be setup as decoder.
    "BlenderbotSmallEncoder",  # Building part of bigger (tested) model.
    "BlenderbotSmallDecoderWrapper",  # Building part of bigger (tested) model.
    "BlenderbotEncoder",  # Building part of bigger (tested) model.
    "BlenderbotDecoderWrapper",  # Building part of bigger (tested) model.
    "MBartEncoder",  # Building part of bigger (tested) model.
    "MBartDecoderWrapper",  # Building part of bigger (tested) model.
    "MegatronBertLMHeadModel",  # Building part of bigger (tested) model.
    "MegatronBertEncoder",  # Building part of bigger (tested) model.
    "MegatronBertDecoder",  # Building part of bigger (tested) model.
    "MegatronBertDecoderWrapper",  # Building part of bigger (tested) model.
    "MvpDecoderWrapper",  # Building part of bigger (tested) model.
    "MvpEncoder",  # Building part of bigger (tested) model.
    "PegasusEncoder",  # Building part of bigger (tested) model.
    "PegasusDecoderWrapper",  # Building part of bigger (tested) model.
    "PegasusXEncoder",  # Building part of bigger (tested) model.
    "PegasusXDecoder",  # Building part of bigger (tested) model.
    "PegasusXDecoderWrapper",  # Building part of bigger (tested) model.
    "DPREncoder",  # Building part of bigger (tested) model.
    "ProphetNetDecoderWrapper",  # Building part of bigger (tested) model.
    "RealmBertModel",  # Building part of bigger (tested) model.
    "RealmReader",  # Not regular model.
    "RealmScorer",  # Not regular model.
    "RealmForOpenQA",  # Not regular model.
    "ReformerForMaskedLM",  # Needs to be setup as decoder.
    "Speech2Text2DecoderWrapper",  # Building part of bigger (tested) model.
    "TFDPREncoder",  # Building part of bigger (tested) model.
    "TFElectraMainLayer",  # Building part of bigger (tested) model (should it be a TFPreTrainedModel ?)
    "TFRobertaForMultipleChoice",  # TODO: fix
    "TFRobertaPreLayerNormForMultipleChoice",  # TODO: fix
    "TrOCRDecoderWrapper",  # Building part of bigger (tested) model.
    "TFWhisperEncoder",  # Building part of bigger (tested) model.
    "TFWhisperDecoder",  # Building part of bigger (tested) model.
    "SeparableConv1D",  # Building part of bigger (tested) model.
    "FlaxBartForCausalLM",  # Building part of bigger (tested) model.
    "FlaxBertForCausalLM",  # Building part of bigger (tested) model. Tested implicitly through FlaxRobertaForCausalLM.
    "OPTDecoderWrapper",
    "TFSegformerDecodeHead",  # Not a regular model.
    "AltRobertaModel",  # Building part of bigger (tested) model.
    "BlipTextLMHeadModel",  # No need to test it as it is tested by BlipTextVision models
    "TFBlipTextLMHeadModel",  # No need to test it as it is tested by BlipTextVision models
    "BridgeTowerTextModel",  # No need to test it as it is tested by BridgeTowerModel model.
    "BridgeTowerVisionModel",  # No need to test it as it is tested by BridgeTowerModel model.
    "SpeechT5Decoder",  # Building part of bigger (tested) model.
    "SpeechT5DecoderWithoutPrenet",  # Building part of bigger (tested) model.
    "SpeechT5DecoderWithSpeechPrenet",  # Building part of bigger (tested) model.
    "SpeechT5DecoderWithTextPrenet",  # Building part of bigger (tested) model.
    "SpeechT5Encoder",  # Building part of bigger (tested) model.
    "SpeechT5EncoderWithoutPrenet",  # Building part of bigger (tested) model.
    "SpeechT5EncoderWithSpeechPrenet",  # Building part of bigger (tested) model.
    "SpeechT5EncoderWithTextPrenet",  # Building part of bigger (tested) model.
    "SpeechT5SpeechDecoder",  # Building part of bigger (tested) model.
    "SpeechT5SpeechEncoder",  # Building part of bigger (tested) model.
    "SpeechT5TextDecoder",  # Building part of bigger (tested) model.
    "SpeechT5TextEncoder",  # Building part of bigger (tested) model.
]

# Update this list with test files that don't have a tester with an `all_model_classes` variable and which don't
# trigger the common tests.
TEST_FILES_WITH_NO_COMMON_TESTS = [
    "models/decision_transformer/test_modeling_decision_transformer.py",
    "models/camembert/test_modeling_camembert.py",
    "models/mt5/test_modeling_flax_mt5.py",
    "models/mbart/test_modeling_mbart.py",
    "models/mt5/test_modeling_mt5.py",
    "models/pegasus/test_modeling_pegasus.py",
    "models/camembert/test_modeling_tf_camembert.py",
    "models/mt5/test_modeling_tf_mt5.py",
    "models/xlm_roberta/test_modeling_tf_xlm_roberta.py",
    "models/xlm_roberta/test_modeling_flax_xlm_roberta.py",
    "models/xlm_prophetnet/test_modeling_xlm_prophetnet.py",
    "models/xlm_roberta/test_modeling_xlm_roberta.py",
    "models/vision_text_dual_encoder/test_modeling_vision_text_dual_encoder.py",
    "models/vision_text_dual_encoder/test_modeling_tf_vision_text_dual_encoder.py",
    "models/vision_text_dual_encoder/test_modeling_flax_vision_text_dual_encoder.py",
]

# Update this list for models that are not in any of the auto MODEL_XXX_MAPPING. Being in this list is an exception and
# should **not** be the rule.
IGNORE_NON_AUTO_CONFIGURED = PRIVATE_MODELS.copy() + [
    # models to ignore for model xxx mapping
    "AlignTextModel",
    "AlignVisionModel",
    "ClapTextModel",
    "ClapTextModelWithProjection",
    "ClapAudioModel",
    "ClapAudioModelWithProjection",
    "Blip2ForConditionalGeneration",
    "Blip2QFormerModel",
    "Blip2VisionModel",
    "ErnieMForInformationExtraction",
    "GitVisionModel",
    "GraphormerModel",
    "GraphormerForGraphClassification",
    "BlipForConditionalGeneration",
    "BlipForImageTextRetrieval",
    "BlipForQuestionAnswering",
    "BlipVisionModel",
    "BlipTextLMHeadModel",
    "BlipTextModel",
    "TFBlipForConditionalGeneration",
    "TFBlipForImageTextRetrieval",
    "TFBlipForQuestionAnswering",
    "TFBlipVisionModel",
    "TFBlipTextLMHeadModel",
    "TFBlipTextModel",
    "Swin2SRForImageSuperResolution",
    "BridgeTowerForImageAndTextRetrieval",
    "BridgeTowerForMaskedLM",
    "BridgeTowerForContrastiveLearning",
    "CLIPSegForImageSegmentation",
    "CLIPSegVisionModel",
    "CLIPSegTextModel",
    "EsmForProteinFolding",
    "GPTSanJapaneseModel",
    "TimeSeriesTransformerForPrediction",
    "InformerForPrediction",
    "JukeboxVQVAE",
    "JukeboxPrior",
    "PegasusXEncoder",
    "PegasusXDecoder",
    "PegasusXDecoderWrapper",
    "DPTForDepthEstimation",
    "DecisionTransformerGPT2Model",
    "GLPNForDepthEstimation",
    "ViltForImagesAndTextClassification",
    "ViltForImageAndTextRetrieval",
    "ViltForTokenClassification",
    "ViltForMaskedLM",
    "XGLMEncoder",
    "XGLMDecoder",
    "XGLMDecoderWrapper",
    "PerceiverForMultimodalAutoencoding",
    "PerceiverForOpticalFlow",
    "SegformerDecodeHead",
    "TFSegformerDecodeHead",
    "FlaxBeitForMaskedImageModeling",
    "PLBartEncoder",
    "PLBartDecoder",
    "PLBartDecoderWrapper",
    "BeitForMaskedImageModeling",
    "ChineseCLIPTextModel",
    "ChineseCLIPVisionModel",
    "CLIPTextModel",
    "CLIPTextModelWithProjection",
    "CLIPVisionModel",
    "CLIPVisionModelWithProjection",
    "GroupViTTextModel",
    "GroupViTVisionModel",
    "TFCLIPTextModel",
    "TFCLIPVisionModel",
    "TFGroupViTTextModel",
    "TFGroupViTVisionModel",
    "FlaxCLIPTextModel",
    "FlaxCLIPVisionModel",
    "FlaxWav2Vec2ForCTC",
    "DetrForSegmentation",
    "Pix2StructVisionModel",
    "Pix2StructTextModel",
    "Pix2StructForConditionalGeneration",
    "ConditionalDetrForSegmentation",
    "DPRReader",
    "FlaubertForQuestionAnswering",
    "FlavaImageCodebook",
    "FlavaTextModel",
    "FlavaImageModel",
    "FlavaMultimodalModel",
    "GPT2DoubleHeadsModel",
    "GPTSw3DoubleHeadsModel",
    "LayoutLMForQuestionAnswering",
    "LukeForMaskedLM",
    "LukeForEntityClassification",
    "LukeForEntityPairClassification",
    "LukeForEntitySpanClassification",
    "MgpstrModel",
    "OpenAIGPTDoubleHeadsModel",
    "OwlViTTextModel",
    "OwlViTVisionModel",
    "OwlViTForObjectDetection",
    "RagModel",
    "RagSequenceForGeneration",
    "RagTokenForGeneration",
    "RealmEmbedder",
    "RealmForOpenQA",
    "RealmScorer",
    "RealmReader",
    "TFDPRReader",
    "TFGPT2DoubleHeadsModel",
    "TFLayoutLMForQuestionAnswering",
    "TFOpenAIGPTDoubleHeadsModel",
    "TFRagModel",
    "TFRagSequenceForGeneration",
    "TFRagTokenForGeneration",
    "Wav2Vec2ForCTC",
    "HubertForCTC",
    "SEWForCTC",
    "SEWDForCTC",
    "XLMForQuestionAnswering",
    "XLNetForQuestionAnswering",
    "SeparableConv1D",
    "VisualBertForRegionToPhraseAlignment",
    "VisualBertForVisualReasoning",
    "VisualBertForQuestionAnswering",
    "VisualBertForMultipleChoice",
    "TFWav2Vec2ForCTC",
    "TFHubertForCTC",
    "XCLIPVisionModel",
    "XCLIPTextModel",
    "AltCLIPTextModel",
    "AltCLIPVisionModel",
    "AltRobertaModel",
    "TvltForAudioVisualClassification",
    "SpeechT5ForSpeechToSpeech",
    "SpeechT5ForTextToSpeech",
    "SpeechT5HifiGan",
]

# Update this list for models that have multiple model types for the same
# model doc
MODEL_TYPE_TO_DOC_MAPPING = OrderedDict(
    [
        ("data2vec-text", "data2vec"),
        ("data2vec-audio", "data2vec"),
335
        ("data2vec-vision", "data2vec"),
NielsRogge's avatar
NielsRogge committed
336
        ("donut-swin", "donut"),
337
338
339
340
    ]
)
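# For illustration: the "data2vec-text", "data2vec-audio" and "data2vec-vision" model types all share the
# single doc page docs/source/en/model_doc/data2vec.mdx, so `check_model_type_doc_match` remaps them to
# "data2vec" before looking for the corresponding doc file.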


# This is to make sure the transformers module imported is the one in the repo.
transformers = direct_transformers_import(PATH_TO_TRANSFORMERS)


def check_missing_backends():
    missing_backends = []
    if not is_torch_available():
        missing_backends.append("PyTorch")
    if not is_tf_available():
        missing_backends.append("TensorFlow")
    if not is_flax_available():
        missing_backends.append("Flax")
    if len(missing_backends) > 0:
        missing = ", ".join(missing_backends)
        if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
            raise Exception(
                "Full repo consistency checks require all backends to be installed (with `pip install -e .[dev]` in "
                f"the Transformers repo). The following are missing: {missing}."
            )
        else:
            warnings.warn(
                "Full repo consistency checks require all backends to be installed (with `pip install -e .[dev]` in "
                f"the Transformers repo). The following are missing: {missing}. This is probably fine as long as you "
                "didn't make any changes in one of those backends' modeling files, but you should run the command "
                "above to be on the safe side."
            )


def check_model_list():
    """Check the model list inside the transformers library."""
    # Get the models from the directory structure of `src/transformers/models/`
    models_dir = os.path.join(PATH_TO_TRANSFORMERS, "models")
    _models = []
    for model in os.listdir(models_dir):
        model_dir = os.path.join(models_dir, model)
        if os.path.isdir(model_dir) and "__init__.py" in os.listdir(model_dir):
            _models.append(model)

    # Get the models that are registered in the `transformers.models` init (i.e. importable from the library)
    models = [model for model in dir(transformers.models) if not model.startswith("__")]

    missing_models = sorted(set(_models).difference(models))
    if missing_models:
        raise Exception(
            f"The following models should be included in {models_dir}/__init__.py: {','.join(missing_models)}."
        )
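# A minimal illustration (with a hypothetical model name): if src/transformers/models/foo/ existed on disk
# with an __init__.py but `foo` was never imported in src/transformers/models/__init__.py, "foo" would be
# reported as missing by the check above.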


# If some modeling modules should be ignored for all checks, they should be added in the nested list
# _ignore_modules of this function.
def get_model_modules():
    """Get the model modules inside the transformers library."""
    _ignore_modules = [
        "modeling_auto",
        "modeling_encoder_decoder",
        "modeling_marian",
        "modeling_mmbt",
        "modeling_outputs",
        "modeling_retribert",
        "modeling_utils",
        "modeling_flax_auto",
        "modeling_flax_encoder_decoder",
        "modeling_flax_utils",
        "modeling_speech_encoder_decoder",
        "modeling_flax_speech_encoder_decoder",
        "modeling_flax_vision_encoder_decoder",
        "modeling_transfo_xl_utilities",
        "modeling_tf_auto",
        "modeling_tf_encoder_decoder",
        "modeling_tf_outputs",
        "modeling_tf_pytorch_utils",
        "modeling_tf_utils",
        "modeling_tf_transfo_xl_utilities",
        "modeling_tf_vision_encoder_decoder",
        "modeling_vision_encoder_decoder",
    ]
    modules = []
    for model in dir(transformers.models):
        # There are some magic dunder attributes in the dir, we ignore them
        if not model.startswith("__"):
            model_module = getattr(transformers.models, model)
            for submodule in dir(model_module):
                if submodule.startswith("modeling") and submodule not in _ignore_modules:
                    modeling_module = getattr(model_module, submodule)
                    if inspect.ismodule(modeling_module):
                        modules.append(modeling_module)
    return modules
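# For illustration: `transformers.models.bert.modeling_bert` and `transformers.models.bert.modeling_tf_bert`
# are both collected by the loop above, while submodules listed in `_ignore_modules` (e.g. `modeling_auto`
# under `transformers.models.auto`) are skipped.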


def get_models(module, include_pretrained=False):
    """Get the objects in module that are models."""
    models = []
    model_classes = (transformers.PreTrainedModel, transformers.TFPreTrainedModel, transformers.FlaxPreTrainedModel)
    for attr_name in dir(module):
        if not include_pretrained and ("Pretrained" in attr_name or "PreTrained" in attr_name):
            continue
        attr = getattr(module, attr_name)
        if isinstance(attr, type) and issubclass(attr, model_classes) and attr.__module__ == module.__name__:
            models.append((attr_name, attr))
    return models
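# A minimal sketch of the behavior (class names shown for illustration): calling
#   get_models(transformers.models.bert.modeling_bert)
# returns pairs like ("BertForMaskedLM", <class>) and ("BertModel", <class>), but skips
# "BertPreTrainedModel" unless `include_pretrained=True`.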


def is_a_private_model(model):
    """Returns True if the model should not be in the main init."""
    if model in PRIVATE_MODELS:
        return True

    # Wrapper, Encoder and Decoder are all privates
    if model.endswith("Wrapper"):
        return True
    if model.endswith("Encoder"):
        return True
    if model.endswith("Decoder"):
        return True
    if model.endswith("Prenet"):
        return True
    return False
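# For illustration: "BartEncoder" and "MBartDecoderWrapper" are private via the suffix rules above, while
# "T5Stack" is private only because it is listed explicitly in PRIVATE_MODELS.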


def check_models_are_in_init():
    """Checks all models defined in the library are in the main init."""
    models_not_in_init = []
    dir_transformers = dir(transformers)
    for module in get_model_modules():
        models_not_in_init += [
            model[0] for model in get_models(module, include_pretrained=True) if model[0] not in dir_transformers
        ]

    # Remove private models
    models_not_in_init = [model for model in models_not_in_init if not is_a_private_model(model)]
    if len(models_not_in_init) > 0:
        raise Exception(f"The following models should be in the main init: {','.join(models_not_in_init)}.")


# If some test_modeling files should be ignored when checking models are all tested, they should be added in the
# nested list _ignore_files of this function.
def get_model_test_files():
    """Get the model test files.

    The returned files do NOT contain the `tests` prefix (i.e. `PATH_TO_TESTS` defined in this script): they are
    paths relative to `tests`. A caller has to use `os.path.join(PATH_TO_TESTS, ...)` to access the files.
    """

    _ignore_files = [
        "test_modeling_common",
        "test_modeling_encoder_decoder",
        "test_modeling_flax_encoder_decoder",
        "test_modeling_flax_speech_encoder_decoder",
        "test_modeling_marian",
        "test_modeling_tf_common",
        "test_modeling_tf_encoder_decoder",
    ]
    test_files = []
    # Check both `PATH_TO_TESTS` and `PATH_TO_TESTS/models`
    model_test_root = os.path.join(PATH_TO_TESTS, "models")
    model_test_dirs = []
    for x in os.listdir(model_test_root):
        x = os.path.join(model_test_root, x)
        if os.path.isdir(x):
            model_test_dirs.append(x)

    for target_dir in [PATH_TO_TESTS] + model_test_dirs:
        for file_or_dir in os.listdir(target_dir):
            path = os.path.join(target_dir, file_or_dir)
            if os.path.isfile(path):
                filename = os.path.split(path)[-1]
                if "test_modeling" in filename and os.path.splitext(filename)[0] not in _ignore_files:
                    file = os.path.join(*path.split(os.sep)[1:])
                    test_files.append(file)

    return test_files
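# The returned paths are thus relative to `tests`, e.g. "models/bert/test_modeling_bert.py" rather than
# "tests/models/bert/test_modeling_bert.py".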


# This is a bit hacky but I didn't find a way to import the test_file as a module and read inside the tester class
# for the all_model_classes variable.
def find_tested_models(test_file):
    """Parse the content of test_file to detect what's in all_model_classes"""
    # This is a bit hacky but I didn't find a way to import the test_file as a module and read inside the class
    with open(os.path.join(PATH_TO_TESTS, test_file), "r", encoding="utf-8", newline="\n") as f:
        content = f.read()
    all_models = re.findall(r"all_model_classes\s+=\s+\(\s*\(([^\)]*)\)", content)
    # Check with one less parenthesis as well
    all_models += re.findall(r"all_model_classes\s+=\s+\(([^\)]*)\)", content)
    if len(all_models) > 0:
        model_tested = []
        for entry in all_models:
            for line in entry.split(","):
                name = line.strip()
                if len(name) > 0:
                    model_tested.append(name)
        return model_tested
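# The two regexes above cover both layouts found in test files, e.g. (illustrative snippets):
#   all_model_classes = (BertModel, BertForMaskedLM)
#   all_model_classes = ((BertModel, BertForMaskedLM) if is_torch_available() else ())
# In both cases the captured group is "BertModel, BertForMaskedLM", which is then split on commas.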


def check_models_are_tested(module, test_file):
    """Check models defined in module are tested in test_file."""
    # XxxPreTrainedModel are not tested
    defined_models = get_models(module)
    tested_models = find_tested_models(test_file)
    if tested_models is None:
        if test_file.replace(os.path.sep, "/") in TEST_FILES_WITH_NO_COMMON_TESTS:
            return
        return [
            f"{test_file} should define `all_model_classes` to apply common tests to the models it tests. "
            + "If this intentional, add the test filename to `TEST_FILES_WITH_NO_COMMON_TESTS` in the file "
            + "`utils/check_repo.py`."
        ]
    failures = []
    for model_name, _ in defined_models:
        if model_name not in tested_models and model_name not in IGNORE_NON_TESTED:
            failures.append(
                f"{model_name} is defined in {module.__name__} but is not tested in "
                + f"{os.path.join(PATH_TO_TESTS, test_file)}. Add it to the all_model_classes in that file."
                + "If common tests should not applied to that model, add its name to `IGNORE_NON_TESTED`"
                + "in the file `utils/check_repo.py`."
            )
    return failures


def check_all_models_are_tested():
    """Check all models are properly tested."""
    modules = get_model_modules()
    test_files = get_model_test_files()
    failures = []
    for module in modules:
        test_file = [file for file in test_files if f"test_{module.__name__.split('.')[-1]}.py" in file]
        if len(test_file) == 0:
            failures.append(f"{module.__name__} does not have its corresponding test file.")
        elif len(test_file) > 1:
            failures.append(f"{module.__name__} has several test files: {test_file}.")
        else:
            test_file = test_file[0]
            new_failures = check_models_are_tested(module, test_file)
            if new_failures is not None:
                failures += new_failures
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


def get_all_auto_configured_models():
    """Return the list of all models in at least one auto class."""
    result = set()  # To avoid duplicates we concatenate all model classes in a set.
    if is_torch_available():
        for attr_name in dir(transformers.models.auto.modeling_auto):
            if attr_name.startswith("MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_auto, attr_name)))
    if is_tf_available():
        for attr_name in dir(transformers.models.auto.modeling_tf_auto):
            if attr_name.startswith("TF_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_tf_auto, attr_name)))
    if is_flax_available():
        for attr_name in dir(transformers.models.auto.modeling_flax_auto):
            if attr_name.startswith("FLAX_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_flax_auto, attr_name)))
    return list(result)
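# For illustration: with PyTorch installed, `MODEL_FOR_MASKED_LM_MAPPING_NAMES` contributes names such as
# "BertForMaskedLM", so the result is the union of the values of every `MODEL_`/`TF_MODEL_`/`FLAX_MODEL_`
# `*MAPPING_NAMES` mapping whose backend is available.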


def ignore_unautoclassed(model_name):
    """Rules to determine if `name` should be in an auto class."""
    # Special white list
    if model_name in IGNORE_NON_AUTO_CONFIGURED:
        return True
    # Encoder and Decoder should be ignored
    if "Encoder" in model_name or "Decoder" in model_name:
        return True
    return False
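# For illustration: "BartEncoder" is skipped by the Encoder/Decoder rule above, while "GPT2DoubleHeadsModel"
# is only skipped because it appears in IGNORE_NON_AUTO_CONFIGURED.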


def check_models_are_auto_configured(module, all_auto_models):
    """Check models defined in module are each in an auto class."""
    defined_models = get_models(module)
    failures = []
    for model_name, _ in defined_models:
        if model_name not in all_auto_models and not ignore_unautoclassed(model_name):
            failures.append(
                f"{model_name} is defined in {module.__name__} but is not present in any of the auto mapping. "
                "If that is intended behavior, add its name to `IGNORE_NON_AUTO_CONFIGURED` in the file "
                "`utils/check_repo.py`."
            )
    return failures


def check_all_models_are_auto_configured():
    """Check all models are each in an auto class."""
    check_missing_backends()
    modules = get_model_modules()
    all_auto_models = get_all_auto_configured_models()
    failures = []
    for module in modules:
        new_failures = check_models_are_auto_configured(module, all_auto_models)
        if new_failures is not None:
            failures += new_failures
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


def check_all_auto_object_names_being_defined():
    """Check all names defined in auto (name) mappings exist in the library."""
    check_missing_backends()

    failures = []
    mappings_to_check = {
        "TOKENIZER_MAPPING_NAMES": TOKENIZER_MAPPING_NAMES,
        "IMAGE_PROCESSOR_MAPPING_NAMES": IMAGE_PROCESSOR_MAPPING_NAMES,
        "FEATURE_EXTRACTOR_MAPPING_NAMES": FEATURE_EXTRACTOR_MAPPING_NAMES,
        "PROCESSOR_MAPPING_NAMES": PROCESSOR_MAPPING_NAMES,
    }

    # Each auto modeling file contains multiple mappings. Let's get them in a dynamic way.
    for module_name in ["modeling_auto", "modeling_tf_auto", "modeling_flax_auto"]:
        module = getattr(transformers.models.auto, module_name, None)
        if module is None:
            continue
        # all mappings in a single auto modeling file
        mapping_names = [x for x in dir(module) if x.endswith("_MAPPING_NAMES")]
        mappings_to_check.update({name: getattr(module, name) for name in mapping_names})

    for name, mapping in mappings_to_check.items():
        for model_type, class_names in mapping.items():
            if not isinstance(class_names, tuple):
                class_names = (class_names,)
            # The loop below must sit outside the `if` above; otherwise mappings whose values are
            # already tuples would never be checked.
            for class_name in class_names:
                if class_name is None:
                    continue
                # Dummy objects (for missing backends) are attributes of `transformers`, so they pass this check.
                if not hasattr(transformers, class_name):
                    # If the class name is in a model name mapping and is a private model defined in this file,
                    # there is no definition in the library to find, so skip it.
                    if name.endswith("MODEL_MAPPING_NAMES") and is_a_private_model(class_name):
                        continue
                    failures.append(
                        f"`{class_name}` appears in the mapping `{name}` but it is not defined in the library."
                    )
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


def check_all_auto_mapping_names_in_config_mapping_names():
    """Check all keys defined in auto mappings (mappings of names) appear in `CONFIG_MAPPING_NAMES`."""
    check_missing_backends()

    failures = []
    # `TOKENIZER_MAPPING_NAMES` and `AutoTokenizer` are special, and don't need to follow the rule.
    mappings_to_check = {
        "IMAGE_PROCESSOR_MAPPING_NAMES": IMAGE_PROCESSOR_MAPPING_NAMES,
        "FEATURE_EXTRACTOR_MAPPING_NAMES": FEATURE_EXTRACTOR_MAPPING_NAMES,
        "PROCESSOR_MAPPING_NAMES": PROCESSOR_MAPPING_NAMES,
    }

    # Each auto modeling file contains multiple mappings. Let's get them in a dynamic way.
    for module_name in ["modeling_auto", "modeling_tf_auto", "modeling_flax_auto"]:
        module = getattr(transformers.models.auto, module_name, None)
        if module is None:
            continue
        # all mappings in a single auto modeling file
        mapping_names = [x for x in dir(module) if x.endswith("_MAPPING_NAMES")]
        mappings_to_check.update({name: getattr(module, name) for name in mapping_names})

    for name, mapping in mappings_to_check.items():
        for model_type, class_names in mapping.items():
            if model_type not in CONFIG_MAPPING_NAMES:
                failures.append(
                    f"`{model_type}` appears in the mapping `{name}` but it is not defined in the keys of "
                    "`CONFIG_MAPPING_NAMES`."
                )
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


_re_decorator = re.compile(r"^\s*@(\S+)\s+$")
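# The pattern matches a line holding a single decorator, e.g. "    @slow" or "    @parameterized.expand(...)";
# group 1 is the decorator name. A decorator followed by anything other than whitespace on the same line
# does not match.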


def check_decorator_order(filename):
    """Check that in the test file `filename` the slow decorator is always last (i.e. the `parameterized`
    decorator, if any, comes first in a decorator stack)."""
    with open(filename, "r", encoding="utf-8", newline="\n") as f:
        lines = f.readlines()
    decorator_before = None
    errors = []
    for i, line in enumerate(lines):
        search = _re_decorator.search(line)
        if search is not None:
            decorator_name = search.groups()[0]
            if decorator_before is not None and decorator_name.startswith("parameterized"):
                errors.append(i)
            decorator_before = decorator_name
        elif decorator_before is not None:
            decorator_before = None
    return errors


def check_all_decorator_order():
    """Check that in all test files, the slow decorator is always last (i.e. `parameterized` comes first)."""
    errors = []
    for fname in os.listdir(PATH_TO_TESTS):
        if fname.endswith(".py"):
            filename = os.path.join(PATH_TO_TESTS, fname)
            new_errors = check_decorator_order(filename)
            errors += [f"- {filename}, line {i}" for i in new_errors]
    if len(errors) > 0:
        msg = "\n".join(errors)
        raise ValueError(
            "The parameterized decorator (and its variants) should always be first, but this is not the case in the"
            f" following files:\n{msg}"
        )


def find_all_documented_objects():
    """Parse the content of all doc files to detect which classes and functions they document."""
    documented_obj = []
    for doc_file in Path(PATH_TO_DOC).glob("**/*.rst"):
        with open(doc_file, "r", encoding="utf-8", newline="\n") as f:
            content = f.read()
        raw_doc_objs = re.findall(r"(?:autoclass|autofunction):: transformers.(\S+)\s+", content)
        documented_obj += [obj.split(".")[-1] for obj in raw_doc_objs]
    for doc_file in Path(PATH_TO_DOC).glob("**/*.mdx"):
        with open(doc_file, "r", encoding="utf-8", newline="\n") as f:
            content = f.read()
        raw_doc_objs = re.findall(r"\[\[autodoc\]\]\s+(\S+)\s+", content)
        documented_obj += [obj.split(".")[-1] for obj in raw_doc_objs]
    return documented_obj
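# For illustration: a "[[autodoc]] BertModel" directive in an .mdx file yields "BertModel", and the older
# rst form ".. autoclass:: transformers.BertModel" is still picked up from any remaining .rst files.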


# One good reason for not being documented is to be deprecated. Put deprecated objects in this list.
DEPRECATED_OBJECTS = [
    "AutoModelWithLMHead",
    "BartPretrainedModel",
    "DataCollator",
    "DataCollatorForSOP",
    "GlueDataset",
    "GlueDataTrainingArguments",
    "LineByLineTextDataset",
    "LineByLineWithRefDataset",
    "LineByLineWithSOPTextDataset",
    "PretrainedBartModel",
    "PretrainedFSMTModel",
    "SingleSentenceClassificationProcessor",
    "SquadDataTrainingArguments",
    "SquadDataset",
    "SquadExample",
    "SquadFeatures",
    "SquadV1Processor",
    "SquadV2Processor",
    "TFAutoModelWithLMHead",
    "TFBartPretrainedModel",
    "TextDataset",
    "TextDatasetForNextSentencePrediction",
    "Wav2Vec2ForMaskedLM",
    "Wav2Vec2Tokenizer",
    "glue_compute_metrics",
    "glue_convert_examples_to_features",
    "glue_output_modes",
    "glue_processors",
    "glue_tasks_num_labels",
    "squad_convert_examples_to_features",
    "xnli_compute_metrics",
    "xnli_output_modes",
    "xnli_processors",
    "xnli_tasks_num_labels",
    "TFTrainer",
    "TFTrainingArguments",
]

# Exceptionally, some objects should not be documented even though all the rules above pass.
# ONLY PUT SOMETHING IN THIS LIST AS A LAST RESORT!
UNDOCUMENTED_OBJECTS = [
    "AddedToken",  # This is a tokenizers class.
    "BasicTokenizer",  # Internal, should never have been in the main init.
    "CharacterTokenizer",  # Internal, should never have been in the main init.
    "DPRPretrainedReader",  # Like an Encoder.
    "DummyObject",  # Just picked by mistake sometimes.
    "MecabTokenizer",  # Internal, should never have been in the main init.
    "ModelCard",  # Internal type.
    "SqueezeBertModule",  # Internal building block (should have been called SqueezeBertLayer)
    "TFDPRPretrainedReader",  # Like an Encoder.
    "TransfoXLCorpus",  # Internal type.
    "WordpieceTokenizer",  # Internal, should never have been in the main init.
    "absl",  # External module
    "add_end_docstrings",  # Internal, should never have been in the main init.
    "add_start_docstrings",  # Internal, should never have been in the main init.
    "convert_tf_weight_name_to_pt_weight_name",  # Internal used to convert model weights
    "logger",  # Internal logger
    "logging",  # External module
    "requires_backends",  # Internal function
    "AltRobertaModel",  # Internal module
]

# This list should be empty. Objects in it should get their own doc page.
SHOULD_HAVE_THEIR_OWN_PAGE = [
    # Benchmarks
    "PyTorchBenchmark",
    "PyTorchBenchmarkArguments",
    "TensorFlowBenchmark",
    "TensorFlowBenchmarkArguments",
    "AutoBackbone",
    "BitBackbone",
    "ConvNextBackbone",
    "ConvNextV2Backbone",
    "DinatBackbone",
    "MaskFormerSwinBackbone",
    "MaskFormerSwinConfig",
    "MaskFormerSwinModel",
    "NatBackbone",
    "ResNetBackbone",
    "SwinBackbone",
]


def ignore_undocumented(name):
    """Rules to determine if `name` should be undocumented."""
    # NOT DOCUMENTED ON PURPOSE.
    # Constants uppercase are not documented.
    if name.isupper():
        return True
    # PreTrainedModels / Encoders / Decoders / Layers / Embeddings / Attention are not documented.
    if (
        name.endswith("PreTrainedModel")
        or name.endswith("Decoder")
        or name.endswith("Encoder")
        or name.endswith("Layer")
        or name.endswith("Embeddings")
        or name.endswith("Attention")
    ):
        return True
    # Submodules are not documented.
    if os.path.isdir(os.path.join(PATH_TO_TRANSFORMERS, name)) or os.path.isfile(
        os.path.join(PATH_TO_TRANSFORMERS, f"{name}.py")
    ):
        return True
    # All load functions are not documented.
    if name.startswith("load_tf") or name.startswith("load_pytorch"):
        return True
    # is_xxx_available functions are not documented.
    if name.startswith("is_") and name.endswith("_available"):
        return True
    # Deprecated objects are not documented.
    if name in DEPRECATED_OBJECTS or name in UNDOCUMENTED_OBJECTS:
        return True
    # MMBT model does not really work.
    if name.startswith("MMBT"):
        return True
    if name in SHOULD_HAVE_THEIR_OWN_PAGE:
        return True
    return False


def check_all_objects_are_documented():
    """Check all public objects are properly documented."""
    documented_objs = find_all_documented_objects()
    modules = transformers._modules
    objects = [c for c in dir(transformers) if c not in modules and not c.startswith("_")]
    undocumented_objs = [c for c in objects if c not in documented_objs and not ignore_undocumented(c)]
    if len(undocumented_objs) > 0:
        raise Exception(
            "The following objects are in the public init so should be documented:\n - "
            + "\n - ".join(undocumented_objs)
        )
    check_docstrings_are_in_md()
    check_model_type_doc_match()


def check_model_type_doc_match():
    """Check all doc pages have a corresponding model type."""
    model_doc_folder = Path(PATH_TO_DOC) / "model_doc"
    model_docs = [m.stem for m in model_doc_folder.glob("*.mdx")]

    model_types = list(transformers.models.auto.configuration_auto.MODEL_NAMES_MAPPING.keys())
    model_types = [MODEL_TYPE_TO_DOC_MAPPING[m] if m in MODEL_TYPE_TO_DOC_MAPPING else m for m in model_types]

    errors = []
    for m in model_docs:
        if m not in model_types and m != "auto":
            close_matches = get_close_matches(m, model_types)
            error_message = f"{m} is not a proper model identifier."
            if len(close_matches) > 0:
                close_matches = "/".join(close_matches)
                error_message += f" Did you mean {close_matches}?"
            errors.append(error_message)

    if len(errors) > 0:
        raise ValueError(
            "Some model doc pages do not match any existing model type:\n"
            + "\n".join(errors)
            + "\nYou can add any missing model type to the `MODEL_NAMES_MAPPING` constant in "
            "models/auto/configuration_auto.py."
        )


# Re pattern to catch :obj:`xx`, :class:`xx`, :func:`xx` or :meth:`xx`.
_re_rst_special_words = re.compile(r":(?:obj|func|class|meth):`([^`]+)`")
# Re pattern to catch things between double backquotes.
_re_double_backquotes = re.compile(r"(^|[^`])``([^`]+)``([^`]|$)")
# Re pattern to catch example introduction.
_re_rst_example = re.compile(r"^\s*Example.*::\s*$", flags=re.MULTILINE)


def is_rst_docstring(docstring):
    """
    Returns `True` if `docstring` is written in rst.
    """
    if _re_rst_special_words.search(docstring) is not None:
        return True
    if _re_double_backquotes.search(docstring) is not None:
        return True
    if _re_rst_example.search(docstring) is not None:
        return True
    return False
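# For illustration, each of these docstring fragments would be flagged as rst:
#   :obj:`torch.Tensor`   (matches _re_rst_special_words)
#   ``attention_mask``    (matches _re_double_backquotes)
#   Example::             (matches _re_rst_example)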


def check_docstrings_are_in_md():
    """Check all docstrings are in md"""
    files_with_rst = []
    for file in Path(PATH_TO_TRANSFORMERS).glob("**/*.py"):
952
        with open(file, encoding="utf-8") as f:
953
954
955
956
957
958
959
960
961
962
963
964
965
            code = f.read()
        docstrings = code.split('"""')

        for idx, docstring in enumerate(docstrings):
            if idx % 2 == 0 or not is_rst_docstring(docstring):
                continue
            files_with_rst.append(file)
            break

    if len(files_with_rst) > 0:
        raise ValueError(
            "The following files have docstrings written in rst:\n"
            + "\n".join([f"- {f}" for f in files_with_rst])
            + "\nTo fix this run `doc-builder convert path_to_py_file` after installing `doc-builder`\n"
            "(`pip install git+https://github.com/huggingface/doc-builder`)"
        )


def check_repo_quality():
    """Check all models are properly tested and documented."""
    print("Checking all models are included.")
    check_model_list()
    print("Checking all models are public.")
    check_models_are_in_init()
    print("Checking all models are properly tested.")
    check_all_decorator_order()
    check_all_models_are_tested()
    print("Checking all objects are properly documented.")
    check_all_objects_are_documented()
    print("Checking all models are in at least one auto class.")
    check_all_models_are_auto_configured()
    print("Checking all names in auto name mappings are defined.")
    check_all_auto_object_names_being_defined()
    print("Checking all keys in auto name mappings are defined in `CONFIG_MAPPING_NAMES`.")
    check_all_auto_mapping_names_in_config_mapping_names()


if __name__ == "__main__":
    check_repo_quality()