# coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import importlib
import inspect
import os
import re
import warnings
from collections import OrderedDict
from difflib import get_close_matches
from pathlib import Path

from transformers import is_flax_available, is_tf_available, is_torch_available
from transformers.models.auto import get_values
from transformers.utils import ENV_VARS_TRUE_VALUES


# All paths are set with the intent that you should run this script from the root of the repo with the command
# python utils/check_repo.py
PATH_TO_TRANSFORMERS = "src/transformers"
PATH_TO_TESTS = "tests"
PATH_TO_DOC = "docs/source/en"

# Update this list with models that are supposed to be private.
PRIVATE_MODELS = [
    "AltRobertaModel",
    "DPRSpanPredictor",
    "LongT5Stack",
    "RealmBertModel",
    "T5Stack",
    "MT5Stack",
    "SwitchTransformersStack",
    "TFDPRSpanPredictor",
    "MaskFormerSwinModel",
    "MaskFormerSwinPreTrainedModel",
    "BridgeTowerTextModel",
    "BridgeTowerVisionModel",
]

# Update this list for models that are not tested, with a comment explaining the reason they should not be.
# Being in this list is an exception and should **not** be the rule.
IGNORE_NON_TESTED = PRIVATE_MODELS.copy() + [
    # models to ignore for not tested
    "Blip2QFormerModel",  # Building part of bigger (tested) model.
    "DetaEncoder",  # Building part of bigger (tested) model.
    "DetaDecoder",  # Building part of bigger (tested) model.
    "GraphormerEncoder",  # Building part of bigger (tested) model.
    "GraphormerDecoderHead",  # Building part of bigger (tested) model.
    "CLIPSegDecoder",  # Building part of bigger (tested) model.
    "TableTransformerEncoder",  # Building part of bigger (tested) model.
    "TableTransformerDecoder",  # Building part of bigger (tested) model.
    "TimeSeriesTransformerEncoder",  # Building part of bigger (tested) model.
    "TimeSeriesTransformerDecoder",  # Building part of bigger (tested) model.
    "JukeboxVQVAE",  # Building part of bigger (tested) model.
    "JukeboxPrior",  # Building part of bigger (tested) model.
    "DeformableDetrEncoder",  # Building part of bigger (tested) model.
    "DeformableDetrDecoder",  # Building part of bigger (tested) model.
    "OPTDecoder",  # Building part of bigger (tested) model.
    "WhisperDecoder",  # Building part of bigger (tested) model.
    "WhisperEncoder",  # Building part of bigger (tested) model.
    "DecisionTransformerGPT2Model",  # Building part of bigger (tested) model.
    "SegformerDecodeHead",  # Building part of bigger (tested) model.
    "PLBartEncoder",  # Building part of bigger (tested) model.
    "PLBartDecoder",  # Building part of bigger (tested) model.
    "PLBartDecoderWrapper",  # Building part of bigger (tested) model.
    "BigBirdPegasusEncoder",  # Building part of bigger (tested) model.
    "BigBirdPegasusDecoder",  # Building part of bigger (tested) model.
    "BigBirdPegasusDecoderWrapper",  # Building part of bigger (tested) model.
    "DetrEncoder",  # Building part of bigger (tested) model.
    "DetrDecoder",  # Building part of bigger (tested) model.
    "DetrDecoderWrapper",  # Building part of bigger (tested) model.
    "ConditionalDetrEncoder",  # Building part of bigger (tested) model.
    "ConditionalDetrDecoder",  # Building part of bigger (tested) model.
    "M2M100Encoder",  # Building part of bigger (tested) model.
    "M2M100Decoder",  # Building part of bigger (tested) model.
    "MCTCTEncoder",  # Building part of bigger (tested) model.
    "Speech2TextEncoder",  # Building part of bigger (tested) model.
    "Speech2TextDecoder",  # Building part of bigger (tested) model.
    "LEDEncoder",  # Building part of bigger (tested) model.
    "LEDDecoder",  # Building part of bigger (tested) model.
    "BartDecoderWrapper",  # Building part of bigger (tested) model.
    "BartEncoder",  # Building part of bigger (tested) model.
    "BertLMHeadModel",  # Needs to be setup as decoder.
    "BlenderbotSmallEncoder",  # Building part of bigger (tested) model.
    "BlenderbotSmallDecoderWrapper",  # Building part of bigger (tested) model.
    "BlenderbotEncoder",  # Building part of bigger (tested) model.
    "BlenderbotDecoderWrapper",  # Building part of bigger (tested) model.
    "MBartEncoder",  # Building part of bigger (tested) model.
    "MBartDecoderWrapper",  # Building part of bigger (tested) model.
    "MegatronBertLMHeadModel",  # Building part of bigger (tested) model.
    "MegatronBertEncoder",  # Building part of bigger (tested) model.
    "MegatronBertDecoder",  # Building part of bigger (tested) model.
    "MegatronBertDecoderWrapper",  # Building part of bigger (tested) model.
    "MvpDecoderWrapper",  # Building part of bigger (tested) model.
    "MvpEncoder",  # Building part of bigger (tested) model.
    "PegasusEncoder",  # Building part of bigger (tested) model.
    "PegasusDecoderWrapper",  # Building part of bigger (tested) model.
    "PegasusXEncoder",  # Building part of bigger (tested) model.
    "PegasusXDecoder",  # Building part of bigger (tested) model.
    "PegasusXDecoderWrapper",  # Building part of bigger (tested) model.
    "DPREncoder",  # Building part of bigger (tested) model.
    "ProphetNetDecoderWrapper",  # Building part of bigger (tested) model.
    "RealmBertModel",  # Building part of bigger (tested) model.
    "RealmReader",  # Not regular model.
    "RealmScorer",  # Not regular model.
    "RealmForOpenQA",  # Not regular model.
    "ReformerForMaskedLM",  # Needs to be setup as decoder.
    "Speech2Text2DecoderWrapper",  # Building part of bigger (tested) model.
    "TFDPREncoder",  # Building part of bigger (tested) model.
    "TFElectraMainLayer",  # Building part of bigger (tested) model (should it be a TFPreTrainedModel ?)
    "TFRobertaForMultipleChoice",  # TODO: fix
    "TFRobertaPreLayerNormForMultipleChoice",  # TODO: fix
    "TrOCRDecoderWrapper",  # Building part of bigger (tested) model.
    "TFWhisperEncoder",  # Building part of bigger (tested) model.
    "TFWhisperDecoder",  # Building part of bigger (tested) model.
    "SeparableConv1D",  # Building part of bigger (tested) model.
    "FlaxBartForCausalLM",  # Building part of bigger (tested) model.
    "FlaxBertForCausalLM",  # Building part of bigger (tested) model. Tested implicitly through FlaxRobertaForCausalLM.
    "OPTDecoderWrapper",
    "TFSegformerDecodeHead",  # Not a regular model.
    "AltRobertaModel",  # Building part of bigger (tested) model.
    "BlipTextLMHeadModel",  # No need to test it as it is tested by BlipTextVision models
    "BridgeTowerTextModel",  # No need to test it as it is tested by BridgeTowerModel model.
    "BridgeTowerVisionModel",  # No need to test it as it is tested by BridgeTowerModel model.
    "SpeechT5Decoder",  # Building part of bigger (tested) model.
    "SpeechT5DecoderWithoutPrenet",  # Building part of bigger (tested) model.
    "SpeechT5DecoderWithSpeechPrenet",  # Building part of bigger (tested) model.
    "SpeechT5DecoderWithTextPrenet",  # Building part of bigger (tested) model.
    "SpeechT5Encoder",  # Building part of bigger (tested) model.
    "SpeechT5EncoderWithoutPrenet",  # Building part of bigger (tested) model.
    "SpeechT5EncoderWithSpeechPrenet",  # Building part of bigger (tested) model.
    "SpeechT5EncoderWithTextPrenet",  # Building part of bigger (tested) model.
    "SpeechT5SpeechDecoder",  # Building part of bigger (tested) model.
    "SpeechT5SpeechEncoder",  # Building part of bigger (tested) model.
    "SpeechT5TextDecoder",  # Building part of bigger (tested) model.
    "SpeechT5TextEncoder",  # Building part of bigger (tested) model.
]

# Update this list with test files that don't have a tester with an `all_model_classes` variable and which don't
# trigger the common tests.
TEST_FILES_WITH_NO_COMMON_TESTS = [
    "models/decision_transformer/test_modeling_decision_transformer.py",
    "models/camembert/test_modeling_camembert.py",
    "models/mt5/test_modeling_flax_mt5.py",
    "models/mbart/test_modeling_mbart.py",
    "models/mt5/test_modeling_mt5.py",
    "models/pegasus/test_modeling_pegasus.py",
    "models/camembert/test_modeling_tf_camembert.py",
    "models/mt5/test_modeling_tf_mt5.py",
    "models/xlm_roberta/test_modeling_tf_xlm_roberta.py",
    "models/xlm_roberta/test_modeling_flax_xlm_roberta.py",
    "models/xlm_prophetnet/test_modeling_xlm_prophetnet.py",
    "models/xlm_roberta/test_modeling_xlm_roberta.py",
    "models/vision_text_dual_encoder/test_modeling_vision_text_dual_encoder.py",
    "models/vision_text_dual_encoder/test_modeling_flax_vision_text_dual_encoder.py",
]

# Update this list for models that are not in any of the auto MODEL_XXX_MAPPING. Being in this list is an exception and
# should **not** be the rule.
IGNORE_NON_AUTO_CONFIGURED = PRIVATE_MODELS.copy() + [
    # models to ignore for model xxx mapping
    "Blip2ForConditionalGeneration",
    "Blip2QFormerModel",
    "Blip2VisionModel",
    "GitVisionModel",
    "GraphormerModel",
    "GraphormerForGraphClassification",
    "BlipForConditionalGeneration",
    "BlipForImageTextRetrieval",
    "BlipForQuestionAnswering",
    "BlipVisionModel",
    "BlipTextLMHeadModel",
    "BlipTextModel",
    "Swin2SRForImageSuperResolution",
    "BridgeTowerForImageAndTextRetrieval",
    "BridgeTowerForMaskedLM",
    "CLIPSegForImageSegmentation",
    "CLIPSegVisionModel",
    "CLIPSegTextModel",
    "EsmForProteinFolding",
    "TimeSeriesTransformerForPrediction",
    "JukeboxVQVAE",
    "JukeboxPrior",
    "PegasusXEncoder",
    "PegasusXDecoder",
    "PegasusXDecoderWrapper",
    "DPTForDepthEstimation",
    "DecisionTransformerGPT2Model",
    "GLPNForDepthEstimation",
    "ViltForImagesAndTextClassification",
    "ViltForImageAndTextRetrieval",
    "ViltForTokenClassification",
    "ViltForMaskedLM",
    "XGLMEncoder",
    "XGLMDecoder",
    "XGLMDecoderWrapper",
    "PerceiverForMultimodalAutoencoding",
    "PerceiverForOpticalFlow",
    "SegformerDecodeHead",
    "TFSegformerDecodeHead",
    "FlaxBeitForMaskedImageModeling",
    "PLBartEncoder",
    "PLBartDecoder",
    "PLBartDecoderWrapper",
    "BeitForMaskedImageModeling",
    "ChineseCLIPTextModel",
    "ChineseCLIPVisionModel",
    "CLIPTextModel",
    "CLIPTextModelWithProjection",
    "CLIPVisionModel",
    "CLIPVisionModelWithProjection",
    "GroupViTTextModel",
    "GroupViTVisionModel",
    "TFCLIPTextModel",
    "TFCLIPVisionModel",
    "TFGroupViTTextModel",
    "TFGroupViTVisionModel",
    "FlaxCLIPTextModel",
    "FlaxCLIPVisionModel",
    "FlaxWav2Vec2ForCTC",
    "DetrForSegmentation",
    "ConditionalDetrForSegmentation",
    "DPRReader",
    "FlaubertForQuestionAnswering",
    "FlavaImageCodebook",
    "FlavaTextModel",
    "FlavaImageModel",
    "FlavaMultimodalModel",
    "GPT2DoubleHeadsModel",
    "GPTSw3DoubleHeadsModel",
    "LayoutLMForQuestionAnswering",
    "LukeForMaskedLM",
    "LukeForEntityClassification",
    "LukeForEntityPairClassification",
    "LukeForEntitySpanClassification",
    "OpenAIGPTDoubleHeadsModel",
    "OwlViTTextModel",
    "OwlViTVisionModel",
    "OwlViTForObjectDetection",
    "RagModel",
    "RagSequenceForGeneration",
    "RagTokenForGeneration",
    "RealmEmbedder",
    "RealmForOpenQA",
    "RealmScorer",
    "RealmReader",
    "TFDPRReader",
    "TFGPT2DoubleHeadsModel",
    "TFLayoutLMForQuestionAnswering",
    "TFOpenAIGPTDoubleHeadsModel",
    "TFRagModel",
    "TFRagSequenceForGeneration",
    "TFRagTokenForGeneration",
    "Wav2Vec2ForCTC",
    "HubertForCTC",
    "SEWForCTC",
    "SEWDForCTC",
    "XLMForQuestionAnswering",
    "XLNetForQuestionAnswering",
    "SeparableConv1D",
    "VisualBertForRegionToPhraseAlignment",
    "VisualBertForVisualReasoning",
    "VisualBertForQuestionAnswering",
    "VisualBertForMultipleChoice",
    "TFWav2Vec2ForCTC",
    "TFHubertForCTC",
    "XCLIPVisionModel",
    "XCLIPTextModel",
    "AltCLIPTextModel",
    "AltCLIPVisionModel",
    "AltRobertaModel",
    "SpeechT5ForSpeechToSpeech",
    "SpeechT5ForTextToSpeech",
    "SpeechT5HifiGan",
]

# Update this list for models that have multiple model types for the same
# model doc
MODEL_TYPE_TO_DOC_MAPPING = OrderedDict(
    [
        ("data2vec-text", "data2vec"),
        ("data2vec-audio", "data2vec"),
        ("data2vec-vision", "data2vec"),
        ("donut-swin", "donut"),
    ]
)


# This is to make sure the transformers module imported is the one in the repo.
spec = importlib.util.spec_from_file_location(
    "transformers",
    os.path.join(PATH_TO_TRANSFORMERS, "__init__.py"),
    submodule_search_locations=[PATH_TO_TRANSFORMERS],
)
transformers = spec.loader.load_module()
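# NOTE: `Loader.load_module` is deprecated in recent Python versions; a minimal sketch of the modern
# equivalent (same effect, reusing the spec built above) would be:
#     transformers = importlib.util.module_from_spec(spec)
#     spec.loader.exec_module(transformers)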


def check_model_list():
    """Check the model list inside the transformers library."""
    # Get the models from the directory structure of `src/transformers/models/`
    models_dir = os.path.join(PATH_TO_TRANSFORMERS, "models")
    _models = []
    for model in os.listdir(models_dir):
        model_dir = os.path.join(models_dir, model)
        if os.path.isdir(model_dir) and "__init__.py" in os.listdir(model_dir):
            _models.append(model)

    # Get the models exposed by the `transformers.models` module
    models = [model for model in dir(transformers.models) if not model.startswith("__")]

    missing_models = sorted(list(set(_models).difference(models)))
    if missing_models:
        raise Exception(
            f"The following models should be included in {models_dir}/__init__.py: {','.join(missing_models)}."
        )


# If some modeling modules should be ignored for all checks, they should be added in the nested list
# _ignore_modules of this function.
def get_model_modules():
    """Get the model modules inside the transformers library."""
    _ignore_modules = [
        "modeling_auto",
        "modeling_encoder_decoder",
        "modeling_marian",
        "modeling_mmbt",
        "modeling_outputs",
        "modeling_retribert",
        "modeling_utils",
        "modeling_flax_auto",
        "modeling_flax_encoder_decoder",
        "modeling_flax_utils",
        "modeling_speech_encoder_decoder",
        "modeling_flax_speech_encoder_decoder",
        "modeling_flax_vision_encoder_decoder",
        "modeling_transfo_xl_utilities",
        "modeling_tf_auto",
        "modeling_tf_encoder_decoder",
        "modeling_tf_outputs",
        "modeling_tf_pytorch_utils",
        "modeling_tf_utils",
        "modeling_tf_transfo_xl_utilities",
        "modeling_tf_vision_encoder_decoder",
        "modeling_vision_encoder_decoder",
    ]
    modules = []
    for model in dir(transformers.models):
        # There are some magic dunder attributes in the dir, we ignore them
        if not model.startswith("__"):
            model_module = getattr(transformers.models, model)
            for submodule in dir(model_module):
                if submodule.startswith("modeling") and submodule not in _ignore_modules:
                    modeling_module = getattr(model_module, submodule)
                    if inspect.ismodule(modeling_module):
                        modules.append(modeling_module)
    return modules


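# For illustration: `get_models(transformers.models.bert.modeling_bert)` returns (name, class) pairs
# such as ("BertModel", BertModel) and ("BertForMaskedLM", BertForMaskedLM); pretrained base classes
# like `BertPreTrainedModel` are skipped unless `include_pretrained=True`.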
def get_models(module, include_pretrained=False):
    """Get the objects in module that are models."""
    models = []
    model_classes = (transformers.PreTrainedModel, transformers.TFPreTrainedModel, transformers.FlaxPreTrainedModel)
    for attr_name in dir(module):
        if not include_pretrained and ("Pretrained" in attr_name or "PreTrained" in attr_name):
            continue
        attr = getattr(module, attr_name)
        if isinstance(attr, type) and issubclass(attr, model_classes) and attr.__module__ == module.__name__:
            models.append((attr_name, attr))
    return models


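# For illustration: `is_a_private_model("T5Stack")` is True via `PRIVATE_MODELS`, and a hypothetical
# "FooDecoderWrapper" would be True via the suffix rules below.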
def is_a_private_model(model):
    """Returns True if the model should not be in the main init."""
    if model in PRIVATE_MODELS:
        return True

    # Wrapper, Encoder and Decoder are all privates
    if model.endswith("Wrapper"):
        return True
    if model.endswith("Encoder"):
        return True
    if model.endswith("Decoder"):
        return True
    if model.endswith("Prenet"):
        return True
    return False


def check_models_are_in_init():
    """Checks all models defined in the library are in the main init."""
    models_not_in_init = []
    dir_transformers = dir(transformers)
    for module in get_model_modules():
        models_not_in_init += [
            model[0] for model in get_models(module, include_pretrained=True) if model[0] not in dir_transformers
        ]

    # Remove private models
    models_not_in_init = [model for model in models_not_in_init if not is_a_private_model(model)]
    if len(models_not_in_init) > 0:
        raise Exception(f"The following models should be in the main init: {','.join(models_not_in_init)}.")


# If some test_modeling files should be ignored when checking models are all tested, they should be added in the
# nested list _ignore_files of this function.
def get_model_test_files():
    """Get the model test files.

    The returned files should NOT contain the `tests` prefix (i.e. `PATH_TO_TESTS` defined in this script); they are
    paths relative to `tests`. A caller has to use `os.path.join(PATH_TO_TESTS, ...)` to access the files.
    """

    _ignore_files = [
        "test_modeling_common",
        "test_modeling_encoder_decoder",
        "test_modeling_flax_encoder_decoder",
        "test_modeling_flax_speech_encoder_decoder",
        "test_modeling_marian",
        "test_modeling_tf_common",
        "test_modeling_tf_encoder_decoder",
    ]
    test_files = []
    # Check both `PATH_TO_TESTS` and `PATH_TO_TESTS/models`
    model_test_root = os.path.join(PATH_TO_TESTS, "models")
    model_test_dirs = []
    for x in os.listdir(model_test_root):
        x = os.path.join(model_test_root, x)
        if os.path.isdir(x):
            model_test_dirs.append(x)

    for target_dir in [PATH_TO_TESTS] + model_test_dirs:
        for file_or_dir in os.listdir(target_dir):
            path = os.path.join(target_dir, file_or_dir)
            if os.path.isfile(path):
                filename = os.path.split(path)[-1]
                if "test_modeling" in filename and os.path.splitext(filename)[0] not in _ignore_files:
                    file = os.path.join(*path.split(os.sep)[1:])
                    test_files.append(file)
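                    # e.g. "tests/models/bert/test_modeling_bert.py" is stored as
                    # "models/bert/test_modeling_bert.py"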

    return test_files


# This is a bit hacky but I didn't find a way to import the test_file as a module and read inside the tester class
# for the all_model_classes variable.
def find_tested_models(test_file):
    """Parse the content of test_file to detect what's in all_model_classes"""
    with open(os.path.join(PATH_TO_TESTS, test_file), "r", encoding="utf-8", newline="\n") as f:
        content = f.read()
    all_models = re.findall(r"all_model_classes\s+=\s+\(\s*\(([^\)]*)\)", content)
    # Check with one less parenthesis as well
    all_models += re.findall(r"all_model_classes\s+=\s+\(([^\)]*)\)", content)
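    # For illustration, these patterns match declarations like (hypothetical test file):
    #     all_model_classes = (BertModel, BertForMaskedLM) if is_torch_available() else ()
    # and capture the comma-separated class names between the parentheses.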
    if len(all_models) > 0:
        model_tested = []
        for entry in all_models:
            for line in entry.split(","):
                name = line.strip()
                if len(name) > 0:
                    model_tested.append(name)
        return model_tested


def check_models_are_tested(module, test_file):
    """Check models defined in module are tested in test_file."""
    # XxxPreTrainedModel classes are not tested
    defined_models = get_models(module)
    tested_models = find_tested_models(test_file)
    if tested_models is None:
        if test_file.replace(os.path.sep, "/") in TEST_FILES_WITH_NO_COMMON_TESTS:
            return
        return [
            f"{test_file} should define `all_model_classes` to apply common tests to the models it tests. "
            + "If this is intentional, add the test filename to `TEST_FILES_WITH_NO_COMMON_TESTS` in the file "
            + "`utils/check_repo.py`."
        ]
    failures = []
    for model_name, _ in defined_models:
        if model_name not in tested_models and model_name not in IGNORE_NON_TESTED:
            failures.append(
                f"{model_name} is defined in {module.__name__} but is not tested in "
                + f"{os.path.join(PATH_TO_TESTS, test_file)}. Add it to the all_model_classes in that file. "
                + "If common tests should not be applied to that model, add its name to `IGNORE_NON_TESTED` "
                + "in the file `utils/check_repo.py`."
            )
    return failures


def check_all_models_are_tested():
    """Check all models are properly tested."""
    modules = get_model_modules()
    test_files = get_model_test_files()
    failures = []
    for module in modules:
        test_file = [file for file in test_files if f"test_{module.__name__.split('.')[-1]}.py" in file]
        if len(test_file) == 0:
            failures.append(f"{module.__name__} does not have its corresponding test file {test_file}.")
        elif len(test_file) > 1:
            failures.append(f"{module.__name__} has several test files: {test_file}.")
        else:
            test_file = test_file[0]
            new_failures = check_models_are_tested(module, test_file)
            if new_failures is not None:
                failures += new_failures
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


def get_all_auto_configured_models():
    """Return the list of all models in at least one auto class."""
    result = set()  # To avoid duplicates we concatenate all model classes in a set.
    if is_torch_available():
        for attr_name in dir(transformers.models.auto.modeling_auto):
            if attr_name.startswith("MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_auto, attr_name)))
    if is_tf_available():
        for attr_name in dir(transformers.models.auto.modeling_tf_auto):
            if attr_name.startswith("TF_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_tf_auto, attr_name)))
    if is_flax_available():
        for attr_name in dir(transformers.models.auto.modeling_flax_auto):
            if attr_name.startswith("FLAX_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_flax_auto, attr_name)))
    return list(result)


def ignore_unautoclassed(model_name):
    """Rules to determine if `name` should be in an auto class."""
    # Special white list
    if model_name in IGNORE_NON_AUTO_CONFIGURED:
        return True
    # Encoder and Decoder should be ignored
    if "Encoder" in model_name or "Decoder" in model_name:
        return True
    return False


def check_models_are_auto_configured(module, all_auto_models):
    """Check models defined in module are each in an auto class."""
    defined_models = get_models(module)
    failures = []
    for model_name, _ in defined_models:
        if model_name not in all_auto_models and not ignore_unautoclassed(model_name):
            failures.append(
                f"{model_name} is defined in {module.__name__} but is not present in any of the auto mapping. "
                "If that is intended behavior, add its name to `IGNORE_NON_AUTO_CONFIGURED` in the file "
                "`utils/check_repo.py`."
            )
    return failures


def check_all_models_are_auto_configured():
    """Check all models are each in an auto class."""
    missing_backends = []
    if not is_torch_available():
        missing_backends.append("PyTorch")
    if not is_tf_available():
        missing_backends.append("TensorFlow")
    if not is_flax_available():
        missing_backends.append("Flax")
    if len(missing_backends) > 0:
        missing = ", ".join(missing_backends)
        if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
            raise Exception(
                "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
                f"Transformers repo), the following are missing: {missing}."
            )
        else:
            warnings.warn(
                "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
                f"Transformers repo), the following are missing: {missing}. While it's probably fine as long as you "
                "didn't make any change in one of those backends modeling files, you should probably execute the "
                "command above to be on the safe side."
            )
    modules = get_model_modules()
    all_auto_models = get_all_auto_configured_models()
    failures = []
    for module in modules:
        new_failures = check_models_are_auto_configured(module, all_auto_models)
        if new_failures is not None:
            failures += new_failures
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


_re_decorator = re.compile(r"^\s*@(\S+)\s+$")
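# e.g. a line containing only "    @slow" matches and captures "slow"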


def check_decorator_order(filename):
    """Check that in the test file `filename` the slow decorator is always last."""
    with open(filename, "r", encoding="utf-8", newline="\n") as f:
        lines = f.readlines()
    decorator_before = None
    errors = []
    for i, line in enumerate(lines):
        search = _re_decorator.search(line)
        if search is not None:
            decorator_name = search.groups()[0]
            if decorator_before is not None and decorator_name.startswith("parameterized"):
                errors.append(i)
            decorator_before = decorator_name
        elif decorator_before is not None:
            decorator_before = None
    return errors


def check_all_decorator_order():
    """Check that in all test files, the slow decorator is always last."""
    errors = []
    for fname in os.listdir(PATH_TO_TESTS):
        if fname.endswith(".py"):
            filename = os.path.join(PATH_TO_TESTS, fname)
            new_errors = check_decorator_order(filename)
            errors += [f"- {filename}, line {i}" for i in new_errors]
    if len(errors) > 0:
        msg = "\n".join(errors)
        raise ValueError(
            "The parameterized decorator (and its variants) should always be first, but this is not the case in the"
            f" following files:\n{msg}"
        )


def find_all_documented_objects():
    """Parse the content of all doc files to detect which classes and functions they document."""
    documented_obj = []
    for doc_file in Path(PATH_TO_DOC).glob("**/*.rst"):
        with open(doc_file, "r", encoding="utf-8", newline="\n") as f:
            content = f.read()
        raw_doc_objs = re.findall(r"(?:autoclass|autofunction):: transformers.(\S+)\s+", content)
        documented_obj += [obj.split(".")[-1] for obj in raw_doc_objs]
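        # e.g. a line ".. autoclass:: transformers.BertModel" yields "BertModel"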
    for doc_file in Path(PATH_TO_DOC).glob("**/*.mdx"):
        with open(doc_file, "r", encoding="utf-8", newline="\n") as f:
            content = f.read()
        raw_doc_objs = re.findall(r"\[\[autodoc\]\]\s+(\S+)\s+", content)
        documented_obj += [obj.split(".")[-1] for obj in raw_doc_objs]
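        # e.g. a line "[[autodoc]] BertModel" yields "BertModel"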
    return documented_obj


# One good reason for not being documented is to be deprecated. Put deprecated objects in this list.
DEPRECATED_OBJECTS = [
    "AutoModelWithLMHead",
    "BartPretrainedModel",
    "DataCollator",
    "DataCollatorForSOP",
    "GlueDataset",
    "GlueDataTrainingArguments",
    "LineByLineTextDataset",
    "LineByLineWithRefDataset",
    "LineByLineWithSOPTextDataset",
    "PretrainedBartModel",
    "PretrainedFSMTModel",
    "SingleSentenceClassificationProcessor",
    "SquadDataTrainingArguments",
    "SquadDataset",
    "SquadExample",
    "SquadFeatures",
    "SquadV1Processor",
    "SquadV2Processor",
    "TFAutoModelWithLMHead",
    "TFBartPretrainedModel",
    "TextDataset",
    "TextDatasetForNextSentencePrediction",
    "Wav2Vec2ForMaskedLM",
    "Wav2Vec2Tokenizer",
    "glue_compute_metrics",
    "glue_convert_examples_to_features",
    "glue_output_modes",
    "glue_processors",
    "glue_tasks_num_labels",
    "squad_convert_examples_to_features",
    "xnli_compute_metrics",
    "xnli_output_modes",
    "xnli_processors",
    "xnli_tasks_num_labels",
    "TFTrainer",
    "TFTrainingArguments",
]

# Exceptionally, some objects should not be documented after all rules passed.
# ONLY PUT SOMETHING IN THIS LIST AS A LAST RESORT!
UNDOCUMENTED_OBJECTS = [
    "AddedToken",  # This is a tokenizers class.
    "BasicTokenizer",  # Internal, should never have been in the main init.
    "CharacterTokenizer",  # Internal, should never have been in the main init.
    "DPRPretrainedReader",  # Like an Encoder.
    "DummyObject",  # Just picked by mistake sometimes.
    "MecabTokenizer",  # Internal, should never have been in the main init.
    "ModelCard",  # Internal type.
    "SqueezeBertModule",  # Internal building block (should have been called SqueezeBertLayer)
    "TFDPRPretrainedReader",  # Like an Encoder.
    "TransfoXLCorpus",  # Internal type.
    "WordpieceTokenizer",  # Internal, should never have been in the main init.
    "absl",  # External module
    "add_end_docstrings",  # Internal, should never have been in the main init.
    "add_start_docstrings",  # Internal, should never have been in the main init.
    "convert_tf_weight_name_to_pt_weight_name",  # Internal used to convert model weights
    "logger",  # Internal logger
    "logging",  # External module
    "requires_backends",  # Internal function
    "AltRobertaModel",  # Internal module
]

# This list should be empty. Objects in it should get their own doc page.
SHOULD_HAVE_THEIR_OWN_PAGE = [
    # Benchmarks
    "PyTorchBenchmark",
    "PyTorchBenchmarkArguments",
    "TensorFlowBenchmark",
    "TensorFlowBenchmarkArguments",
    "AutoBackbone",
    "BitBackbone",
    "ConvNextBackbone",
    "DinatBackbone",
    "MaskFormerSwinBackbone",
    "MaskFormerSwinConfig",
    "MaskFormerSwinModel",
    "NatBackbone",
    "ResNetBackbone",
    "SwinBackbone",
]


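# For illustration: `ignore_undocumented("BERT_PRETRAINED_MODEL_ARCHIVE_LIST")` is True (uppercase
# constant) and `ignore_undocumented("BertEncoder")` is True (Encoder suffix), so neither needs its own doc.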
def ignore_undocumented(name):
    """Rules to determine if `name` should be undocumented."""
    # NOT DOCUMENTED ON PURPOSE.
    # Constants uppercase are not documented.
    if name.isupper():
        return True
    # PreTrainedModels / Encoders / Decoders / Layers / Embeddings / Attention are not documented.
    if (
        name.endswith("PreTrainedModel")
        or name.endswith("Decoder")
        or name.endswith("Encoder")
        or name.endswith("Layer")
        or name.endswith("Embeddings")
        or name.endswith("Attention")
    ):
        return True
    # Submodules are not documented.
    if os.path.isdir(os.path.join(PATH_TO_TRANSFORMERS, name)) or os.path.isfile(
        os.path.join(PATH_TO_TRANSFORMERS, f"{name}.py")
    ):
        return True
    # All load functions are not documented.
    if name.startswith("load_tf") or name.startswith("load_pytorch"):
        return True
    # is_xxx_available functions are not documented.
    if name.startswith("is_") and name.endswith("_available"):
        return True
    # Deprecated objects are not documented.
    if name in DEPRECATED_OBJECTS or name in UNDOCUMENTED_OBJECTS:
        return True
    # MMBT model does not really work.
    if name.startswith("MMBT"):
        return True
    if name in SHOULD_HAVE_THEIR_OWN_PAGE:
        return True
    return False


def check_all_objects_are_documented():
    """Check all objects are properly documented."""
    documented_objs = find_all_documented_objects()
    modules = transformers._modules
    objects = [c for c in dir(transformers) if c not in modules and not c.startswith("_")]
    undocumented_objs = [c for c in objects if c not in documented_objs and not ignore_undocumented(c)]
    if len(undocumented_objs) > 0:
        raise Exception(
            "The following objects are in the public init so should be documented:\n - "
            + "\n - ".join(undocumented_objs)
        )
    check_docstrings_are_in_md()
    check_model_type_doc_match()


def check_model_type_doc_match():
    """Check all doc pages have a corresponding model type."""
    model_doc_folder = Path(PATH_TO_DOC) / "model_doc"
    model_docs = [m.stem for m in model_doc_folder.glob("*.mdx")]

    model_types = list(transformers.models.auto.configuration_auto.MODEL_NAMES_MAPPING.keys())
    model_types = [MODEL_TYPE_TO_DOC_MAPPING[m] if m in MODEL_TYPE_TO_DOC_MAPPING else m for m in model_types]
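    # e.g. with `MODEL_TYPE_TO_DOC_MAPPING`, the "donut-swin" model type maps to the "donut" doc page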

    errors = []
    for m in model_docs:
        if m not in model_types and m != "auto":
            close_matches = get_close_matches(m, model_types)
            error_message = f"{m} is not a proper model identifier."
            if len(close_matches) > 0:
                close_matches = "/".join(close_matches)
                error_message += f" Did you mean {close_matches}?"
            errors.append(error_message)

    if len(errors) > 0:
        raise ValueError(
            "Some model doc pages do not match any existing model type:\n"
            + "\n".join(errors)
            + "\nYou can add any missing model type to the `MODEL_NAMES_MAPPING` constant in "
            "models/auto/configuration_auto.py."
        )


# Re pattern to catch :obj:`xx`, :class:`xx`, :func:`xx` or :meth:`xx`.
_re_rst_special_words = re.compile(r":(?:obj|func|class|meth):`([^`]+)`")
# Re pattern to catch things between double backquotes.
_re_double_backquotes = re.compile(r"(^|[^`])``([^`]+)``([^`]|$)")
# Re pattern to catch example introduction.
_re_rst_example = re.compile(r"^\s*Example.*::\s*$", flags=re.MULTILINE)
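# For illustration, docstrings containing rst markup such as ":obj:`int`", "``dict``" or a line ending
# in "Example::" are flagged by the patterns above; their Markdown equivalents (`int`, `dict`) are not.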


def is_rst_docstring(docstring):
    """
    Returns `True` if `docstring` is written in rst.
    """
    if _re_rst_special_words.search(docstring) is not None:
        return True
    if _re_double_backquotes.search(docstring) is not None:
        return True
    if _re_rst_example.search(docstring) is not None:
        return True
    return False


def check_docstrings_are_in_md():
    """Check all docstrings are in md"""
    files_with_rst = []
    for file in Path(PATH_TO_TRANSFORMERS).glob("**/*.py"):
        with open(file, encoding="utf-8") as f:
            code = f.read()
        docstrings = code.split('"""')

        for idx, docstring in enumerate(docstrings):
            if idx % 2 == 0 or not is_rst_docstring(docstring):
                continue
            files_with_rst.append(file)
            break

    if len(files_with_rst) > 0:
        raise ValueError(
            "The following files have docstrings written in rst:\n"
            + "\n".join([f"- {f}" for f in files_with_rst])
            + "\nTo fix this run `doc-builder convert path_to_py_file` after installing `doc-builder`\n"
            "(`pip install git+https://github.com/huggingface/doc-builder`)"
        )


def check_repo_quality():
    """Check all models are properly tested and documented."""
    print("Checking all models are included.")
    check_model_list()
    print("Checking all models are public.")
    check_models_are_in_init()
    print("Checking all models are properly tested.")
    check_all_decorator_order()
    check_all_models_are_tested()
    print("Checking all objects are properly documented.")
    check_all_objects_are_documented()
    print("Checking all models are in at least one auto class.")
    check_all_models_are_auto_configured()


if __name__ == "__main__":
    check_repo_quality()