# coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import importlib
import inspect
import os
import re
import warnings
from pathlib import Path

from transformers import is_flax_available, is_tf_available, is_torch_available
from transformers.file_utils import ENV_VARS_TRUE_VALUES
from transformers.models.auto import get_values


# All paths are set with the intent you should run this script from the root of the repo with the command
# python utils/check_repo.py
PATH_TO_TRANSFORMERS = "src/transformers"
PATH_TO_TESTS = "tests"
PATH_TO_DOC = "docs/source"

# Update this list with models that are supposed to be private.
PRIVATE_MODELS = [
    "DPRSpanPredictor",
    "T5Stack",
    "TFDPRSpanPredictor",
]

# Update this list with models that are not tested, with a comment explaining the reason they
# should not be. Being in this list is an exception and should **not** be the rule.
IGNORE_NON_TESTED = PRIVATE_MODELS.copy() + [
    # models to ignore for not tested
    "BigBirdPegasusEncoder",  # Building part of bigger (tested) model.
    "BigBirdPegasusDecoder",  # Building part of bigger (tested) model.
    "BigBirdPegasusDecoderWrapper",  # Building part of bigger (tested) model.
    "DetrEncoder",  # Building part of bigger (tested) model.
    "DetrDecoder",  # Building part of bigger (tested) model.
    "DetrDecoderWrapper",  # Building part of bigger (tested) model.
    "M2M100Encoder",  # Building part of bigger (tested) model.
    "M2M100Decoder",  # Building part of bigger (tested) model.
    "Speech2TextEncoder",  # Building part of bigger (tested) model.
    "Speech2TextDecoder",  # Building part of bigger (tested) model.
    "LEDEncoder",  # Building part of bigger (tested) model.
    "LEDDecoder",  # Building part of bigger (tested) model.
    "BartDecoderWrapper",  # Building part of bigger (tested) model.
    "BartEncoder",  # Building part of bigger (tested) model.
    "BertLMHeadModel",  # Needs to be setup as decoder.
    "BlenderbotSmallEncoder",  # Building part of bigger (tested) model.
    "BlenderbotSmallDecoderWrapper",  # Building part of bigger (tested) model.
    "BlenderbotEncoder",  # Building part of bigger (tested) model.
    "BlenderbotDecoderWrapper",  # Building part of bigger (tested) model.
    "MBartEncoder",  # Building part of bigger (tested) model.
    "MBartDecoderWrapper",  # Building part of bigger (tested) model.
    "MegatronBertLMHeadModel",  # Building part of bigger (tested) model.
    "MegatronBertEncoder",  # Building part of bigger (tested) model.
    "MegatronBertDecoder",  # Building part of bigger (tested) model.
    "MegatronBertDecoderWrapper",  # Building part of bigger (tested) model.
    "PegasusEncoder",  # Building part of bigger (tested) model.
    "PegasusDecoderWrapper",  # Building part of bigger (tested) model.
    "DPREncoder",  # Building part of bigger (tested) model.
    "ProphetNetDecoderWrapper",  # Building part of bigger (tested) model.
    "ReformerForMaskedLM",  # Needs to be setup as decoder.
    "TFDPREncoder",  # Building part of bigger (tested) model.
    "TFElectraMainLayer",  # Building part of bigger (tested) model (should it be a TFPreTrainedModel ?)
    "TFRobertaForMultipleChoice",  # TODO: fix
    "SeparableConv1D",  # Building part of bigger (tested) model.
]

# Update this list with test files that don't have a tester with an `all_model_classes` variable
# and therefore don't trigger the common tests.
TEST_FILES_WITH_NO_COMMON_TESTS = [
    "test_modeling_camembert.py",
    "test_modeling_flax_mt5.py",
    "test_modeling_mbart.py",
    "test_modeling_mt5.py",
    "test_modeling_pegasus.py",
    "test_modeling_tf_camembert.py",
    "test_modeling_tf_mt5.py",
    "test_modeling_tf_xlm_roberta.py",
    "test_modeling_xlm_prophetnet.py",
    "test_modeling_xlm_roberta.py",
]

# Update this list with models that are not in any of the auto MODEL_XXX_MAPPING. Being in this
# list is an exception and should **not** be the rule.
IGNORE_NON_AUTO_CONFIGURED = PRIVATE_MODELS.copy() + [
    # models to ignore for model xxx mapping
    "BeitForMaskedImageModeling",
    "CLIPTextModel",
    "CLIPVisionModel",
    "FlaxCLIPTextModel",
    "FlaxCLIPVisionModel",
    "FlaxWav2Vec2ForCTC",
    "DetrForSegmentation",
    "DPRReader",
    "FlaubertForQuestionAnswering",
    "GPT2DoubleHeadsModel",
    "LukeForEntityClassification",
    "LukeForEntityPairClassification",
    "LukeForEntitySpanClassification",
    "OpenAIGPTDoubleHeadsModel",
    "RagModel",
    "RagSequenceForGeneration",
    "RagTokenForGeneration",
    "TFDPRReader",
    "TFGPT2DoubleHeadsModel",
    "TFOpenAIGPTDoubleHeadsModel",
    "TFRagModel",
    "TFRagSequenceForGeneration",
    "TFRagTokenForGeneration",
    "Wav2Vec2ForCTC",
    "HubertForCTC",
    "XLMForQuestionAnswering",
    "XLNetForQuestionAnswering",
    "SeparableConv1D",
    "VisualBertForRegionToPhraseAlignment",
    "VisualBertForVisualReasoning",
    "VisualBertForQuestionAnswering",
    "VisualBertForMultipleChoice",
    "TFWav2Vec2ForCTC",
    "TFHubertForCTC",
]

# This is to make sure the transformers module imported is the one in the repo.
spec = importlib.util.spec_from_file_location(
    "transformers",
    os.path.join(PATH_TO_TRANSFORMERS, "__init__.py"),
    submodule_search_locations=[PATH_TO_TRANSFORMERS],
)
transformers = spec.loader.load_module()


# If some modeling modules should be ignored for all checks, they should be added to the
# `_ignore_modules` list inside this function.
def get_model_modules():
    """Get the model modules inside the transformers library."""
    _ignore_modules = [
        "modeling_auto",
        "modeling_encoder_decoder",
        "modeling_marian",
        "modeling_mmbt",
        "modeling_outputs",
        "modeling_retribert",
        "modeling_utils",
        "modeling_flax_auto",
        "modeling_flax_encoder_decoder",
        "modeling_flax_utils",
        "modeling_transfo_xl_utilities",
        "modeling_tf_auto",
        "modeling_tf_outputs",
        "modeling_tf_pytorch_utils",
        "modeling_tf_utils",
        "modeling_tf_transfo_xl_utilities",
    ]
    modules = []
    for model in dir(transformers.models):
        # There are some magic dunder attributes in the dir; we ignore them
        if not model.startswith("__"):
            model_module = getattr(transformers.models, model)
            for submodule in dir(model_module):
                if submodule.startswith("modeling") and submodule not in _ignore_modules:
                    modeling_module = getattr(model_module, submodule)
                    if inspect.ismodule(modeling_module):
                        modules.append(modeling_module)
    return modules


def get_models(module, include_pretrained=False):
    """Get the objects in module that are models."""
    models = []
    model_classes = (transformers.PreTrainedModel, transformers.TFPreTrainedModel, transformers.FlaxPreTrainedModel)
    for attr_name in dir(module):
        if not include_pretrained and ("Pretrained" in attr_name or "PreTrained" in attr_name):
            continue
        attr = getattr(module, attr_name)
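        # Keep only classes that subclass one of the base model classes and are defined in this
        # very module (not merely imported into it).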
        if isinstance(attr, type) and issubclass(attr, model_classes) and attr.__module__ == module.__name__:
            models.append((attr_name, attr))
    return models


def is_a_private_model(model):
    """Returns True if the model should not be in the main init."""
    if model in PRIVATE_MODELS:
        return True

    # Wrappers, Encoders and Decoders are all private
    if model.endswith("Wrapper"):
        return True
    if model.endswith("Encoder"):
        return True
    if model.endswith("Decoder"):
        return True
    return False


def check_models_are_in_init():
    """Checks all models defined in the library are in the main init."""
    models_not_in_init = []
    dir_transformers = dir(transformers)
    for module in get_model_modules():
        models_not_in_init += [
            model[0] for model in get_models(module, include_pretrained=True) if model[0] not in dir_transformers
        ]

    # Remove private models
    models_not_in_init = [model for model in models_not_in_init if not is_a_private_model(model)]
    if len(models_not_in_init) > 0:
        raise Exception(f"The following models should be in the main init: {','.join(models_not_in_init)}.")


# If some test_modeling files should be ignored when checking models are all tested, they should
# be added to the `_ignore_files` list inside this function.
def get_model_test_files():
    """Get the model test files."""
    _ignore_files = [
        "test_modeling_common",
        "test_modeling_encoder_decoder",
        "test_modeling_flax_encoder_decoder",
        "test_modeling_marian",
        "test_modeling_tf_common",
    ]
    test_files = []
    for filename in os.listdir(PATH_TO_TESTS):
        if (
            os.path.isfile(f"{PATH_TO_TESTS}/{filename}")
            and filename.startswith("test_modeling")
            and os.path.splitext(filename)[0] not in _ignore_files
        ):
            test_files.append(filename)
    return test_files


# This is a bit hacky but I didn't find a way to import the test_file as a module and read inside the tester class
# for the all_model_classes variable.
def find_tested_models(test_file):
    """Parse the content of test_file to detect what's in all_model_classes"""
    with open(os.path.join(PATH_TO_TESTS, test_file), "r", encoding="utf-8", newline="\n") as f:
        content = f.read()
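    # `all_model_classes` may be assigned a tuple of tuples or a flat tuple; the two regexes below
    # capture the class names in either form, up to the first closing parenthesis.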
    all_models = re.findall(r"all_model_classes\s+=\s+\(\s*\(([^\)]*)\)", content)
    # Check with one less parenthesis as well
    all_models += re.findall(r"all_model_classes\s+=\s+\(([^\)]*)\)", content)
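    # Returns the list of tested model names, or None (implicitly) when no `all_model_classes`
    # assignment was found.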
    if len(all_models) > 0:
        model_tested = []
        for entry in all_models:
            for line in entry.split(","):
                name = line.strip()
                if len(name) > 0:
                    model_tested.append(name)
        return model_tested


def check_models_are_tested(module, test_file):
    """Check models defined in module are tested in test_file."""
    # XxxPreTrainedModel classes are not tested (`get_models` skips them by default).
    defined_models = get_models(module)
    tested_models = find_tested_models(test_file)
    if tested_models is None:
        if test_file in TEST_FILES_WITH_NO_COMMON_TESTS:
            return
        return [
            f"{test_file} should define `all_model_classes` to apply common tests to the models it tests. "
            + "If this intentional, add the test filename to `TEST_FILES_WITH_NO_COMMON_TESTS` in the file "
            + "`utils/check_repo.py`."
        ]
    failures = []
    for model_name, _ in defined_models:
        if model_name not in tested_models and model_name not in IGNORE_NON_TESTED:
            failures.append(
                f"{model_name} is defined in {module.__name__} but is not tested in "
                + f"{os.path.join(PATH_TO_TESTS, test_file)}. Add it to the all_model_classes in that file."
                + "If common tests should not applied to that model, add its name to `IGNORE_NON_TESTED`"
                + "in the file `utils/check_repo.py`."
            )
    return failures


def check_all_models_are_tested():
    """Check all models are properly tested."""
    modules = get_model_modules()
    test_files = get_model_test_files()
    failures = []
    for module in modules:
        test_file = f"test_{module.__name__.split('.')[-1]}.py"
        if test_file not in test_files:
            failures.append(f"{module.__name__} does not have its corresponding test file {test_file}.")
        else:
            new_failures = check_models_are_tested(module, test_file)
            if new_failures is not None:
                failures += new_failures
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


def get_all_auto_configured_models():
    """Return the list of all models in at least one auto class."""
    result = set()  # To avoid duplicates we concatenate all model classes in a set.
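    # Each auto module defines `*_MAPPING_NAMES` constants whose values are model class names;
    # collect those values for every backend that is installed.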
    if is_torch_available():
        for attr_name in dir(transformers.models.auto.modeling_auto):
            if attr_name.startswith("MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_auto, attr_name)))
    if is_tf_available():
        for attr_name in dir(transformers.models.auto.modeling_tf_auto):
            if attr_name.startswith("TF_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_tf_auto, attr_name)))
    if is_flax_available():
        for attr_name in dir(transformers.models.auto.modeling_flax_auto):
            if attr_name.startswith("FLAX_MODEL_") and attr_name.endswith("MAPPING_NAMES"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_flax_auto, attr_name)))
    return list(result)


def ignore_unautoclassed(model_name):
    """Rules to determine if `name` should be in an auto class."""
    # Special white list
    if model_name in IGNORE_NON_AUTO_CONFIGURED:
        return True
    # Encoder and Decoder should be ignored
    if "Encoder" in model_name or "Decoder" in model_name:
        return True
    return False


def check_models_are_auto_configured(module, all_auto_models):
    """Check models defined in module are each in an auto class."""
    defined_models = get_models(module)
    failures = []
    for model_name, _ in defined_models:
        if model_name not in all_auto_models and not ignore_unautoclassed(model_name):
            failures.append(
                f"{model_name} is defined in {module.__name__} but is not present in any of the auto mapping. "
                "If that is intended behavior, add its name to `IGNORE_NON_AUTO_CONFIGURED` in the file "
                "`utils/check_repo.py`."
            )
    return failures


def check_all_models_are_auto_configured():
    """Check all models are each in an auto class."""
    missing_backends = []
    if not is_torch_available():
        missing_backends.append("PyTorch")
    if not is_tf_available():
        missing_backends.append("TensorFlow")
    if not is_flax_available():
        missing_backends.append("Flax")
    if len(missing_backends) > 0:
        missing = ", ".join(missing_backends)
        if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
            raise Exception(
                "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
                f"Transformers repo, the following are missing: {missing}."
            )
        else:
            warnings.warn(
                "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
                f"Transformers repo, the following are missing: {missing}. While it's probably fine as long as you "
                "didn't make any change in one of those backends modeling files, you should probably execute the "
                "command above to be on the safe side."
            )
    modules = get_model_modules()
    all_auto_models = get_all_auto_configured_models()
    failures = []
    for module in modules:
        new_failures = check_models_are_auto_configured(module, all_auto_models)
        if new_failures is not None:
            failures += new_failures
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


_re_decorator = re.compile(r"^\s*@(\S+)\s+$")


def check_decorator_order(filename):
    """Check that in the test file `filename` the slow decorator is always last."""
    with open(filename, "r", encoding="utf-8", newline="\n") as f:
        lines = f.readlines()
    decorator_before = None
    errors = []
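    # `decorator_before` holds the decorator found on the previous line (None if the previous line
    # was not a decorator); a `parameterized` decorator preceded by another decorator is flagged.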
    for i, line in enumerate(lines):
        search = _re_decorator.search(line)
        if search is not None:
            decorator_name = search.groups()[0]
            if decorator_before is not None and decorator_name.startswith("parameterized"):
                errors.append(i)
            decorator_before = decorator_name
        elif decorator_before is not None:
            decorator_before = None
    return errors


def check_all_decorator_order():
    """Check that in all test files, the slow decorator is always last."""
    errors = []
    for fname in os.listdir(PATH_TO_TESTS):
        if fname.endswith(".py"):
            filename = os.path.join(PATH_TO_TESTS, fname)
            new_errors = check_decorator_order(filename)
            errors += [f"- {filename}, line {i}" for i in new_errors]
    if len(errors) > 0:
        msg = "\n".join(errors)
        raise ValueError(
            f"The parameterized decorator (and its variants) should always be first, but this is not the case in the following files:\n{msg}"
        )


def find_all_documented_objects():
    """Parse the content of all doc files to detect which classes and functions it documents"""
    documented_obj = []
    for doc_file in Path(PATH_TO_DOC).glob("**/*.rst"):
        with open(doc_file, "r", encoding="utf-8", newline="\n") as f:
            content = f.read()
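        # Sphinx directives like `.. autoclass:: transformers.BertModel` mark documented objects.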
        raw_doc_objs = re.findall(r"(?:autoclass|autofunction):: transformers.(\S+)\s+", content)
        documented_obj += [obj.split(".")[-1] for obj in raw_doc_objs]
    return documented_obj


# One good reason for not being documented is to be deprecated. Put in this list deprecated objects.
DEPRECATED_OBJECTS = [
    "AutoModelWithLMHead",
    "BartPretrainedModel",
    "DataCollator",
    "DataCollatorForSOP",
    "GlueDataset",
    "GlueDataTrainingArguments",
    "LineByLineTextDataset",
    "LineByLineWithRefDataset",
    "LineByLineWithSOPTextDataset",
    "PretrainedBartModel",
    "PretrainedFSMTModel",
    "SingleSentenceClassificationProcessor",
    "SquadDataTrainingArguments",
    "SquadDataset",
    "SquadExample",
    "SquadFeatures",
    "SquadV1Processor",
    "SquadV2Processor",
    "TFAutoModelWithLMHead",
    "TFBartPretrainedModel",
    "TextDataset",
    "TextDatasetForNextSentencePrediction",
    "Wav2Vec2ForMaskedLM",
    "Wav2Vec2Tokenizer",
    "glue_compute_metrics",
    "glue_convert_examples_to_features",
    "glue_output_modes",
    "glue_processors",
    "glue_tasks_num_labels",
    "squad_convert_examples_to_features",
    "xnli_compute_metrics",
    "xnli_output_modes",
    "xnli_processors",
    "xnli_tasks_num_labels",
]

# Exceptionally, some objects should not be documented after all rules passed.
# ONLY PUT SOMETHING IN THIS LIST AS A LAST RESORT!
UNDOCUMENTED_OBJECTS = [
    "AddedToken",  # This is a tokenizers class.
    "BasicTokenizer",  # Internal, should never have been in the main init.
    "CharacterTokenizer",  # Internal, should never have been in the main init.
    "DPRPretrainedReader",  # Like an Encoder.
    "MecabTokenizer",  # Internal, should never have been in the main init.
    "ModelCard",  # Internal type.
    "SqueezeBertModule",  # Internal building block (should have been called SqueezeBertLayer)
    "TFDPRPretrainedReader",  # Like an Encoder.
    "TransfoXLCorpus",  # Internal type.
    "WordpieceTokenizer",  # Internal, should never have been in the main init.
    "absl",  # External module
    "add_end_docstrings",  # Internal, should never have been in the main init.
    "add_start_docstrings",  # Internal, should never have been in the main init.
    "cached_path",  # Internal used for downloading models.
    "convert_tf_weight_name_to_pt_weight_name",  # Internal used to convert model weights
    "logger",  # Internal logger
    "logging",  # External module
    "requires_backends",  # Internal function
]

# This list should be empty. Objects in it should get their own doc page.
SHOULD_HAVE_THEIR_OWN_PAGE = [
    # Benchmarks
    "PyTorchBenchmark",
    "PyTorchBenchmarkArguments",
    "TensorFlowBenchmark",
    "TensorFlowBenchmarkArguments",
]


def ignore_undocumented(name):
    """Rules to determine if `name` should be undocumented."""
    # NOT DOCUMENTED ON PURPOSE.
    # Uppercase constants are not documented.
    if name.isupper():
        return True
    # PreTrainedModels / Encoders / Decoders / Layers / Embeddings / Attention are not documented.
    if (
        name.endswith("PreTrainedModel")
        or name.endswith("Decoder")
        or name.endswith("Encoder")
        or name.endswith("Layer")
        or name.endswith("Embeddings")
        or name.endswith("Attention")
    ):
        return True
    # Submodules are not documented.
    if os.path.isdir(os.path.join(PATH_TO_TRANSFORMERS, name)) or os.path.isfile(
        os.path.join(PATH_TO_TRANSFORMERS, f"{name}.py")
    ):
        return True
    # Load functions are not documented.
    if name.startswith("load_tf") or name.startswith("load_pytorch"):
        return True
    # is_xxx_available functions are not documented.
    if name.startswith("is_") and name.endswith("_available"):
        return True
    # Deprecated objects are not documented.
    if name in DEPRECATED_OBJECTS or name in UNDOCUMENTED_OBJECTS:
        return True
    # MMBT model does not really work.
    if name.startswith("MMBT"):
        return True
    if name in SHOULD_HAVE_THEIR_OWN_PAGE:
        return True
    return False


def check_all_objects_are_documented():
    """Check all models are properly documented."""
    documented_objs = find_all_documented_objects()
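    # Public objects are everything in `dir(transformers)` that is neither a submodule nor private.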
    modules = transformers._modules
    objects = [c for c in dir(transformers) if c not in modules and not c.startswith("_")]
    undocumented_objs = [c for c in objects if c not in documented_objs and not ignore_undocumented(c)]
    if len(undocumented_objs) > 0:
        raise Exception(
            "The following objects are in the public init so should be documented:\n - "
            + "\n - ".join(undocumented_objs)
        )


def check_repo_quality():
    """Check all models are properly tested and documented."""
    print("Checking all models are public.")
    check_models_are_in_init()
    print("Checking all models are properly tested.")
    check_all_decorator_order()
    check_all_models_are_tested()
    print("Checking all objects are properly documented.")
    check_all_objects_are_documented()
    print("Checking all models are in at least one auto class.")
    check_all_models_are_auto_configured()


if __name__ == "__main__":
    check_repo_quality()