# coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import importlib
import inspect
import os
import re
import warnings
from pathlib import Path

from transformers import is_flax_available, is_tf_available, is_torch_available
from transformers.file_utils import ENV_VARS_TRUE_VALUES
from transformers.models.auto import get_values


# All paths are set with the intent that you should run this script from the root of the repo with the command
# python utils/check_repo.py
PATH_TO_TRANSFORMERS = "src/transformers"
PATH_TO_TESTS = "tests"
PATH_TO_DOC = "docs/source"

# Update this list with models that are not tested, with a comment explaining why they should not be.
# Being in this list is an exception and should **not** be the rule.
IGNORE_NON_TESTED = [
    # models to ignore for not tested
    "BigBirdPegasusEncoder",  # Building part of bigger (tested) model.
    "BigBirdPegasusDecoder",  # Building part of bigger (tested) model.
    "BigBirdPegasusDecoderWrapper",  # Building part of bigger (tested) model.
    "M2M100Encoder",  # Building part of bigger (tested) model.
    "M2M100Decoder",  # Building part of bigger (tested) model.
    "Speech2TextEncoder",  # Building part of bigger (tested) model.
    "Speech2TextDecoder",  # Building part of bigger (tested) model.
    "LEDEncoder",  # Building part of bigger (tested) model.
    "LEDDecoder",  # Building part of bigger (tested) model.
    "BartDecoderWrapper",  # Building part of bigger (tested) model.
    "BartEncoder",  # Building part of bigger (tested) model.
    "BertLMHeadModel",  # Needs to be setup as decoder.
    "BlenderbotSmallEncoder",  # Building part of bigger (tested) model.
    "BlenderbotSmallDecoderWrapper",  # Building part of bigger (tested) model.
    "BlenderbotEncoder",  # Building part of bigger (tested) model.
    "BlenderbotDecoderWrapper",  # Building part of bigger (tested) model.
    "MBartEncoder",  # Building part of bigger (tested) model.
    "MBartDecoderWrapper",  # Building part of bigger (tested) model.
    "MegatronBertLMHeadModel",  # Building part of bigger (tested) model.
    "MegatronBertEncoder",  # Building part of bigger (tested) model.
    "MegatronBertDecoder",  # Building part of bigger (tested) model.
    "MegatronBertDecoderWrapper",  # Building part of bigger (tested) model.
    "PegasusEncoder",  # Building part of bigger (tested) model.
    "PegasusDecoderWrapper",  # Building part of bigger (tested) model.
    "DPREncoder",  # Building part of bigger (tested) model.
    "DPRSpanPredictor",  # Building part of bigger (tested) model.
    "ProphetNetDecoderWrapper",  # Building part of bigger (tested) model.
    "ReformerForMaskedLM",  # Needs to be setup as decoder.
    "T5Stack",  # Building part of bigger (tested) model.
    "TFDPREncoder",  # Building part of bigger (tested) model.
    "TFDPRSpanPredictor",  # Building part of bigger (tested) model.
    "TFElectraMainLayer",  # Building part of bigger (tested) model (should it be a TFPreTrainedModel ?)
    "TFRobertaForMultipleChoice",  # TODO: fix
    "SeparableConv1D",  # Building part of bigger (tested) model.
]

# Update this list with test files that don't have a tester with an `all_model_classes` variable and which don't
# trigger the common tests.
TEST_FILES_WITH_NO_COMMON_TESTS = [
    "test_modeling_camembert.py",
    "test_modeling_flax_bert.py",
    "test_modeling_flax_roberta.py",
    "test_modeling_mbart.py",
    "test_modeling_mt5.py",
    "test_modeling_pegasus.py",
    "test_modeling_tf_camembert.py",
    "test_modeling_tf_mt5.py",
    "test_modeling_tf_xlm_roberta.py",
    "test_modeling_xlm_prophetnet.py",
    "test_modeling_xlm_roberta.py",
]

# Update this list for models that are not in any of the auto MODEL_XXX_MAPPING. Being in this list is an exception and
# should **not** be the rule.
IGNORE_NON_AUTO_CONFIGURED = [
    # models to ignore for model xxx mapping
    "CLIPTextModel",
    "CLIPVisionModel",
    "FlaxCLIPTextModel",
    "FlaxCLIPVisionModel",
    "DPRReader",
    "DPRSpanPredictor",
    "FlaubertForQuestionAnswering",
    "GPT2DoubleHeadsModel",
    "LukeForEntityClassification",
    "LukeForEntityPairClassification",
    "LukeForEntitySpanClassification",
    "OpenAIGPTDoubleHeadsModel",
    "RagModel",
    "RagSequenceForGeneration",
    "RagTokenForGeneration",
    "T5Stack",
    "TFDPRReader",
    "TFDPRSpanPredictor",
    "TFGPT2DoubleHeadsModel",
    "TFOpenAIGPTDoubleHeadsModel",
    "TFRagModel",
    "TFRagSequenceForGeneration",
    "TFRagTokenForGeneration",
    "Wav2Vec2ForCTC",
    "XLMForQuestionAnswering",
    "XLNetForQuestionAnswering",
    "SeparableConv1D",
    "VisualBertForRegionToPhraseAlignment",
    "VisualBertForVisualReasoning",
    "VisualBertForQuestionAnswering",
    "VisualBertForMultipleChoice",
]

# This is to make sure the transformers module imported is the one in the repo.
spec = importlib.util.spec_from_file_location(
    "transformers",
    os.path.join(PATH_TO_TRANSFORMERS, "__init__.py"),
    submodule_search_locations=[PATH_TO_TRANSFORMERS],
)
transformers = spec.loader.load_module()
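# Note: `spec.loader.load_module()` is deprecated in favor of `exec_module`. A rough sketch of the
# modern equivalent (requires `import sys`, since `exec_module` does not register the module in
# `sys.modules` by itself):
#     transformers = importlib.util.module_from_spec(spec)
#     sys.modules[spec.name] = transformers
#     spec.loader.exec_module(transformers)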


# If some modeling modules should be ignored for all checks, they should be added in the nested list
# _ignore_modules of this function.
def get_model_modules():
    """Get the model modules inside the transformers library."""
    _ignore_modules = [
        "modeling_auto",
        "modeling_encoder_decoder",
        "modeling_marian",
        "modeling_mmbt",
        "modeling_outputs",
        "modeling_retribert",
        "modeling_utils",
        "modeling_flax_auto",
        "modeling_flax_utils",
        "modeling_transfo_xl_utilities",
        "modeling_tf_auto",
        "modeling_tf_outputs",
        "modeling_tf_pytorch_utils",
        "modeling_tf_utils",
        "modeling_tf_transfo_xl_utilities",
    ]
    modules = []
    for model in dir(transformers.models):
        # There are some magic dunder attributes in the dir; we ignore them.
        if not model.startswith("__"):
            model_module = getattr(transformers.models, model)
            for submodule in dir(model_module):
                if submodule.startswith("modeling") and submodule not in _ignore_modules:
                    modeling_module = getattr(model_module, submodule)
                    if inspect.ismodule(modeling_module):
                        modules.append(modeling_module)
    return modules


def get_models(module):
    """Get the objects in module that are models."""
    models = []
    model_classes = (transformers.PreTrainedModel, transformers.TFPreTrainedModel, transformers.FlaxPreTrainedModel)
    for attr_name in dir(module):
        if "Pretrained" in attr_name or "PreTrained" in attr_name:
            continue
        attr = getattr(module, attr_name)
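        # Only keep classes actually defined in this module (not re-exported from another module).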
        if isinstance(attr, type) and issubclass(attr, model_classes) and attr.__module__ == module.__name__:
            models.append((attr_name, attr))
    return models


# If some test_modeling files should be ignored when checking models are all tested, they should be added in the
# nested list _ignore_files of this function.
def get_model_test_files():
    """Get the model test files."""
    _ignore_files = [
        "test_modeling_common",
        "test_modeling_encoder_decoder",
        "test_modeling_marian",
        "test_modeling_tf_common",
    ]
    test_files = []
    for filename in os.listdir(PATH_TO_TESTS):
        if (
            os.path.isfile(f"{PATH_TO_TESTS}/{filename}")
            and filename.startswith("test_modeling")
            and not os.path.splitext(filename)[0] in _ignore_files
        ):
            test_files.append(filename)
    return test_files


# This is a bit hacky but I didn't find a way to import the test_file as a module and read inside the tester class
# for the all_model_classes variable.
def find_tested_models(test_file):
    """Parse the content of test_file to detect what's in all_model_classes"""
    # This is a bit hacky but I didn't find a way to import the test_file as a module and read inside the class
    with open(os.path.join(PATH_TO_TESTS, test_file), "r", encoding="utf-8", newline="\n") as f:
        content = f.read()
    all_models = re.findall(r"all_model_classes\s+=\s+\(\s*\(([^\)]*)\)", content)
    # Check with one less parenthesis as well
    all_models += re.findall(r"all_model_classes\s+=\s+\(([^\)]*)\)", content)
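    # For example, `all_model_classes = ((BertModel, BertForMaskedLM),)` is matched by the first pattern
    # and `all_model_classes = (BertModel, BertForMaskedLM)` by the second, both capturing
    # "BertModel, BertForMaskedLM".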
    if len(all_models) > 0:
        model_tested = []
        for entry in all_models:
            for line in entry.split(","):
                name = line.strip()
                if len(name) > 0:
                    model_tested.append(name)
        return model_tested


def check_models_are_tested(module, test_file):
    """Check models defined in module are tested in test_file."""
    defined_models = get_models(module)
    tested_models = find_tested_models(test_file)
    if tested_models is None:
        if test_file in TEST_FILES_WITH_NO_COMMON_TESTS:
            return
        return [
            f"{test_file} should define `all_model_classes` to apply common tests to the models it tests. "
            + "If this intentional, add the test filename to `TEST_FILES_WITH_NO_COMMON_TESTS` in the file "
            + "`utils/check_repo.py`."
        ]
    failures = []
    for model_name, _ in defined_models:
        if model_name not in tested_models and model_name not in IGNORE_NON_TESTED:
            failures.append(
                f"{model_name} is defined in {module.__name__} but is not tested in "
                + f"{os.path.join(PATH_TO_TESTS, test_file)}. Add it to the all_model_classes in that file."
                + "If common tests should not applied to that model, add its name to `IGNORE_NON_TESTED`"
                + "in the file `utils/check_repo.py`."
            )
    return failures


def check_all_models_are_tested():
    """Check all models are properly tested."""
    modules = get_model_modules()
    test_files = get_model_test_files()
    failures = []
    for module in modules:
        test_file = f"test_{module.__name__.split('.')[-1]}.py"
        if test_file not in test_files:
            failures.append(f"{module.__name__} does not have its corresponding test file {test_file}.")
        new_failures = check_models_are_tested(module, test_file)
        if new_failures is not None:
            failures += new_failures
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


def get_all_auto_configured_models():
    """Return the list of all models in at least one auto class."""
    result = set()  # To avoid duplicates, we collect all model classes in a set.
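    # The mappings are discovered by naming convention below, e.g. MODEL_MAPPING and
    # MODEL_FOR_MASKED_LM_MAPPING for PyTorch, with TF_/FLAX_ prefixed counterparts for the other backends.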
    if is_torch_available():
        for attr_name in dir(transformers.models.auto.modeling_auto):
            if attr_name.startswith("MODEL_") and attr_name.endswith("MAPPING"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_auto, attr_name)))
    if is_tf_available():
        for attr_name in dir(transformers.models.auto.modeling_tf_auto):
            if attr_name.startswith("TF_MODEL_") and attr_name.endswith("MAPPING"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_tf_auto, attr_name)))
    if is_flax_available():
        for attr_name in dir(transformers.models.auto.modeling_flax_auto):
            if attr_name.startswith("FLAX_MODEL_") and attr_name.endswith("MAPPING"):
                result = result | set(get_values(getattr(transformers.models.auto.modeling_flax_auto, attr_name)))
    return [cls.__name__ for cls in result]


def ignore_unautoclassed(model_name):
    """Rules to determine if `name` should be in an auto class."""
    # Special white list
    if model_name in IGNORE_NON_AUTO_CONFIGURED:
        return True
    # Encoder and Decoder should be ignored
    if "Encoder" in model_name or "Decoder" in model_name:
        return True
    return False
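# For example, `ignore_unautoclassed("BertEncoder")` returns True (it contains "Encoder"), while
# `ignore_unautoclassed("BertModel")` returns False.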


def check_models_are_auto_configured(module, all_auto_models):
    """Check models defined in module are each in an auto class."""
    defined_models = get_models(module)
    failures = []
    for model_name, _ in defined_models:
        if model_name not in all_auto_models and not ignore_unautoclassed(model_name):
            failures.append(
                f"{model_name} is defined in {module.__name__} but is not present in any of the auto mapping. "
                "If that is intended behavior, add its name to `IGNORE_NON_AUTO_CONFIGURED` in the file "
                "`utils/check_repo.py`."
            )
    return failures


def check_all_models_are_auto_configured():
    """Check all models are each in an auto class."""
    missing_backends = []
    if not is_torch_available():
        missing_backends.append("PyTorch")
    if not is_tf_available():
        missing_backends.append("TensorFlow")
    if not is_flax_available():
        missing_backends.append("Flax")
    if len(missing_backends) > 0:
        missing = ", ".join(missing_backends)
        if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
            raise Exception(
                "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
                f"Transformers repo, the following are missing: {missing}."
            )
        else:
            warnings.warn(
                "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
                f"Transformers repo, the following are missing: {missing}. While it's probably fine as long as you "
                "didn't make any change in one of those backends modeling files, you should probably execute the "
                "command above to be on the safe side."
            )
    modules = get_model_modules()
    all_auto_models = get_all_auto_configured_models()
    failures = []
    for module in modules:
        new_failures = check_models_are_auto_configured(module, all_auto_models)
        if new_failures is not None:
            failures += new_failures
    if len(failures) > 0:
        raise Exception(f"There were {len(failures)} failures:\n" + "\n".join(failures))


_re_decorator = re.compile(r"^\s*@(\S+)\s+$")
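# For example, a line consisting solely of `    @slow` matches, capturing "slow" (without the leading `@`).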


def check_decorator_order(filename):
    """Check that in the test file `filename`, the parameterized decorator (and its variants) always comes first."""
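    # For example, the following ordering is flagged because `parameterized` must come first:
    #     @slow
    #     @parameterized.expand(...)
    #     def test_foo(self):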
    with open(filename, "r", encoding="utf-8", newline="\n") as f:
        lines = f.readlines()
    decorator_before = None
    errors = []
    for i, line in enumerate(lines):
        search = _re_decorator.search(line)
        if search is not None:
            decorator_name = search.groups()[0]
            if decorator_before is not None and decorator_name.startswith("parameterized"):
                errors.append(i)
            decorator_before = decorator_name
        elif decorator_before is not None:
            decorator_before = None
    return errors


def check_all_decorator_order():
    """Check that in all test files, the parameterized decorator (and its variants) always comes first."""
    errors = []
    for fname in os.listdir(PATH_TO_TESTS):
        if fname.endswith(".py"):
            filename = os.path.join(PATH_TO_TESTS, fname)
            new_errors = check_decorator_order(filename)
            errors += [f"- {filename}, line {i}" for i in new_errors]
    if len(errors) > 0:
        msg = "\n".join(errors)
        raise ValueError(
            f"The parameterized decorator (and its variants) should always be first, but this is not the case in the following files:\n{msg}"
        )


def find_all_documented_objects():
    """Parse the content of all doc files to detect which classes and functions they document."""
    documented_obj = []
    for doc_file in Path(PATH_TO_DOC).glob("**/*.rst"):
        with open(doc_file, "r", encoding="utf-8", newline="\n") as f:
            content = f.read()
        raw_doc_objs = re.findall(r"(?:autoclass|autofunction):: transformers.(\S+)\s+", content)
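        # For example, a doc line `.. autoclass:: transformers.BertModel` yields "BertModel".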
        documented_obj += [obj.split(".")[-1] for obj in raw_doc_objs]
    return documented_obj


# One good reason for not being documented is being deprecated. Add deprecated objects to this list.
DEPRECATED_OBJECTS = [
    "AutoModelWithLMHead",
    "BartPretrainedModel",
    "DataCollator",
    "DataCollatorForSOP",
    "GlueDataset",
    "GlueDataTrainingArguments",
    "LineByLineTextDataset",
    "LineByLineWithRefDataset",
    "LineByLineWithSOPTextDataset",
    "PretrainedBartModel",
    "PretrainedFSMTModel",
    "SingleSentenceClassificationProcessor",
    "SquadDataTrainingArguments",
    "SquadDataset",
    "SquadExample",
    "SquadFeatures",
    "SquadV1Processor",
    "SquadV2Processor",
    "TFAutoModelWithLMHead",
    "TFBartPretrainedModel",
    "TextDataset",
    "TextDatasetForNextSentencePrediction",
    "Wav2Vec2ForMaskedLM",
    "Wav2Vec2Tokenizer",
    "glue_compute_metrics",
    "glue_convert_examples_to_features",
    "glue_output_modes",
    "glue_processors",
    "glue_tasks_num_labels",
    "squad_convert_examples_to_features",
    "xnli_compute_metrics",
    "xnli_output_modes",
    "xnli_processors",
    "xnli_tasks_num_labels",
]

# Exceptionally, some objects should not be documented even though the rules above don't exempt them.
# ONLY PUT SOMETHING IN THIS LIST AS A LAST RESORT!
UNDOCUMENTED_OBJECTS = [
    "AddedToken",  # This is a tokenizers class.
    "BasicTokenizer",  # Internal, should never have been in the main init.
    "CharacterTokenizer",  # Internal, should never have been in the main init.
    "DPRPretrainedReader",  # Like an Encoder.
    "MecabTokenizer",  # Internal, should never have been in the main init.
    "ModelCard",  # Internal type.
    "SqueezeBertModule",  # Internal building block (should have been called SqueezeBertLayer)
    "TFDPRPretrainedReader",  # Like an Encoder.
    "TransfoXLCorpus",  # Internal type.
    "WordpieceTokenizer",  # Internal, should never have been in the main init.
    "absl",  # External module
    "add_end_docstrings",  # Internal, should never have been in the main init.
    "add_start_docstrings",  # Internal, should never have been in the main init.
    "cached_path",  # Internal used for downloading models.
    "convert_tf_weight_name_to_pt_weight_name",  # Internal used to convert model weights
    "logger",  # Internal logger
    "logging",  # External module
    "requires_backends",  # Internal function
]

# This list should be empty. Objects in it should get their own doc page.
SHOULD_HAVE_THEIR_OWN_PAGE = [
    # Benchmarks
    "PyTorchBenchmark",
    "PyTorchBenchmarkArguments",
    "TensorFlowBenchmark",
    "TensorFlowBenchmarkArguments",
]


def ignore_undocumented(name):
    """Rules to determine if `name` should be undocumented."""
    # NOT DOCUMENTED ON PURPOSE.
    # Uppercase constants are not documented.
    if name.isupper():
        return True
    # PreTrainedModels / Encoders / Decoders / Layers / Embeddings / Attention are not documented.
    if (
        name.endswith("PreTrainedModel")
        or name.endswith("Decoder")
        or name.endswith("Encoder")
        or name.endswith("Layer")
        or name.endswith("Embeddings")
        or name.endswith("Attention")
    ):
        return True
    # Submodules are not documented.
    if os.path.isdir(os.path.join(PATH_TO_TRANSFORMERS, name)) or os.path.isfile(
        os.path.join(PATH_TO_TRANSFORMERS, f"{name}.py")
    ):
        return True
    # All load functions are not documented.
    if name.startswith("load_tf") or name.startswith("load_pytorch"):
        return True
    # is_xxx_available functions are not documented.
    if name.startswith("is_") and name.endswith("_available"):
        return True
    # Deprecated objects are not documented.
    if name in DEPRECATED_OBJECTS or name in UNDOCUMENTED_OBJECTS:
        return True
    # MMBT model does not really work.
    if name.startswith("MMBT"):
        return True
    if name in SHOULD_HAVE_THEIR_OWN_PAGE:
        return True
    return False


def check_all_objects_are_documented():
    """Check all objects are properly documented."""
    documented_objs = find_all_documented_objects()
    modules = transformers._modules
    objects = [c for c in dir(transformers) if c not in modules and not c.startswith("_")]
    undocumented_objs = [c for c in objects if c not in documented_objs and not ignore_undocumented(c)]
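    # Anything importable from the top-level init that is neither documented nor explicitly ignored
    # is reported as a failure below.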
    if len(undocumented_objs) > 0:
        raise Exception(
            "The following objects are in the public init so should be documented:\n - "
            + "\n - ".join(undocumented_objs)
        )


def check_repo_quality():
    """Check all models are properly tested and documented."""
    print("Checking all models are properly tested.")
    check_all_decorator_order()
    check_all_models_are_tested()
    print("Checking all objects are properly documented.")
    check_all_objects_are_documented()
    print("Checking all models are in at least one auto class.")
    check_all_models_are_auto_configured()


if __name__ == "__main__":
    check_repo_quality()