"docs/source/de/index.mdx" did not exist on "adc0ff25028d29af30386f2d7d3f85e290fbef57"
conftest.py 4.83 KB
Newer Older
Sylvain Gugger's avatar
Sylvain Gugger committed
1
2
3
4
5
6
7
8
9
10
11
12
13
14
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# tests directory-specific settings - this file is run automatically
# by pytest before any tests are run

import doctest
import sys
import warnings
from os.path import abspath, dirname, join

import _pytest
import pytest

from transformers.testing_utils import HfDoctestModule, HfDocTestParser


NOT_DEVICE_TESTS = {
    "test_tokenization",
    "test_processor",
    "test_processing",
    "test_feature_extraction",
    "test_image_processing",
    "test_image_processor",
    "test_retrieval",
    "test_config",
    "test_from_pretrained_no_checkpoint",
    "test_keep_in_fp32_modules",
    "test_gradient_checkpointing_backward_compatibility",
    "test_gradient_checkpointing_enable_disable",
    "test_save_load_fast_init_from_base",
    "test_fast_init_context_manager",
    "test_fast_init_tied_embeddings",
    "test_save_load_fast_init_to_base",
    "test_torch_save_load",
    "test_initialization",
    "test_forward_signature",
    "test_model_common_attributes",
    "test_model_main_input_name",
    "test_correct_missing_keys",
    "test_tie_model_weights",
    "test_can_use_safetensors",
    "test_load_save_without_tied_weights",
    "test_tied_weights_keys",
    "test_model_weights_reload_no_missing_tied_weights",
    "test_pt_tf_model_equivalence",
    "test_mismatched_shapes_have_properly_initialized_weights",
    "test_matched_shapes_have_loaded_weights_when_some_mismatched_shapes_exist",
    "test_model_is_small",
    "test_tf_from_pt_safetensors",
    "test_flax_from_pt_safetensors",
    "ModelTest::test_pipeline_",  # None of the pipeline tests from PipelineTesterMixin (of which XxxModelTest inherits from) are running on device
    "ModelTester::test_pipeline_",
}
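# Entries above are matched as plain substrings against each collected test's nodeid in
# pytest_collection_modifyitems below, so a (hypothetical) nodeid such as
# tests/models/bert/test_modeling_bert.py::BertModelTest::test_config would get the marker.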

# allow having multiple repository checkouts and not needing to remember to rerun
# `pip install -e '.[dev]'` when switching between checkouts and running tests.
git_repo_path = abspath(join(dirname(__file__), "src"))
sys.path.insert(1, git_repo_path)

# silence FutureWarning warnings in tests since often we can't act on them until
# they become normal warnings - i.e. the tests still need to test the current functionality
warnings.simplefilter(action="ignore", category=FutureWarning)


def pytest_configure(config):
    config.addinivalue_line(
        "markers", "is_pt_tf_cross_test: mark test to run only when PT and TF interactions are tested"
    )
    config.addinivalue_line(
        "markers", "is_pt_flax_cross_test: mark test to run only when PT and FLAX interactions are tested"
    )
    config.addinivalue_line("markers", "is_pipeline_test: mark test to run only when pipelines are tested")
    config.addinivalue_line("markers", "is_staging_test: mark test to run only in the staging environment")
    config.addinivalue_line("markers", "accelerate_tests: mark test that require accelerate")
    config.addinivalue_line("markers", "tool_tests: mark the tool tests that are run on their specific schedule")
    config.addinivalue_line("markers", "not_device_test: mark the tests always running on cpu")


def pytest_collection_modifyitems(items):
    for item in items:
        if any(test_name in item.nodeid for test_name in NOT_DEVICE_TESTS):
            item.add_marker(pytest.mark.not_device_test)


def pytest_addoption(parser):
    from transformers.testing_utils import pytest_addoption_shared

    pytest_addoption_shared(parser)


def pytest_terminal_summary(terminalreporter):
    from transformers.testing_utils import pytest_terminal_summary_main

    make_reports = terminalreporter.config.getoption("--make-reports")
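    # the `--make-reports` option is registered by `pytest_addoption_shared` (see pytest_addoption above)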
    if make_reports:
        pytest_terminal_summary_main(terminalreporter, id=make_reports)


def pytest_sessionfinish(session, exitstatus):
    # If no tests are collected, pytest exits with code 5, which makes the CI fail.
    if exitstatus == 5:
        session.exitstatus = 0


# Doctest custom flag to ignore output.
IGNORE_RESULT = doctest.register_optionflag("IGNORE_RESULT")
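# A hypothetical docstring showing how the flag would be used:
#
#     >>> import random
#     >>> random.random()  # doctest: +IGNORE_RESULT
#     0.123
#
# With the flag set, the expected output above is not compared against the actual result.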

OutputChecker = doctest.OutputChecker


class CustomOutputChecker(OutputChecker):
    def check_output(self, want, got, optionflags):
        if IGNORE_RESULT & optionflags:
            return True
        return OutputChecker.check_output(self, want, got, optionflags)


doctest.OutputChecker = CustomOutputChecker
_pytest.doctest.DoctestModule = HfDoctestModule
doctest.DocTestParser = HfDocTestParser