# coding=utf-8
# Copyright 2022 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
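
"""Generate the dynamic CircleCI configuration for a CI run.

This script reads the test lists produced by the test fetcher (`test_list.txt`, `filtered_test_list.txt`,
`examples_test_list.txt`, `doctest_list.txt`, `test_repo_utils.txt`) from a folder, enables the corresponding
`CircleCIJob`s, and writes the resulting workflow to `generated_config.yml` in that same folder.
"""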

import argparse
import copy
import glob
import os
import random
import subprocess
import yaml
from base64 import b64encode
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Dict, List, Optional

from git import Repo


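# Environment variables injected into every job; `CircleCIJob.to_dict` copies this dict and merges the job's
# `additional_env` on top.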
COMMON_ENV_VARIABLES = {
    "OMP_NUM_THREADS": 1,
    "TRANSFORMERS_IS_CI": True,
    "PYTEST_TIMEOUT": 120,
    "RUN_PIPELINE_TESTS": False,
    "RUN_PT_TF_CROSS_TESTS": False,
    "RUN_PT_FLAX_CROSS_TESTS": False,
}
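# Default pytest options shared by all jobs. A `None` value is rendered as a bare flag (e.g. `-s`); any other value is
# rendered as `--key=value`.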
COMMON_PYTEST_OPTIONS = {"max-worker-restart": 0, "dist": "loadfile", "s": None}
DEFAULT_DOCKER_IMAGE = [{"image": "cimg/python:3.8.12"}]


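# Placeholder job used when the test fetcher selected nothing to run, so that the generated workflow is still valid.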
class EmptyJob:
    job_name = "empty"

    def to_dict(self):
        return {
            "working_directory": "~/transformers",
            "docker": copy.deepcopy(DEFAULT_DOCKER_IMAGE),
            "steps": ["checkout"],
        }


@dataclass
class CircleCIJob:
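    """Declarative description of a single CI job; `to_dict()` renders it into an entry of the generated config."""
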
    name: str
    additional_env: Dict[str, Any] = None
    cache_name: str = None
    cache_version: str = "0.7"
    docker_image: List[Dict[str, str]] = None
    install_steps: List[str] = None
    marker: Optional[str] = None
    parallelism: Optional[int] = 1
    pytest_num_workers: int = 8
    pytest_options: Dict[str, Any] = None
    resource_class: Optional[str] = "xlarge"
    tests_to_run: Optional[List[str]] = None
    working_directory: str = "~/transformers"
    # This should only be used for the doctest job!
    command_timeout: Optional[int] = None
    # The explicit checksum to use for cache load/save
    checksum: Optional[str] = None

    def __post_init__(self):
        # Deal with defaults for mutable attributes.
        if self.additional_env is None:
            self.additional_env = {}
        if self.cache_name is None:
            self.cache_name = self.name
        if self.docker_image is None:
            # Let's avoid changing the default list and make a copy.
            self.docker_image = copy.deepcopy(DEFAULT_DOCKER_IMAGE)
        if self.install_steps is None:
            self.install_steps = []
        if self.pytest_options is None:
            self.pytest_options = {}
        if isinstance(self.tests_to_run, str):
            self.tests_to_run = [self.tests_to_run]
        if self.parallelism is None:
            self.parallelism = 1

    def to_dict(self):
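        """Serialize this job into the dictionary CircleCI expects under `jobs.<job_name>` in the generated config."""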
        env = COMMON_ENV_VARIABLES.copy()
        env.update(self.additional_env)

        cache_branch_prefix = os.environ.get("CIRCLE_BRANCH", "pull")
        if cache_branch_prefix != "main":
            cache_branch_prefix = "pull"

        job = {
            "working_directory": self.working_directory,
            "docker": self.docker_image,
            "environment": env,
        }
        if self.resource_class is not None:
            job["resource_class"] = self.resource_class
        if self.parallelism is not None:
            job["parallelism"] = self.parallelism

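        # Cache keys combine the cache version, the job's cache name, a branch prefix (`main` or `pull`) and a
        # checksum of `setup.py`. When an explicit checksum is passed in (PR branch with an unmodified `setup.py`),
        # the job restores `main`'s cache but does not save a new one.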
        checksum = self.checksum if self.checksum is not None else '{{ checksum "setup.py" }}'
        save_cache = True
        if self.checksum is not None:
            # `setup.py` is not modified and we are not on `main` branch
            cache_branch_prefix = "main"
            save_cache = False

        steps = [
            "checkout",
            {"attach_workspace": {"at": "~/transformers/test_preparation"}},
            {
                "restore_cache": {
                    "keys": [
                        # check the fully-matched cache first
                        f"v{self.cache_version}-{self.cache_name}-{cache_branch_prefix}-pip-{checksum}",
                        # try the partially-matched cache from `main`
                        f"v{self.cache_version}-{self.cache_name}-main-pip-",
                        # try the general partially-matched cache
                        f"v{self.cache_version}-{self.cache_name}-{cache_branch_prefix}-pip-",
                    ]
                }
            },
            {
                "restore_cache": {
                    "keys": [
                        f"v{self.cache_version}-{self.cache_name}-{cache_branch_prefix}-site-packages-{checksum}",
                        f"v{self.cache_version}-{self.cache_name}-main-site-packages-",
                        f"v{self.cache_version}-{self.cache_name}-{cache_branch_prefix}-site-packages-",
                    ]
                }
            },
        ]
        steps.extend([{"run": l} for l in self.install_steps])
        if save_cache:
            steps.append(
                {
                    "save_cache": {
                        "key": f"v{self.cache_version}-{self.cache_name}-{cache_branch_prefix}-pip-{checksum}",
                        "paths": ["~/.cache/pip"],
                    }
                }
            )
            steps.append(
                {
                    "save_cache": {
                        "key": f"v{self.cache_version}-{self.cache_name}-{cache_branch_prefix}-site-packages-{checksum}",
                        "paths": ["~/.pyenv/versions/"],
                    }
                }
            )
        steps.append({"run": {"name": "Show installed libraries and their versions", "command": "pip freeze | tee installed.txt"}})
        steps.append({"store_artifacts": {"path": "~/transformers/installed.txt"}})

        all_options = {**COMMON_PYTEST_OPTIONS, **self.pytest_options}
        pytest_flags = [f"--{key}={value}" if (value is not None or key in ["doctest-modules"]) else f"-{key}" for key, value in all_options.items()]
        pytest_flags.append(
            f"--make-reports={self.name}" if "examples" in self.name else f"--make-reports=tests_{self.name}"
        )
        test_command = ""
        if self.command_timeout:
            test_command = f"timeout {self.command_timeout} "
        test_command += f"python -m pytest -n {self.pytest_num_workers} " + " ".join(pytest_flags)

        if self.parallelism == 1:
            if self.tests_to_run is None:
                test_command += " << pipeline.parameters.tests_to_run >>"
            else:
                test_command += " " + " ".join(self.tests_to_run)
        else:
            # We need an explicit list instead of `pipeline.parameters.tests_to_run` (which is only available at job run time)
            tests = self.tests_to_run
            if tests is None:
                folder = os.environ["test_preparation_dir"]
                test_file = os.path.join(folder, "filtered_test_list.txt")
                if os.path.exists(test_file):
                    with open(test_file) as f:
                        tests = f.read().split(" ")

            # expand the test list
            if tests == ["tests"]:
                tests = [os.path.join("tests", x) for x in os.listdir("tests")]
            expanded_tests = []
            for test in tests:
                if test.endswith(".py"):
                    expanded_tests.append(test)
                elif test == "tests/models":
                    expanded_tests.extend([os.path.join(test, x) for x in os.listdir(test)])
                elif test == "tests/pipelines":
                    expanded_tests.extend([os.path.join(test, x) for x in os.listdir(test)])
                else:
                    expanded_tests.append(test)
            # Avoid long tests always being collected together
            random.shuffle(expanded_tests)
            tests = " ".join(expanded_tests)

            # Each executor to run ~10 tests
            n_executors = max(len(expanded_tests) // 10, 1)
            # Avoid empty test list on some executor(s) or launching too many executors
            if n_executors > self.parallelism:
                n_executors = self.parallelism
            job["parallelism"] = n_executors

            # Needs to be newline-separated for the `circleci tests split` command below
            command = f'echo {tests} | tr " " "\\n" >> tests.txt'
            steps.append({"run": {"name": "Get tests", "command": command}})

            command = 'TESTS=$(circleci tests split tests.txt) && echo $TESTS > splitted_tests.txt'
            steps.append({"run": {"name": "Split tests", "command": command}})

            steps.append({"store_artifacts": {"path": "~/transformers/tests.txt"}})
            steps.append({"store_artifacts": {"path": "~/transformers/splitted_tests.txt"}})

            test_command = ""
            if self.command_timeout:
                test_command = f"timeout {self.command_timeout} "
            test_command += f"python -m pytest -n {self.pytest_num_workers} " + " ".join(pytest_flags)
            test_command += " $(cat splitted_tests.txt)"
        if self.marker is not None:
            test_command += f" -m {self.marker}"

        if self.name == "pr_documentation_tests":
            # can't use ` | tee tests_output.txt` as usual
            test_command += " > tests_output.txt"
            # Save the return code, so we can check whether it was a timeout in the next step.
            test_command += '; touch "$?".txt'
            # Never fail the test step for the doctest job. We will check the results in the next step, and fail that
            # step instead if actual test failures are found. This avoids a timeout being reported as a test failure.
            test_command = f"({test_command}) || true"
        else:
            test_command += " | tee tests_output.txt"
        steps.append({"run": {"name": "Run tests", "command": test_command}})

        # A return code of `124` means the previous (pytest) step timed out.
        if self.name == "pr_documentation_tests":
            checkout_doctest_command = 'if [ -s reports/tests_pr_documentation_tests/failures_short.txt ]; '
            checkout_doctest_command += 'then echo "some test failed"; '
            checkout_doctest_command += 'cat reports/tests_pr_documentation_tests/failures_short.txt; '
            checkout_doctest_command += 'cat reports/tests_pr_documentation_tests/summary_short.txt; exit -1; '
            checkout_doctest_command += 'elif [ -s reports/tests_pr_documentation_tests/stats.txt ]; then echo "All tests pass!"; '
            checkout_doctest_command += 'elif [ -f 124.txt ]; then echo "doctest timeout!"; else echo "other fatal error"; exit -1; fi;'
            steps.append({"run": {"name": "Check doctest results", "command": checkout_doctest_command}})

        steps.append({"store_artifacts": {"path": "~/transformers/tests_output.txt"}})
        steps.append({"store_artifacts": {"path": "~/transformers/reports"}})
        job["steps"] = steps
        return job

    @property
    def job_name(self):
        return self.name if "examples" in self.name else f"tests_{self.name}"


# JOBS
torch_and_tf_job = CircleCIJob(
    "torch_and_tf",
    additional_env={"RUN_PT_TF_CROSS_TESTS": True},
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng git-lfs cmake",
        "git lfs install",
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[sklearn,tf-cpu,torch,testing,sentencepiece,torch-speech,vision]",
        "pip install -U --upgrade-strategy eager tensorflow_probability",
        "pip install -U --upgrade-strategy eager git+https://github.com/huggingface/accelerate",
    ],
    marker="is_pt_tf_cross_test",
    pytest_options={"rA": None, "durations": 0},
)


torch_and_flax_job = CircleCIJob(
    "torch_and_flax",
    additional_env={"RUN_PT_FLAX_CROSS_TESTS": True},
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng",
        "pip install -U --upgrade-strategy eager --upgrade pip",
        "pip install -U --upgrade-strategy eager .[sklearn,flax,torch,testing,sentencepiece,torch-speech,vision]",
        "pip install -U --upgrade-strategy eager git+https://github.com/huggingface/accelerate",
    ],
    marker="is_pt_flax_cross_test",
    pytest_options={"rA": None, "durations": 0},
)


torch_job = CircleCIJob(
    "torch",
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng time",
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm]",
        "pip install -U --upgrade-strategy eager git+https://github.com/huggingface/accelerate",
    ],
    parallelism=1,
    pytest_num_workers=8,
)


tf_job = CircleCIJob(
    "tf",
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng cmake",
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[sklearn,tf-cpu,testing,sentencepiece,tf-speech,vision]",
        "pip install -U --upgrade-strategy eager tensorflow_probability",
    ],
    parallelism=1,
)


flax_job = CircleCIJob(
    "flax",
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng",
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[flax,testing,sentencepiece,flax-speech,vision]",
    ],
    parallelism=1,
)


pipelines_torch_job = CircleCIJob(
    "pipelines_torch",
    additional_env={"RUN_PIPELINE_TESTS": True},
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng",
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm,video]",
    ],
    marker="is_pipeline_test",
)


pipelines_tf_job = CircleCIJob(
    "pipelines_tf",
    additional_env={"RUN_PIPELINE_TESTS": True},
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y cmake",
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[sklearn,tf-cpu,testing,sentencepiece,vision]",
        "pip install -U --upgrade-strategy eager tensorflow_probability",
    ],
    marker="is_pipeline_test",
)


custom_tokenizers_job = CircleCIJob(
    "custom_tokenizers",
    additional_env={"RUN_CUSTOM_TOKENIZERS": True},
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y cmake",
        {
            "name": "install jumanpp",
            "command":
                "wget https://github.com/ku-nlp/jumanpp/releases/download/v2.0.0-rc3/jumanpp-2.0.0-rc3.tar.xz\n"
                "tar xvf jumanpp-2.0.0-rc3.tar.xz\n"
                "mkdir jumanpp-2.0.0-rc3/bld\n"
                "cd jumanpp-2.0.0-rc3/bld\n"
                "sudo cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local\n"
                "sudo make install\n",
        },
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[ja,testing,sentencepiece,jieba,spacy,ftfy,rjieba]",
        "python -m unidic download",
    ],
    parallelism=None,
    resource_class=None,
    tests_to_run=[
        "./tests/models/bert_japanese/test_tokenization_bert_japanese.py",
        "./tests/models/openai/test_tokenization_openai.py",
        "./tests/models/clip/test_tokenization_clip.py",
    ],
)


examples_torch_job = CircleCIJob(
    "examples_torch",
    cache_name="torch_examples",
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng",
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[sklearn,torch,sentencepiece,testing,torch-speech]",
        "pip install -U --upgrade-strategy eager -r examples/pytorch/_tests_requirements.txt",
    ],
)


examples_tensorflow_job = CircleCIJob(
    "examples_tensorflow",
    cache_name="tensorflow_examples",
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y cmake",
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[sklearn,tensorflow,sentencepiece,testing]",
        "pip install -U --upgrade-strategy eager -r examples/tensorflow/_tests_requirements.txt",
    ],
)


examples_flax_job = CircleCIJob(
    "examples_flax",
    cache_name="flax_examples",
    install_steps=[
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[flax,testing,sentencepiece]",
        "pip install -U --upgrade-strategy eager -r examples/flax/_tests_requirements.txt",
    ],
)


hub_job = CircleCIJob(
    "hub",
    additional_env={"HUGGINGFACE_CO_STAGING": True},
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install git-lfs",
        'git config --global user.email "ci@dummy.com"',
        'git config --global user.name "ci"',
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[torch,sentencepiece,testing,vision]",
    ],
    marker="is_staging_test",
    pytest_num_workers=1,
)


onnx_job = CircleCIJob(
    "onnx",
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y cmake",
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[torch,tf,testing,sentencepiece,onnxruntime,vision,rjieba]",
    ],
    pytest_options={"k onnx": None},
    pytest_num_workers=1,
)


exotic_models_job = CircleCIJob(
    "exotic_models",
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev",
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[torch,testing,vision]",
        "pip install -U --upgrade-strategy eager torchvision",
        "pip install -U --upgrade-strategy eager scipy",
        "pip install -U --upgrade-strategy eager 'git+https://github.com/facebookresearch/detectron2.git'",
        "sudo apt install tesseract-ocr",
        "pip install -U --upgrade-strategy eager pytesseract",
        "pip install -U --upgrade-strategy eager natten",
        # TODO (ydshieh): Remove this line once `https://github.com/facebookresearch/detectron2/issues/5010` is resolved
        'pip install -U --upgrade-strategy eager "Pillow<10.0.0"',
    ],
    tests_to_run=[
        "tests/models/*layoutlmv*",
        "tests/models/*nat",
        "tests/models/deta",
    ],
    pytest_num_workers=1,
    pytest_options={"durations": 100},
)


repo_utils_job = CircleCIJob(
    "repo_utils",
    install_steps=[
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager .[quality,testing,torch]",
    ],
    parallelism=None,
    pytest_num_workers=1,
    resource_class="large",
    tests_to_run="tests/repo_utils",
)


# We also include a `dummy.py` file in the files to be doc-tested, to prevent an edge-case failure. Otherwise, pytest
# hangs forever during test collection while showing `collecting 0 items / 21 errors`. (To see this, we have to remove
# the bash output redirection.)
py_command = 'from utils.tests_fetcher import get_doctest_files; to_test = get_doctest_files() + ["dummy.py"]; to_test = " ".join(to_test); print(to_test)'
py_command = f"$(python3 -c '{py_command}')"
command = f'echo "{py_command}" > pr_documentation_tests_temp.txt'
doc_test_job = CircleCIJob(
    "pr_documentation_tests",
    additional_env={"TRANSFORMERS_VERBOSITY": "error", "DATASETS_VERBOSITY": "error", "SKIP_CUDA_DOCTEST": "1"},
    install_steps=[
        "sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng time ffmpeg",
        "pip install --upgrade --upgrade-strategy eager pip",
        "pip install -U --upgrade-strategy eager -e .[dev]",
        "pip install -U --upgrade-strategy eager git+https://github.com/huggingface/accelerate",
        "pip install --upgrade --upgrade-strategy eager pytest pytest-sugar",
        "pip install -U --upgrade-strategy eager natten",
        "find -name __pycache__ -delete",
        "find . -name \*.pyc -delete",
        # Add an empty file to keep the test step running correctly even when no file is selected to be tested.
        "touch dummy.py",
        {
            "name": "Get files to test",
            "command": command,
        },
        {
            "name": "Show information in `Get files to test`",
            "command":
                "cat pr_documentation_tests_temp.txt"
        },
        {
            "name": "Get the last line in `pr_documentation_tests.txt`",
            "command":
                "tail -n1 pr_documentation_tests_temp.txt | tee pr_documentation_tests.txt"
        },
    ],
    tests_to_run="$(cat pr_documentation_tests.txt)",  # noqa
    pytest_options={"-doctest-modules": None, "doctest-glob": "*.md", "dist": "loadfile", "rvsA": None},
    command_timeout=1200,  # test cannot run longer than 1200 seconds
    pytest_num_workers=1,
)

REGULAR_TESTS = [
    torch_and_tf_job,
    torch_and_flax_job,
    torch_job,
    tf_job,
    flax_job,
    custom_tokenizers_job,
    hub_job,
    onnx_job,
    exotic_models_job,
]
EXAMPLES_TESTS = [
    examples_torch_job,
    examples_tensorflow_job,
    examples_flax_job,
]
PIPELINE_TESTS = [
    pipelines_torch_job,
    pipelines_tf_job,
]
REPO_UTIL_TESTS = [repo_utils_job]
DOC_TESTS = [doc_test_job]


def get_main_setup_checksum():
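    """Return the checksum of `setup.py` on the `main` branch: its sha256 digest, base64-encoded in the same format as
    the value produced by CircleCI's `{{ checksum "..." }}` template. The current branch is restored afterwards."""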

    PATH_TO_REPO = Path(__file__).parent.parent.resolve()
    repo = Repo(PATH_TO_REPO)

    current_head = repo.head.ref
    main_head = repo.refs.main

    setup_file_path = os.path.join(PATH_TO_REPO, "setup.py")

    main_head.checkout()
    proc = subprocess.Popen(["sha256sum", f"{setup_file_path}"], stdout=subprocess.PIPE)
    checksum = proc.stdout.read().decode().split(" ")[0]
    checksum = b64encode(bytes.fromhex(checksum)).decode()

    # go back to the original branch
    current_head.checkout()

    return checksum


def create_circleci_config(folder=None):
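    """Read the test lists written by the test fetcher in `folder` and generate `generated_config.yml` there."""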
    if folder is None:
        folder = os.getcwd()
    # Used in CircleCIJob.to_dict() to expand the test list (when parallelism is used)
    os.environ["test_preparation_dir"] = folder

    checksum = None
    # If we are already on `main`, don't try to use the latest commit on `main`, to avoid a (rare) race condition
    # where multiple commits are merged into `main` at the same time.
    if os.environ.get("CIRCLE_BRANCH", "pull") != "main":
        # Check if `setup.py` is modified.
        summary_file = os.path.join(folder, "tests_fetched_summary.txt")
        if os.path.exists(summary_file):
            with open(summary_file) as f:
                tests_fetched_summary = f.read()
                setup_file_modified = "### TEST TO RUN ###\n- tests\n" in tests_fetched_summary
                if not setup_file_modified:
                    # If not, use the `setup.py` of the latest commit on the `main` branch to compute the cache
                    # checksum.
                    checksum = get_main_setup_checksum()

    jobs = []
    all_test_file = os.path.join(folder, "test_list.txt")
    if os.path.exists(all_test_file):
        with open(all_test_file) as f:
            all_test_list = f.read()
    else:
        all_test_list = []
    if len(all_test_list) > 0:
        jobs.extend(PIPELINE_TESTS)

    test_file = os.path.join(folder, "filtered_test_list.txt")
    if os.path.exists(test_file):
        with open(test_file) as f:
            test_list = f.read()
    else:
        test_list = []
    if len(test_list) > 0:
        jobs.extend(REGULAR_TESTS)

        extended_tests_to_run = set(test_list.split())
        # Extend the test files for cross test jobs
        for job in jobs:
            if job.job_name in ["tests_torch_and_tf", "tests_torch_and_flax"]:
                for test_path in copy.copy(extended_tests_to_run):
                    dir_path, fn = os.path.split(test_path)
                    if fn.startswith("test_modeling_tf_"):
                        fn = fn.replace("test_modeling_tf_", "test_modeling_")
                    elif fn.startswith("test_modeling_flax_"):
                        fn = fn.replace("test_modeling_flax_", "test_modeling_")
                    else:
                        if job.job_name == "tests_torch_and_tf":
                            fn = fn.replace("test_modeling_", "test_modeling_tf_")
                        elif job.job_name == "tests_torch_and_flax":
                            fn = fn.replace("test_modeling_", "test_modeling_flax_")
                    new_test_file = str(os.path.join(dir_path, fn))
                    if os.path.isfile(new_test_file):
                        if new_test_file not in extended_tests_to_run:
                            extended_tests_to_run.add(new_test_file)
        extended_tests_to_run = sorted(extended_tests_to_run)
        for job in jobs:
            if job.job_name in ["tests_torch_and_tf", "tests_torch_and_flax"]:
                job.tests_to_run = extended_tests_to_run
        fn = "filtered_test_list_cross_tests.txt"
        f_path = os.path.join(folder, fn)
        with open(f_path, "w") as fp:
            fp.write(" ".join(extended_tests_to_run))

    example_file = os.path.join(folder, "examples_test_list.txt")
    if os.path.exists(example_file) and os.path.getsize(example_file) > 0:
        with open(example_file, "r", encoding="utf-8") as f:
            example_tests = f.read()
        for job in EXAMPLES_TESTS:
            framework = job.name.replace("examples_", "").replace("torch", "pytorch")
            if example_tests == "all":
                job.tests_to_run = [f"examples/{framework}"]
            else:
                job.tests_to_run = [f for f in example_tests.split(" ") if f.startswith(f"examples/{framework}")]

            if len(job.tests_to_run) > 0:
                jobs.append(job)

    doctest_file = os.path.join(folder, "doctest_list.txt")
    if os.path.exists(doctest_file):
        with open(doctest_file) as f:
            doctest_list = f.read()
    else:
        doctest_list = []
    if len(doctest_list) > 0:
        jobs.extend(DOC_TESTS)

    repo_util_file = os.path.join(folder, "test_repo_utils.txt")
    if os.path.exists(repo_util_file) and os.path.getsize(repo_util_file) > 0:
        jobs.extend(REPO_UTIL_TESTS)

    if len(jobs) == 0:
        jobs = [EmptyJob()]
    config = {"version": "2.1"}
    config["parameters"] = {
        # Only used to accept the parameters from the trigger
        "nightly": {"type": "boolean", "default": False},
        "tests_to_run": {"type": "string", "default": test_list},
    }

    for job in jobs:
        job.checksum = checksum

    config["jobs"] = {j.job_name: j.to_dict() for j in jobs}
    config["workflows"] = {"version": 2, "run_tests": {"jobs": [j.job_name for j in jobs]}}
    with open(os.path.join(folder, "generated_config.yml"), "w") as f:
        f.write(yaml.dump(config, indent=2, width=1000000, sort_keys=False))


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--fetcher_folder", type=str, default=None, help="Folder in which the test fetcher stored its output (the test list files)."
    )
    args = parser.parse_args()

    create_circleci_config(args.fetcher_folder)