#!/usr/bin/env python3

"""
This script should use a very simple, functional programming style.
Avoid Jinja macros in favor of native Python functions.

Don't go overboard on code generation; use Python only to generate
content that can't be easily declared statically using CircleCI's YAML API.

Data declarations (e.g. the nested loops for defining the configuration matrix)
should be at the top of the file for easy updating.

See this comment for design rationale:
https://github.com/pytorch/vision/pull/1321#issuecomment-531033978
"""
16

17
18
import os.path

19
import jinja2
20
import yaml
21
from jinja2 import select_autoescape
22

23

24
PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"]
25

26
27
RC_PATTERN = r"/v[0-9]+(\.[0-9]+)*-rc[0-9]+/"


def build_workflows(prefix="", filter_branch=None, upload=False, indentation=6, windows_latest_only=False):
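    """Emit the binary build workflows (wheel/conda x OS x Python x accelerator) as indented YAML.

    The returned snippet is spliced into config.yml by the Jinja template; when no
    filter_branch is given, the docs build/upload jobs are appended as well.
    """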
    w = []
    for btype in ["wheel", "conda"]:
        for os_type in ["linux", "macos", "win"]:
            python_versions = PYTHON_VERSIONS
            cu_versions_dict = {
                "linux": ["cpu", "cu117", "cu118", "rocm5.2", "rocm5.3"],
                "win": ["cpu", "cu117", "cu118"],
                "macos": ["cpu"],
            }
            cu_versions = cu_versions_dict[os_type]
            for python_version in python_versions:
                for cu_version in cu_versions:
                    # ROCm conda packages not yet supported
                    if cu_version.startswith("rocm") and btype == "conda":
                        continue
                    for unicode in [False]:
                        fb = filter_branch
                        if (
                            windows_latest_only
                            and os_type == "win"
                            and filter_branch is None
                            and (
                                python_version != python_versions[-1]
                                or (cu_version not in [cu_versions[0], cu_versions[-1]])
                            )
                        ):
                            fb = "main"
                        if not fb and (
                            os_type == "linux" and cu_version == "cpu" and btype == "wheel" and python_version == "3.8"
                        ):
                            # the fields must match the build_docs "requires" dependency
                            fb = "/.*/"

                        # Disable all Linux Wheels Workflows from CircleCI
                        # since those will now be done through Nova. We'll keep
                        # around the py3.8 CPU Linux Wheels build since the docs
                        # job depends on it.
                        if os_type == "linux" and btype == "wheel":
                            if not (python_version == "3.8" and cu_version == "cpu"):
                                continue

                        # Disable all macOS Wheels Workflows from CircleCI.
                        if os_type == "macos" and btype == "wheel":
                            continue

                        # Disable all non-Windows Conda workflows
                        if os_type != "win" and btype == "conda":
                            continue

                        # Not supporting Python 3.11 conda packages at the
                        # moment since the necessary dependencies are not
                        # available. Windows 3.11 Wheels will be built from
                        # CircleCI here, however.

                        w += workflow_pair(
                            btype, os_type, python_version, cu_version, unicode, prefix, upload, filter_branch=fb
                        )

    if not filter_branch:
        # Build on every pull request, but upload only on nightly and tags
        w += build_doc_job("/.*/")
        w += upload_doc_job("nightly")

    return indent(indentation, w)


def workflow_pair(btype, os_type, python_version, cu_version, unicode, prefix="", upload=False, *, filter_branch=None):
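    """Return the build workflow for one configuration, plus its upload workflow when uploads are enabled."""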
    w = []
    unicode_suffix = "u" if unicode else ""
    base_workflow_name = f"{prefix}binary_{os_type}_{btype}_py{python_version}{unicode_suffix}_{cu_version}"

    w.append(
        generate_base_workflow(
            base_workflow_name, python_version, cu_version, unicode, os_type, btype, filter_branch=filter_branch
        )
    )

    # For the remaining py3.8 Linux Wheels job left around for the docs build,
    # we'll disable uploads.
    if os_type == "linux" and btype == "wheel":
        upload = False

    if upload:
        w.append(generate_upload_workflow(base_workflow_name, os_type, btype, cu_version, filter_branch=filter_branch))
        # disable smoke tests, they are broken and need to be fixed
        # if filter_branch == "nightly" and os_type in ["linux", "win"]:
        #     pydistro = "pip" if btype == "wheel" else "conda"
        #     w.append(generate_smoketest_workflow(pydistro, base_workflow_name, filter_branch, python_version, os_type))

    return w


def build_doc_job(filter_branch):
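    """Return the build_docs job, which depends on the py3.8 CPU Linux wheel build."""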
    job = {
        "name": "build_docs",
        "python_version": "3.8",
        "requires": [
            "binary_linux_wheel_py3.8_cpu",
        ],
    }

    if filter_branch:
        job["filters"] = gen_filter_branch_tree(filter_branch, tags_list=RC_PATTERN)
    return [{"build_docs": job}]


def upload_doc_job(filter_branch):
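    """Return the upload_docs job, which runs after build_docs under the org-member context."""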
    job = {
        "name": "upload_docs",
        "context": "org-member",
        "python_version": "3.8",
        "requires": [
            "build_docs",
        ],
    }

    if filter_branch:
        job["filters"] = gen_filter_branch_tree(filter_branch, tags_list=RC_PATTERN)
    return [{"upload_docs": job}]


manylinux_images = {
    "cu117": "pytorch/manylinux-cuda117",
    "cu118": "pytorch/manylinux-cuda118",
}


def get_manylinux_image(cu_version):
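    """Map a cu_version string ("cpu", "cuXYZ" or "rocmX.Y") to the corresponding manylinux Docker image."""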
    if cu_version == "cpu":
        return "pytorch/manylinux-cpu"
    elif cu_version.startswith("cu"):
        cu_suffix = cu_version[len("cu") :]
        return f"pytorch/manylinux-cuda{cu_suffix}"
    elif cu_version.startswith("rocm"):
        rocm_suffix = cu_version[len("rocm") :]
        return f"pytorch/manylinux-rocm:{rocm_suffix}"


def get_conda_image(cu_version):
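    """Map a cu_version string ("cpu" or "cuXYZ") to the corresponding conda-builder Docker image."""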
    if cu_version == "cpu":
        return "pytorch/conda-builder:cpu"
    elif cu_version.startswith("cu"):
        cu_suffix = cu_version[len("cu") :]
        return f"pytorch/conda-builder:cuda{cu_suffix}"


def generate_base_workflow(
    base_workflow_name, python_version, cu_version, unicode, os_type, btype, *, filter_branch=None
):
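    """Return a single binary build workflow entry keyed by binary_{os_type}_{btype}."""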
    d = {
        "name": base_workflow_name,
        "python_version": python_version,
        "cu_version": cu_version,
    }

    if os_type != "win" and unicode:
        d["unicode_abi"] = "1"

    if os_type != "win":
        d["wheel_docker_image"] = get_manylinux_image(cu_version)
        # ROCm conda packages not yet supported
        if "rocm" not in cu_version:
            d["conda_docker_image"] = get_conda_image(cu_version)

    if filter_branch is not None:
        d["filters"] = {
            "branches": {"only": filter_branch},
            "tags": {
                # Using a raw string here to avoid having to escape
                # anything
                "only": r"/v[0-9]+(\.[0-9]+)*-rc[0-9]+/"
            },
        }

    w = f"binary_{os_type}_{btype}"
    return {w: d}


def gen_filter_branch_tree(*branches, tags_list=None):
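    """Build a CircleCI filters dict restricting a job to the given branches and, optionally, a tag pattern."""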
    filter_dict = {"branches": {"only": [b for b in branches]}}
    if tags_list is not None:
        filter_dict["tags"] = {"only": tags_list}
    return filter_dict


def generate_upload_workflow(base_workflow_name, os_type, btype, cu_version, *, filter_branch=None):
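    """Return the upload workflow entry that publishes the artifact produced by base_workflow_name."""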
    d = {
        "name": f"{base_workflow_name}_upload",
        "context": "org-member",
        "requires": [base_workflow_name],
    }

    if btype == "wheel":
        d["subfolder"] = "" if os_type == "macos" else cu_version + "/"

    if filter_branch is not None:
        d["filters"] = {
            "branches": {"only": filter_branch},
            "tags": {
                # Using a raw string here to avoid having to escape
                # anything
                "only": r"/v[0-9]+(\.[0-9]+)*-rc[0-9]+/"
            },
        }

    return {f"binary_{btype}_upload": d}


def generate_smoketest_workflow(pydistro, base_workflow_name, filter_branch, python_version, os_type):
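    """Return a smoke-test workflow entry that runs after the corresponding upload job.

    Its only call site in workflow_pair is currently commented out.
    """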

    required_build_suffix = "_upload"
    required_build_name = base_workflow_name + required_build_suffix

    smoke_suffix = f"smoke_test_{pydistro}"
    d = {
        "name": f"{base_workflow_name}_{smoke_suffix}",
        "requires": [required_build_name],
        "python_version": python_version,
    }

    if filter_branch:
        d["filters"] = gen_filter_branch_tree(filter_branch)

    return {f"smoke_test_{os_type}_{pydistro}": d}


def indent(indentation, data_list):
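    """Dump data_list as YAML and indent every line after the first by `indentation` spaces."""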
    return ("\n" + " " * indentation).join(yaml.dump(data_list, default_flow_style=False).splitlines())


def unittest_workflows(indentation=6):
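    """Emit the unittest job matrix: CPU jobs for windows/macos and GPU jobs for linux/windows, across PYTHON_VERSIONS."""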
    jobs = []
    for os_type in ["linux", "windows", "macos"]:
        for device_type in ["cpu", "gpu"]:
            if os_type == "macos" and device_type == "gpu":
                continue
            if os_type == "linux" and device_type == "cpu":
                continue

            for i, python_version in enumerate(PYTHON_VERSIONS):
                job = {
                    "name": f"unittest_{os_type}_{device_type}_py{python_version}",
                    "python_version": python_version,
                }

                if device_type == "gpu":
                    if python_version != "3.8":
                        job["filters"] = gen_filter_branch_tree("main", "nightly")
                    job["cu_version"] = "cu117"
                else:
                    job["cu_version"] = "cpu"

                jobs.append({f"unittest_{os_type}_{device_type}": job})

    return indent(indentation, jobs)


def cmake_workflows(indentation=6):
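    """Emit the cmake build jobs on Python 3.8: CPU for every OS, plus GPU for linux and windows."""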
    jobs = []
    python_version = "3.8"
    for os_type in ["linux", "windows", "macos"]:
        # Skip OSX CUDA
        device_types = ["cpu", "gpu"] if os_type != "macos" else ["cpu"]
        for device in device_types:
            job = {"name": f"cmake_{os_type}_{device}", "python_version": python_version}

            job["cu_version"] = "cu117" if device == "gpu" else "cpu"
            if device == "gpu" and os_type == "linux":
                job["wheel_docker_image"] = "pytorch/manylinux-cuda116"
            jobs.append({f"cmake_{os_type}_{device}": job})

    return indent(indentation, jobs)


def ios_workflows(indentation=6, nightly=False):
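    """Emit the iOS libtorchvision_ops build jobs (x86_64 simulator and arm64 device), plus an upload job on nightly."""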
    jobs = []
    build_job_names = []
    name_prefix = "nightly_" if nightly else ""
    env_prefix = "nightly-" if nightly else ""
    for arch, platform in [("x86_64", "SIMULATOR"), ("arm64", "OS")]:
        name = f"{name_prefix}binary_libtorchvision_ops_ios_12.0.0_{arch}"
        build_job_names.append(name)
        build_job = {
            "build_environment": f"{env_prefix}binary-libtorchvision_ops-ios-12.0.0-{arch}",
            "ios_arch": arch,
            "ios_platform": platform,
            "name": name,
        }
        if nightly:
            build_job["filters"] = gen_filter_branch_tree("nightly")
        jobs.append({"binary_ios_build": build_job})

    if nightly:
        upload_job = {
            "build_environment": f"{env_prefix}binary-libtorchvision_ops-ios-12.0.0-upload",
            "context": "org-member",
            "filters": gen_filter_branch_tree("nightly"),
            "requires": build_job_names,
        }
        jobs.append({"binary_ios_upload": upload_job})

    return indent(indentation, jobs)


def android_workflows(indentation=6, nightly=False):
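    """Emit the Android libtorchvision_ops job: the upload job on nightly runs, the build job otherwise."""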
    jobs = []
    build_job_names = []
    name_prefix = "nightly_" if nightly else ""
    env_prefix = "nightly-" if nightly else ""

    name = f"{name_prefix}binary_libtorchvision_ops_android"
    build_job_names.append(name)
    build_job = {
        "build_environment": f"{env_prefix}binary-libtorchvision_ops-android",
        "name": name,
    }

    if nightly:
        upload_job = {
            "build_environment": f"{env_prefix}binary-libtorchvision_ops-android-upload",
            "context": "org-member",
            "filters": gen_filter_branch_tree("nightly"),
            "name": f"{name_prefix}binary_libtorchvision_ops_android_upload",
        }
        jobs.append({"binary_android_upload": upload_job})
    else:
        jobs.append({"binary_android_build": build_job})

    return indent(indentation, jobs)


if __name__ == "__main__":
    d = os.path.dirname(__file__)
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(d),
        lstrip_blocks=True,
        autoescape=select_autoescape(enabled_extensions=("html", "xml")),
        keep_trailing_newline=True,
    )

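    # Render config.yml.in, passing the workflow generators so the template can call them
    # with the indentation it needs, and write the result to config.yml.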
    with open(os.path.join(d, "config.yml"), "w") as f:
        f.write(
            env.get_template("config.yml.in").render(
                build_workflows=build_workflows,
                unittest_workflows=unittest_workflows,
                cmake_workflows=cmake_workflows,
                ios_workflows=ios_workflows,
                android_workflows=android_workflows,
            )
        )