"vscode:/vscode.git/clone" did not exist on "67cdf8828f9bed16fe0a0c93fccd7cb63e9f10df"
# Copyright 2025 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Simple check list from AllenNLP repo: https://github.com/allenai/allennlp/blob/main/setup.py

To create the package for PyPI.

1. Run `make pre-release` (or `make pre-patch` for a patch release) then run `make fix-copies` to fix the index of the
   documentation.

   If releasing on a special branch, copy the updated README.md on the main branch for the commit you will make
   for the post-release and run `make fix-copies` on the main branch as well.

2. Unpin specific versions from setup.py that use a git install.
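   (Hypothetical example: a temporary pin like "accelerate @ git+https://github.com/huggingface/accelerate.git"
   would be restored to its regular specifier, e.g. "accelerate>=0.31.0" in the `_deps` list below.)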

3. Checkout the release branch (v<RELEASE>-release, for example v0.36.0-release), and commit these changes with the
   message: "Release: <RELEASE>" and push.
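   One possible sequence (assuming the release branch already exists locally):
   git checkout v<RELEASE>-release
   git commit -am "Release: <RELEASE>"
   git push origin v<RELEASE>-release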

4. Manually trigger the "Nightly and release tests on main/release branch" workflow from the release branch. Wait for
   the tests to complete. We can safely ignore the known test failures.

5. Wait for the tests on main to be completed and be green (otherwise revert and fix bugs).

6. Add a tag in git to mark the release: "git tag v<RELEASE> -m 'Adds tag v<RELEASE> for PyPI'"
   Push the tag to git: git push --tags origin v<RELEASE>-release

7. Build both the sources and the wheel. Do not change anything in setup.py between
   creating the wheel and the source distribution (obviously).

   For the wheel, run: "python setup.py bdist_wheel" in the top level directory
   (This will build a wheel for the Python version you use to build it).

   For the sources, run: "python setup.py sdist"
   You should now have a /dist directory with both .whl and .tar.gz source versions.

   Long story cut short, you need to run both before you can upload the distribution to the
   test PyPI and the actual PyPI servers:

   python setup.py bdist_wheel && python setup.py sdist

8. Check that everything looks correct by uploading the package to the PyPI test server:

   twine upload dist/* -r pypitest
   (PyPI suggests using twine, as other methods upload files via plaintext.)
   You may have to specify the repository URL; in that case, use the following command:
   twine upload dist/* -r pypitest --repository-url=https://test.pypi.org/legacy/

   Check that you can install it in a virtualenv by running:
   pip install -i https://testpypi.python.org/pypi diffusers

   If you are testing from a Colab Notebook, for instance, then do:
   pip install diffusers && pip uninstall diffusers
   pip install -i https://testpypi.python.org/pypi diffusers
   (The first line installs diffusers' dependencies from the main index and then removes diffusers itself, so that
   the second line can pull the candidate release from the test index without needing its dependencies there.)

   Check you can run the following commands:
   python -c "from diffusers import __version__; print(__version__)"
   python -c "from diffusers import DiffusionPipeline; pipe = DiffusionPipeline.from_pretrained('fusing/unet-ldm-dummy-update'); pipe()"
   python -c "from diffusers import DiffusionPipeline; pipe = DiffusionPipeline.from_pretrained('hf-internal-testing/tiny-stable-diffusion-pipe', safety_checker=None); pipe('ah suh du')"
   python -c "from diffusers import *"

9. Upload the final version to the actual PyPI:
   twine upload dist/* -r pypi

10. Prepare the release notes and publish them on GitHub once everything is looking hunky-dory. You can use the following
    Space to fetch all the commits applicable for the release: https://huggingface.co/spaces/sayakpaul/auto-release-notes-diffusers.
    It automatically fetches the correct tag and branch but also provides the option to configure them.
    `tag` should be the previous release tag (v0.26.1, for example), and `branch` should be
    the latest release branch (v0.27.0-release, for example). This covers all commits that have happened on branch
    v0.27.0-release after the tag v0.26.1 was created.

11. Run `make post-release` (or, for a patch release, `make post-patch`). If you were on a branch for the release,
    you need to go back to main before executing this.
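    For example: git checkout main && make post-release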
"""

import os
import re
import sys

from setuptools import Command, find_packages, setup


# IMPORTANT:
# 1. all dependencies should be listed here with their version requirements if any
# 2. once modified, run: `make deps_table_update` to update src/diffusers/dependency_versions_table.py
_deps = [
    "Pillow",  # keep the PIL.Image.Resampling deprecation away
    "accelerate>=0.31.0",
    "compel==0.1.8",
    "datasets",
    "filelock",
    "flax>=0.4.1",
    "hf-doc-builder>=0.3.0",
    "httpx<1.0.0",
    "huggingface-hub>=0.34.0,<2.0",
    "requests-mock==1.10.0",
    "importlib_metadata",
    "invisible-watermark>=0.2.0",
    "isort>=5.5.4",
    "jax>=0.4.1",
    "jaxlib>=0.4.1",
    "Jinja2",
    "k-diffusion==0.0.12",
    "torchsde",
    "note_seq",
    "librosa",
    "numpy",
    "parameterized",
    "peft>=0.17.0",
    "protobuf>=3.20.3,<4",
    "pytest",
    "pytest-timeout",
    "pytest-xdist",
    "python>=3.8.0",
    "ruff==0.9.10",
    "safetensors>=0.3.1",
    "sentencepiece>=0.1.91,!=0.1.92",
    "GitPython<3.1.19",
    "scipy",
    "onnx",
    "optimum_quanto>=0.2.6",
    "gguf>=0.10.0",
    "torchao>=0.7.0",
    "bitsandbytes>=0.43.3",
    "nvidia_modelopt[hf]>=0.33.1",
    "regex!=2019.12.17",
    "requests",
    "tensorboard",
    "tiktoken>=0.7.0",
    "torch>=1.4",
    "torchvision",
    "transformers>=4.41.2",
    "urllib3<=2.0.0",
    "black",
    "phonemizer",
    "opencv-python",
    "timm",
]

# this is a lookup table with items like:
#
# huggingface-hub: "huggingface-hub>=0.34.0,<2.0"
# packaging: "packaging"
#
# some of the values are versioned whereas others aren't.
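# For example, the regex below splits "peft>=0.17.0" into ("peft>=0.17.0", "peft"),
# which becomes the entry deps["peft"] == "peft>=0.17.0".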
deps = {b: a for a, b in (re.findall(r"^(([^!=<>~]+)(?:[!=<>~].*)?$)", x)[0] for x in _deps)}

# since we save this data in src/diffusers/dependency_versions_table.py it can be easily accessed from
# anywhere. If you need to quickly access the data from this table in a shell, you can do so easily with:
#
# python -c 'import sys; from diffusers.dependency_versions_table import deps; \
# print(" ".join([deps[x] for x in sys.argv[1:]]))' torch transformers
#
# Just pass the desired package names to that script as it's shown with 2 packages above.
#
# If diffusers is not yet installed and the work is done from the cloned repo remember to add `PYTHONPATH=src` to the script above
#
# You can then feed this for example to `pip`:
#
# pip install -U $(python -c 'import sys; from diffusers.dependency_versions_table import deps; \
# print(" ".join([deps[x] for x in sys.argv[1:]]))' torch transformers)
#


def deps_list(*pkgs):
    return [deps[pkg] for pkg in pkgs]


class DepsTableUpdateCommand(Command):
    """
    A custom command that updates the dependency table.
    usage: python setup.py deps_table_update
    """

    description = "build runtime dependency table"
    user_options = [
        # format: (long option, short option, description).
        (
            "dep-table-update",
            None,
            "updates src/diffusers/dependency_versions_table.py",
        ),
    ]

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        entries = "\n".join([f'    "{k}": "{v}",' for k, v in deps.items()])
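        # Each entry renders as e.g. '    "accelerate": "accelerate>=0.31.0",', so the generated
        # module exposes a plain `deps` dict that mirrors `_deps` above.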
        content = [
            "# THIS FILE HAS BEEN AUTOGENERATED. To update:",
            "# 1. modify the `_deps` dict in setup.py",
            "# 2. run `make deps_table_update`",
            "deps = {",
            entries,
            "}",
            "",
        ]
        target = "src/diffusers/dependency_versions_table.py"
        print(f"updating {target}")
        with open(target, "w", encoding="utf-8", newline="\n") as f:
            f.write("\n".join(content))


extras = {}
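# Optional dependency groups, installable as extras, e.g. `pip install diffusers[torch]` or,
# from a source checkout, `pip install -e ".[dev]"`.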
extras["quality"] = deps_list("urllib3", "isort", "ruff", "hf-doc-builder")
extras["docs"] = deps_list("hf-doc-builder")
extras["training"] = deps_list("accelerate", "datasets", "protobuf", "tensorboard", "Jinja2", "peft", "timm")
extras["test"] = deps_list(
    "compel",
    "GitPython",
    "datasets",
    "Jinja2",
    "invisible-watermark",
    "k-diffusion",
    "librosa",
    "parameterized",
    "pytest",
    "pytest-timeout",
    "pytest-xdist",
    "requests-mock",
    "safetensors",
    "sentencepiece",
    "scipy",
    "tiktoken",
    "torchvision",
    "transformers",
    "phonemizer",
)
extras["torch"] = deps_list("torch", "accelerate")

extras["bitsandbytes"] = deps_list("bitsandbytes", "accelerate")
extras["gguf"] = deps_list("gguf", "accelerate")
extras["optimum_quanto"] = deps_list("optimum_quanto", "accelerate")
extras["torchao"] = deps_list("torchao", "accelerate")
extras["nvidia_modelopt"] = deps_list("nvidia_modelopt[hf]")

if os.name == "nt":  # windows
    extras["flax"] = []  # jax is not supported on windows
else:
    extras["flax"] = deps_list("jax", "jaxlib", "flax")

extras["dev"] = (
    extras["quality"] + extras["test"] + extras["training"] + extras["docs"] + extras["torch"] + extras["flax"]
)

install_requires = [
    deps["importlib_metadata"],
    deps["filelock"],
    deps["httpx"],
    deps["huggingface-hub"],
    deps["numpy"],
    deps["regex"],
    deps["requests"],
    deps["safetensors"],
    deps["Pillow"],
]

version_range_max = max(sys.version_info[1], 10) + 1
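# Used below to generate "Programming Language :: Python :: 3.x" classifiers from 3.8 up to at
# least 3.10, or up to the minor version of the interpreter running this script, whichever is larger.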

setup(
    name="diffusers",
    version="0.36.0.dev0",  # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
    description="State-of-the-art diffusion in PyTorch and JAX.",
    long_description=open("README.md", "r", encoding="utf-8").read(),
    long_description_content_type="text/markdown",
    keywords="deep learning diffusion jax pytorch stable diffusion audioldm",
    license="Apache 2.0 License",
    author="The Hugging Face team (past and future) with the help of all our contributors (https://github.com/huggingface/diffusers/graphs/contributors)",
    author_email="diffusers@huggingface.co",
    url="https://github.com/huggingface/diffusers",
    package_dir={"": "src"},
    packages=find_packages("src"),
    package_data={"diffusers": ["py.typed"]},
    include_package_data=True,
    python_requires=">=3.8.0",
    install_requires=list(install_requires),
    extras_require=extras,
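    # exposes the `diffusers-cli` console command (diffusers.commands.diffusers_cli:main)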
    entry_points={"console_scripts": ["diffusers-cli=diffusers.commands.diffusers_cli:main"]},
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        "Programming Language :: Python :: 3",
    ]
    + [f"Programming Language :: Python :: 3.{i}" for i in range(8, version_range_max)],
    cmdclass={"deps_table_update": DepsTableUpdateCommand},
)