import_utils.py 34.2 KB
Newer Older
Aryan's avatar
Aryan committed
1
# Copyright 2025 The HuggingFace Team. All rights reserved.
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Import utilities: Utilities related to imports and our lazy inits.
"""
Mengqing Cao's avatar
Mengqing Cao committed
17

18
import importlib.util
19
import inspect
20
import operator as op
21
22
import os
import sys
23
from collections import OrderedDict, defaultdict
Charles's avatar
Charles committed
24
from functools import lru_cache as cache
Dhruv Nair's avatar
Dhruv Nair committed
25
26
from itertools import chain
from types import ModuleType
27
from typing import Any, Tuple, Union
28

Lucain's avatar
Lucain committed
29
from huggingface_hub.utils import is_jinja_available  # noqa: F401
30
from packaging.version import Version, parse
31
32
33
34
35
36
37
38
39

from . import logging


# The package importlib_metadata is in a different place, depending on the python version.
if sys.version_info < (3, 8):
    import importlib_metadata
else:
    import importlib.metadata as importlib_metadata
40
41
42
43
try:
    _package_map = importlib_metadata.packages_distributions()  # load-once to avoid expensive calls
except Exception:
    _package_map = None
44
45
46
47
48
49
50
51
52

logger = logging.get_logger(__name__)  # pylint: disable=invalid-name

ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"}
ENV_VARS_TRUE_AND_AUTO_VALUES = ENV_VARS_TRUE_VALUES.union({"AUTO"})

USE_TF = os.environ.get("USE_TF", "AUTO").upper()
USE_TORCH = os.environ.get("USE_TORCH", "AUTO").upper()
USE_JAX = os.environ.get("USE_FLAX", "AUTO").upper()
53
USE_SAFETENSORS = os.environ.get("USE_SAFETENSORS", "AUTO").upper()
54
55
DIFFUSERS_SLOW_IMPORT = os.environ.get("DIFFUSERS_SLOW_IMPORT", "FALSE").upper()
DIFFUSERS_SLOW_IMPORT = DIFFUSERS_SLOW_IMPORT in ENV_VARS_TRUE_VALUES
56

57
58
STR_OPERATION_TO_FUNC = {">": op.gt, ">=": op.ge, "==": op.eq, "!=": op.ne, "<=": op.le, "<": op.lt}

59
60
61
_is_google_colab = "google.colab" in sys.modules or any(k.startswith("COLAB_") for k in os.environ)


62
def _is_package_available(pkg_name: str, get_dist_name: bool = False) -> Tuple[bool, str]:
    """
    Check whether `pkg_name` is importable and resolve its installed version.

    Args:
        pkg_name (`str`):
            The *import* name of the package (e.g. "torch", "cv2").
        get_dist_name (`bool`, defaults to `False`):
            If `True`, translate the import name to its *distribution* name via the
            import-name -> distribution map before querying version metadata (needed when
            the two differ, e.g. import "modelopt" / distribution "nvidia-modelopt").

    Returns:
        `Tuple[bool, str]`: (availability flag, version string or "N/A").
    """
    global _package_map

    # Importability check only — does not import the package.
    pkg_exists = importlib.util.find_spec(pkg_name) is not None
    pkg_version = "N/A"

    if pkg_exists:
        if _package_map is None:
            # packages_distributions() was unavailable at module import; rebuild the
            # import-name -> [distribution names] map by scanning all installed distributions.
            _package_map = defaultdict(list)
            try:
                # Fallback for Python < 3.10
                for dist in importlib_metadata.distributions():
                    # Distributions may declare their top-level import names explicitly.
                    _top_level_declared = (dist.read_text("top_level.txt") or "").split()
                    # Infer top-level package names from file structure
                    _inferred_opt_names = {
                        f.parts[0] if len(f.parts) > 1 else inspect.getmodulename(f) for f in (dist.files or [])
                    } - {None}
                    _top_level_inferred = filter(lambda name: "." not in name, _inferred_opt_names)
                    # Prefer the declared names; fall back to the inferred ones.
                    for pkg in _top_level_declared or _top_level_inferred:
                        _package_map[pkg].append(dist.metadata["Name"])
            except Exception as _:
                # Best-effort: a broken distribution must not break availability probing.
                pass
        try:
            if get_dist_name and pkg_name in _package_map and _package_map[pkg_name]:
                if len(_package_map[pkg_name]) > 1:
                    logger.warning(
                        f"Multiple distributions found for package {pkg_name}. Picked distribution: {_package_map[pkg_name][0]}"
                    )
                pkg_name = _package_map[pkg_name][0]
            pkg_version = importlib_metadata.version(pkg_name)
            logger.debug(f"Successfully imported {pkg_name} version {pkg_version}")
        except (ImportError, importlib_metadata.PackageNotFoundError):
            # Importable but without resolvable metadata -> treat as unavailable.
            pkg_exists = False

    return pkg_exists, pkg_version


# PyTorch is probed unless USE_TORCH disables it or USE_TF forces TF-only mode.
if USE_TORCH in ENV_VARS_TRUE_AND_AUTO_VALUES and USE_TF not in ENV_VARS_TRUE_VALUES:
    _torch_available, _torch_version = _is_package_available("torch")
else:
    logger.info("Disabling PyTorch because USE_TORCH is set")
    _torch_available = False
    _torch_version = "N/A"

_jax_version = "N/A"
_flax_version = "N/A"
if USE_JAX in ENV_VARS_TRUE_AND_AUTO_VALUES:
    # Flax support requires both `jax` and `flax` to be importable.
    _flax_available = importlib.util.find_spec("jax") is not None and importlib.util.find_spec("flax") is not None
    if _flax_available:
        try:
            _jax_version = importlib_metadata.version("jax")
            _flax_version = importlib_metadata.version("flax")
            logger.info(f"JAX version {_jax_version}, Flax version {_flax_version} available.")
        except importlib_metadata.PackageNotFoundError:
            # Importable but no distribution metadata -> treat as unavailable.
            _flax_available = False
else:
    _flax_available = False

if USE_SAFETENSORS in ENV_VARS_TRUE_AND_AUTO_VALUES:
    _safetensors_available, _safetensors_version = _is_package_available("safetensors")
else:
    # NOTE(review): this branch leaves `_safetensors_version` undefined — anything reading
    # it with safetensors disabled would raise NameError; confirm no such reader exists.
    logger.info("Disabling Safetensors because USE_SAFETENSORS is set")
    _safetensors_available = False
126

127
_onnxruntime_version = "N/A"
# The import name is always "onnxruntime", but the distribution name varies per backend build.
_onnx_available = importlib.util.find_spec("onnxruntime") is not None
if _onnx_available:
    candidates = (
        "onnxruntime",
        "onnxruntime-cann",
        "onnxruntime-directml",
        "ort_nightly_directml",
        "onnxruntime-gpu",
        "ort_nightly_gpu",
        "onnxruntime-migraphx",
        "onnxruntime-openvino",
        "onnxruntime-qnn",
        "onnxruntime-rocm",
        "onnxruntime-training",
        "onnxruntime-vitisai",
    )
    _onnxruntime_version = None
    # For the metadata, we have to look for both onnxruntime and onnxruntime-x
    for pkg in candidates:
        try:
            _onnxruntime_version = importlib_metadata.version(pkg)
            break  # first matching distribution wins
        except importlib_metadata.PackageNotFoundError:
            pass
    # Only available if some distribution's metadata was actually found.
    _onnx_available = _onnxruntime_version is not None
    if _onnx_available:
        logger.debug(f"Successfully imported onnxruntime version {_onnxruntime_version}")
155

156
157
158
# (sayakpaul): importlib.util.find_spec("opencv-python") returns None even when it's installed.
# _opencv_available = importlib.util.find_spec("opencv-python") is not None
try:
    # The cv2 module can come from any of these distributions; probe each for metadata.
    candidates = (
        "opencv-python",
        "opencv-contrib-python",
        "opencv-python-headless",
        "opencv-contrib-python-headless",
    )
    _opencv_version = None
    for pkg in candidates:
        try:
            _opencv_version = importlib_metadata.version(pkg)
            break  # first matching distribution wins
        except importlib_metadata.PackageNotFoundError:
            pass
    _opencv_available = _opencv_version is not None
    if _opencv_available:
        logger.debug(f"Successfully imported cv2 version {_opencv_version}")
# NOTE(review): this outer handler looks unreachable — the inner loop already catches
# PackageNotFoundError per candidate. Kept for safety; harmless either way.
except importlib_metadata.PackageNotFoundError:
    _opencv_available = False
177

Patrick von Platen's avatar
Patrick von Platen committed
178
179
180
181
182
183
184
185
_bs4_available = importlib.util.find_spec("bs4") is not None
try:
    # importlib metadata under different name: the bs4 module ships in the
    # "beautifulsoup4" distribution.
    _bs4_version = importlib_metadata.version("beautifulsoup4")
    # Bug fix: the debug message previously said "ftfy" (copy-paste error) while
    # reporting the beautifulsoup4 version.
    logger.debug(f"Successfully imported beautifulsoup4 version {_bs4_version}")
except importlib_metadata.PackageNotFoundError:
    _bs4_available = False

186
187
188
189
190
191
192
# Import name "imwatermark" vs distribution name "invisible-watermark".
_invisible_watermark_available = importlib.util.find_spec("imwatermark") is not None
try:
    _invisible_watermark_version = importlib_metadata.version("invisible-watermark")
    logger.debug(f"Successfully imported invisible-watermark version {_invisible_watermark_version}")
except importlib_metadata.PackageNotFoundError:
    _invisible_watermark_available = False

193
194
195
196
# Availability flag + version for every optional dependency, resolved once at import time.
_torch_xla_available, _torch_xla_version = _is_package_available("torch_xla")
_torch_npu_available, _torch_npu_version = _is_package_available("torch_npu")
_transformers_available, _transformers_version = _is_package_available("transformers")
_hf_hub_available, _hf_hub_version = _is_package_available("huggingface_hub")
_kernels_available, _kernels_version = _is_package_available("kernels")
_inflect_available, _inflect_version = _is_package_available("inflect")
_unidecode_available, _unidecode_version = _is_package_available("unidecode")
_k_diffusion_available, _k_diffusion_version = _is_package_available("k_diffusion")
_note_seq_available, _note_seq_version = _is_package_available("note_seq")
_wandb_available, _wandb_version = _is_package_available("wandb")
_tensorboard_available, _tensorboard_version = _is_package_available("tensorboard")
_compel_available, _compel_version = _is_package_available("compel")
_sentencepiece_available, _sentencepiece_version = _is_package_available("sentencepiece")
_torchsde_available, _torchsde_version = _is_package_available("torchsde")
_peft_available, _peft_version = _is_package_available("peft")
_torchvision_available, _torchvision_version = _is_package_available("torchvision")
_matplotlib_available, _matplotlib_version = _is_package_available("matplotlib")
_timm_available, _timm_version = _is_package_available("timm")
# Fix: bitsandbytes was probed twice (exact duplicate assignment); the redundant second
# probe has been removed.
_bitsandbytes_available, _bitsandbytes_version = _is_package_available("bitsandbytes")
_imageio_available, _imageio_version = _is_package_available("imageio")
_ftfy_available, _ftfy_version = _is_package_available("ftfy")
_scipy_available, _scipy_version = _is_package_available("scipy")
_librosa_available, _librosa_version = _is_package_available("librosa")
_accelerate_available, _accelerate_version = _is_package_available("accelerate")
_xformers_available, _xformers_version = _is_package_available("xformers")
_gguf_available, _gguf_version = _is_package_available("gguf")
_torchao_available, _torchao_version = _is_package_available("torchao")
# Import name differs from the distribution name -> resolve via the package map.
_optimum_quanto_available, _optimum_quanto_version = _is_package_available("optimum", get_dist_name=True)
_pytorch_retinaface_available, _pytorch_retinaface_version = _is_package_available("pytorch_retinaface")
_better_profanity_available, _better_profanity_version = _is_package_available("better_profanity")
_nltk_available, _nltk_version = _is_package_available("nltk")
_cosmos_guardrail_available, _cosmos_guardrail_version = _is_package_available("cosmos_guardrail")
_sageattention_available, _sageattention_version = _is_package_available("sageattention")
_flash_attn_available, _flash_attn_version = _is_package_available("flash_attn")
_flash_attn_3_available, _flash_attn_3_version = _is_package_available("flash_attn_3")
_aiter_available, _aiter_version = _is_package_available("aiter")
_kornia_available, _kornia_version = _is_package_available("kornia")
_nvidia_modelopt_available, _nvidia_modelopt_version = _is_package_available("modelopt", get_dist_name=True)
232
233


234
235
236
237
def is_torch_available():
    """Whether `torch` is installed (subject to the USE_TORCH/USE_TF env switches)."""
    return _torch_available


def is_torch_xla_available():
    """Whether `torch_xla` is installed."""
    return _torch_xla_available


def is_torch_npu_available():
    """Whether `torch_npu` (Ascend NPU backend) is installed."""
    return _torch_npu_available


def is_flax_available():
    """Whether both `jax` and `flax` are installed (subject to the USE_FLAX env switch)."""
    return _flax_available


def is_transformers_available():
    """Whether `transformers` is installed."""
    return _transformers_available


def is_inflect_available():
    """Whether `inflect` is installed."""
    return _inflect_available


def is_unidecode_available():
    """Whether `unidecode` is installed."""
    return _unidecode_available


262
263
264
265
def is_onnx_available():
    return _onnx_available


266
267
268
269
def is_opencv_available():
    return _opencv_available


270
271
272
273
def is_scipy_available():
    return _scipy_available


274
275
276
277
def is_librosa_available():
    return _librosa_available


278
279
280
281
def is_xformers_available():
    return _xformers_available


282
283
284
285
def is_accelerate_available():
    return _accelerate_available


286
287
288
289
def is_kernels_available():
    return _kernels_available


290
291
292
293
def is_k_diffusion_available():
    return _k_diffusion_available


294
295
296
297
def is_note_seq_available():
    return _note_seq_available


298
299
300
301
def is_wandb_available():
    return _wandb_available


302
303
304
305
def is_tensorboard_available():
    return _tensorboard_available


306
307
308
309
def is_compel_available():
    return _compel_available


Patrick von Platen's avatar
Patrick von Platen committed
310
311
312
313
314
315
316
317
def is_ftfy_available():
    """Whether `ftfy` is installed."""
    return _ftfy_available


def is_bs4_available():
    """Whether `bs4` (beautifulsoup4) is installed."""
    return _bs4_available


def is_torchsde_available():
    """Whether `torchsde` is installed."""
    return _torchsde_available


def is_invisible_watermark_available():
    """Whether `imwatermark` (invisible-watermark) is installed."""
    return _invisible_watermark_available


def is_peft_available():
    """Whether `peft` is installed."""
    return _peft_available


def is_torchvision_available():
    """Whether `torchvision` is installed."""
    return _torchvision_available


def is_matplotlib_available():
    """Whether `matplotlib` is installed."""
    return _matplotlib_available


def is_safetensors_available():
    """Whether `safetensors` is installed (subject to the USE_SAFETENSORS env switch)."""
    return _safetensors_available


def is_bitsandbytes_available():
    """Whether `bitsandbytes` is installed."""
    return _bitsandbytes_available


def is_google_colab():
    """Whether we appear to be running inside Google Colab (detected at import time)."""
    return _is_google_colab


350
351
352
353
def is_sentencepiece_available():
    """Whether `sentencepiece` is installed."""
    return _sentencepiece_available


def is_imageio_available():
    """Whether `imageio` is installed."""
    return _imageio_available


def is_gguf_available():
    """Whether `gguf` is installed."""
    return _gguf_available


def is_torchao_available():
    """Whether `torchao` is installed."""
    return _torchao_available


def is_optimum_quanto_available():
    """Whether `optimum` (optimum-quanto distribution) is installed."""
    return _optimum_quanto_available


def is_nvidia_modelopt_available():
    """Whether `modelopt` (nvidia-modelopt distribution) is installed."""
    return _nvidia_modelopt_available


def is_timm_available():
    """Whether `timm` is installed."""
    return _timm_available
376
377


Aryan's avatar
Aryan committed
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
def is_pytorch_retinaface_available():
    return _pytorch_retinaface_available


def is_better_profanity_available():
    return _better_profanity_available


def is_nltk_available():
    return _nltk_available


def is_cosmos_guardrail_available():
    return _cosmos_guardrail_available


394
395
396
397
def is_hpu_available():
    return all(importlib.util.find_spec(lib) for lib in ("habana_frameworks", "habana_frameworks.torch"))


398
399
400
401
402
403
404
405
406
407
408
409
def is_sageattention_available():
    return _sageattention_available


def is_flash_attn_available():
    return _flash_attn_available


def is_flash_attn_3_available():
    return _flash_attn_3_available


410
411
412
413
def is_aiter_available():
    return _aiter_available


414
415
416
417
def is_kornia_available():
    return _kornia_available


418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
# Templates for the ImportError raised by `requires_backends`; "{0}" is filled with the
# name of the object that needs the backend.

# docstyle-ignore
FLAX_IMPORT_ERROR = """
{0} requires the FLAX library but it was not found in your environment. Checkout the instructions on the
installation page: https://github.com/google/flax and follow the ones that match your environment.
"""

# docstyle-ignore
INFLECT_IMPORT_ERROR = """
{0} requires the inflect library but it was not found in your environment. You can install it with pip: `pip install
inflect`
"""

# docstyle-ignore
PYTORCH_IMPORT_ERROR = """
{0} requires the PyTorch library but it was not found in your environment. Checkout the instructions on the
installation page: https://pytorch.org/get-started/locally/ and follow the ones that match your environment.
"""

# docstyle-ignore
ONNX_IMPORT_ERROR = """
{0} requires the onnxruntime library but it was not found in your environment. You can install it with pip: `pip
install onnxruntime`
"""

# docstyle-ignore
OPENCV_IMPORT_ERROR = """
{0} requires the OpenCV library but it was not found in your environment. You can install it with pip: `pip
install opencv-python`
"""

# docstyle-ignore
SCIPY_IMPORT_ERROR = """
{0} requires the scipy library but it was not found in your environment. You can install it with pip: `pip install
scipy`
"""

# docstyle-ignore
LIBROSA_IMPORT_ERROR = """
{0} requires the librosa library but it was not found in your environment.  Checkout the instructions on the
installation page: https://librosa.org/doc/latest/install.html and follow the ones that match your environment.
"""

# docstyle-ignore
TRANSFORMERS_IMPORT_ERROR = """
{0} requires the transformers library but it was not found in your environment. You can install it with pip: `pip
install transformers`
"""

# docstyle-ignore
UNIDECODE_IMPORT_ERROR = """
{0} requires the unidecode library but it was not found in your environment. You can install it with pip: `pip install
Unidecode`
"""

# docstyle-ignore
K_DIFFUSION_IMPORT_ERROR = """
{0} requires the k-diffusion library but it was not found in your environment. You can install it with pip: `pip
install k-diffusion`
"""

# docstyle-ignore
NOTE_SEQ_IMPORT_ERROR = """
{0} requires the note-seq library but it was not found in your environment. You can install it with pip: `pip
install note-seq`
"""

# docstyle-ignore
WANDB_IMPORT_ERROR = """
{0} requires the wandb library but it was not found in your environment. You can install it with pip: `pip
install wandb`
"""

# docstyle-ignore
TENSORBOARD_IMPORT_ERROR = """
{0} requires the tensorboard library but it was not found in your environment. You can install it with pip: `pip
install tensorboard`
"""

# docstyle-ignore
COMPEL_IMPORT_ERROR = """
{0} requires the compel library but it was not found in your environment. You can install it with pip: `pip install compel`
"""

# docstyle-ignore
BS4_IMPORT_ERROR = """
{0} requires the Beautiful Soup library but it was not found in your environment. You can install it with pip:
`pip install beautifulsoup4`. Please note that you may need to restart your runtime after installation.
"""

# docstyle-ignore
FTFY_IMPORT_ERROR = """
{0} requires the ftfy library but it was not found in your environment. Checkout the instructions on the
installation section: https://github.com/rspeer/python-ftfy/tree/master#installing and follow the ones
that match your environment. Please note that you may need to restart your runtime after installation.
"""

# docstyle-ignore
TORCHSDE_IMPORT_ERROR = """
{0} requires the torchsde library but it was not found in your environment. You can install it with pip: `pip install torchsde`
"""

# docstyle-ignore
INVISIBLE_WATERMARK_IMPORT_ERROR = """
{0} requires the invisible-watermark library but it was not found in your environment. You can install it with pip: `pip install invisible-watermark>=0.2.0`
"""

# docstyle-ignore
PEFT_IMPORT_ERROR = """
{0} requires the peft library but it was not found in your environment. You can install it with pip: `pip install peft`
"""

# docstyle-ignore
SAFETENSORS_IMPORT_ERROR = """
{0} requires the safetensors library but it was not found in your environment. You can install it with pip: `pip install safetensors`
"""

# docstyle-ignore
SENTENCEPIECE_IMPORT_ERROR = """
{0} requires the sentencepiece library but it was not found in your environment. You can install it with pip: `pip install sentencepiece`
"""

# docstyle-ignore
BITSANDBYTES_IMPORT_ERROR = """
{0} requires the bitsandbytes library but it was not found in your environment. You can install it with pip: `pip install bitsandbytes`
"""

# docstyle-ignore
IMAGEIO_IMPORT_ERROR = """
{0} requires the imageio library and ffmpeg but it was not found in your environment. You can install it with pip: `pip install imageio imageio-ffmpeg`
"""

# docstyle-ignore
GGUF_IMPORT_ERROR = """
{0} requires the gguf library but it was not found in your environment. You can install it with pip: `pip install gguf`
"""

# docstyle-ignore
TORCHAO_IMPORT_ERROR = """
{0} requires the torchao library but it was not found in your environment. You can install it with pip: `pip install
torchao`
"""

# docstyle-ignore
QUANTO_IMPORT_ERROR = """
{0} requires the optimum-quanto library but it was not found in your environment. You can install it with pip: `pip
install optimum-quanto`
"""

# docstyle-ignore
PYTORCH_RETINAFACE_IMPORT_ERROR = """
{0} requires the pytorch_retinaface library but it was not found in your environment. You can install it with pip: `pip install pytorch_retinaface`
"""

# docstyle-ignore
BETTER_PROFANITY_IMPORT_ERROR = """
{0} requires the better_profanity library but it was not found in your environment. You can install it with pip: `pip install better_profanity`
"""

# docstyle-ignore
NLTK_IMPORT_ERROR = """
{0} requires the nltk library but it was not found in your environment. You can install it with pip: `pip install nltk`
"""


582
583
# Maps a backend key (as passed to `requires_backends`) to a pair of
# (availability predicate, ImportError message template).
BACKENDS_MAPPING = OrderedDict(
    [
        ("bs4", (is_bs4_available, BS4_IMPORT_ERROR)),
        ("flax", (is_flax_available, FLAX_IMPORT_ERROR)),
        ("inflect", (is_inflect_available, INFLECT_IMPORT_ERROR)),
        ("onnx", (is_onnx_available, ONNX_IMPORT_ERROR)),
        ("opencv", (is_opencv_available, OPENCV_IMPORT_ERROR)),
        ("scipy", (is_scipy_available, SCIPY_IMPORT_ERROR)),
        ("torch", (is_torch_available, PYTORCH_IMPORT_ERROR)),
        ("transformers", (is_transformers_available, TRANSFORMERS_IMPORT_ERROR)),
        ("unidecode", (is_unidecode_available, UNIDECODE_IMPORT_ERROR)),
        ("librosa", (is_librosa_available, LIBROSA_IMPORT_ERROR)),
        ("k_diffusion", (is_k_diffusion_available, K_DIFFUSION_IMPORT_ERROR)),
        ("note_seq", (is_note_seq_available, NOTE_SEQ_IMPORT_ERROR)),
        ("wandb", (is_wandb_available, WANDB_IMPORT_ERROR)),
        ("tensorboard", (is_tensorboard_available, TENSORBOARD_IMPORT_ERROR)),
        ("compel", (is_compel_available, COMPEL_IMPORT_ERROR)),
        ("ftfy", (is_ftfy_available, FTFY_IMPORT_ERROR)),
        ("torchsde", (is_torchsde_available, TORCHSDE_IMPORT_ERROR)),
        ("invisible_watermark", (is_invisible_watermark_available, INVISIBLE_WATERMARK_IMPORT_ERROR)),
        ("peft", (is_peft_available, PEFT_IMPORT_ERROR)),
        ("safetensors", (is_safetensors_available, SAFETENSORS_IMPORT_ERROR)),
        ("bitsandbytes", (is_bitsandbytes_available, BITSANDBYTES_IMPORT_ERROR)),
        ("sentencepiece", (is_sentencepiece_available, SENTENCEPIECE_IMPORT_ERROR)),
        ("imageio", (is_imageio_available, IMAGEIO_IMPORT_ERROR)),
        ("gguf", (is_gguf_available, GGUF_IMPORT_ERROR)),
        ("torchao", (is_torchao_available, TORCHAO_IMPORT_ERROR)),
        ("quanto", (is_optimum_quanto_available, QUANTO_IMPORT_ERROR)),
        ("pytorch_retinaface", (is_pytorch_retinaface_available, PYTORCH_RETINAFACE_IMPORT_ERROR)),
        ("better_profanity", (is_better_profanity_available, BETTER_PROFANITY_IMPORT_ERROR)),
        ("nltk", (is_nltk_available, NLTK_IMPORT_ERROR)),
    ]
)


def requires_backends(obj, backends):
    """
    Verify that every backend in `backends` is installed, raising an ImportError that
    concatenates the install instructions of all missing ones otherwise.

    Args:
        obj: The function/class (anything with `__name__`, else its class name is used)
            whose requirements are being checked; its name is inserted into the messages.
        backends: A single backend key or a list/tuple of keys into `BACKENDS_MAPPING`.
    """
    if not isinstance(backends, (list, tuple)):
        backends = [backends]

    name = getattr(obj, "__name__", obj.__class__.__name__)

    # Gather one formatted message per unavailable backend, preserving order.
    missing = []
    for backend in backends:
        available, error_template = BACKENDS_MAPPING[backend]
        if not available():
            missing.append(error_template.format(name))
    if missing:
        raise ImportError("".join(missing))

    # Some legacy pipelines additionally need a minimum transformers version.
    needs_transformers_4_25 = (
        "VersatileDiffusionTextToImagePipeline",
        "VersatileDiffusionPipeline",
        "VersatileDiffusionDualGuidedPipeline",
        "StableDiffusionImageVariationPipeline",
        "UnCLIPPipeline",
    )
    if name in needs_transformers_4_25 and is_transformers_version("<", "4.25.0"):
        raise ImportError(
            f"You need to install `transformers>=4.25` in order to use {name}: \n```\n pip install"
            " --upgrade transformers \n```"
        )

    needs_transformers_4_26 = ("StableDiffusionDepth2ImgPipeline", "StableDiffusionPix2PixZeroPipeline")
    if name in needs_transformers_4_26 and is_transformers_version("<", "4.26.0"):
        raise ImportError(
            f"You need to install `transformers>=4.26` in order to use {name}: \n```\n pip install"
            " --upgrade transformers \n```"
        )

647
648
649
650
651
652
653
654

class DummyObject(type):
    """
    Metaclass for the dummy objects. Any class inheriting from it will return the ImportError generated by
    `requires_backend` each time a user tries to access any method of that class.
    """

    def __getattr__(cls, key):
        # Private/dunder lookups (except a small allowlist that callers genuinely probe)
        # fall through so Python internals (pickling, inspection, ...) don't raise the
        # backend ImportError.
        if key.startswith("_") and key not in ["_load_connected_pipes", "_is_onnx"]:
            # NOTE(review): `type` defines no `__getattr__`, so this lookup itself raises
            # AttributeError — which is the intended "attribute missing" outcome here.
            return super().__getattr__(cls, key)
        requires_backends(cls, cls._backends)
658
659
660
661
662
663


# This function was copied from: https://github.com/huggingface/accelerate/blob/874c4967d94badd24f893064cc3bef45f57cadf7/src/accelerate/utils/versions.py#L319
def compare_versions(library_or_version: Union[str, Version], operation: str, requirement_version: str):
    """
    Compares a library version to some requirement using a given operation.

    Args:
        library_or_version (`str` or `packaging.version.Version`):
            A library name or a version to check.
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`.
        requirement_version (`str`):
            The version to compare the library version against

    Raises:
        ValueError: If `operation` is not one of the supported comparison operators.
    """
    if operation not in STR_OPERATION_TO_FUNC:
        raise ValueError(f"`operation` must be one of {list(STR_OPERATION_TO_FUNC.keys())}, received {operation}")
    compare_fn = STR_OPERATION_TO_FUNC[operation]

    # A bare library name is resolved to its installed version first.
    current = library_or_version
    if isinstance(current, str):
        current = parse(importlib_metadata.version(current))

    return compare_fn(current, parse(requirement_version))


# This function was copied from: https://github.com/huggingface/accelerate/blob/874c4967d94badd24f893064cc3bef45f57cadf7/src/accelerate/utils/versions.py#L338
Charles's avatar
Charles committed
682
@cache
def is_torch_version(operation: str, version: str):
    """
    Compares the current PyTorch version to a given reference with an operation.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A string version of PyTorch
    """
    # NOTE(review): no availability guard here — with torch disabled, `_torch_version` is
    # "N/A" and `parse` would raise; callers are expected to check is_torch_available() first.
    return compare_versions(parse(_torch_version), operation, version)
694
695


Charles's avatar
Charles committed
696
@cache
def is_torch_xla_version(operation: str, version: str):
    """
    Compares the current torch_xla version to a given reference with an operation.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A string version of torch_xla

    Returns:
        `bool`: `False` when torch_xla is not installed, else the comparison result.
    """
    # Bug fix: previously this tested `not is_torch_xla_available` — the function *object*,
    # which is always truthy — so the guard never fired and `parse("N/A")` could raise when
    # torch_xla is absent. Check the availability flag, as the sibling helpers do.
    if not _torch_xla_available:
        return False
    return compare_versions(parse(_torch_xla_version), operation, version)


Charles's avatar
Charles committed
712
@cache
def is_transformers_version(operation: str, version: str):
    """
    Compares the current Transformers version to a given reference with an operation.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string

    Returns:
        `bool`: `False` when transformers is not installed, else the comparison result.
    """
    if not _transformers_available:
        return False
    return compare_versions(parse(_transformers_version), operation, version)


@cache
def is_hf_hub_version(operation: str, version: str):
    """
    Compares the current Hugging Face Hub version to a given reference with an operation.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string

    Returns:
        `bool`: `False` when huggingface_hub is not installed, else the comparison result.
    """
    if not _hf_hub_available:
        return False
    return compare_versions(parse(_hf_hub_version), operation, version)


@cache
def is_accelerate_version(operation: str, version: str):
    """
    Compares the current Accelerate version to a given reference with an operation.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string

    Returns:
        `bool`: `False` when accelerate is not installed, else the comparison result.
    """
    if not _accelerate_available:
        return False
    return compare_versions(parse(_accelerate_version), operation, version)


@cache
def is_peft_version(operation: str, version: str):
    """
    Compares the current PEFT version to a given reference with an operation.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string

    Returns:
        `bool`: `False` when peft is not installed, else the comparison result.
    """
    if not _peft_available:
        return False
    return compare_versions(parse(_peft_version), operation, version)


Charles's avatar
Charles committed
776
@cache
def is_bitsandbytes_version(operation: str, version: str):
    """
    Compares the current bitsandbytes version to a given reference with an operation.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string

    Returns:
        `bool`: `False` when bitsandbytes is not installed, else the comparison result.
    """
    if not _bitsandbytes_available:
        return False
    return compare_versions(parse(_bitsandbytes_version), operation, version)


Charles's avatar
Charles committed
791
@cache
def is_gguf_version(operation: str, version: str):
    """
    Compares the current GGUF version to a given reference with an operation.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string
    """
    # Without gguf installed there is no version to compare against.
    if not _gguf_available:
        return False
    return compare_versions(parse(_gguf_version), operation, version)


Charles's avatar
Charles committed
807
@cache
def is_torchao_version(operation: str, version: str):
    """
    Checks how the installed torchao version relates to a reference version.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string
    """
    # A missing torchao install always compares as False.
    return compare_versions(parse(_torchao_version), operation, version) if _torchao_available else False


Charles's avatar
Charles committed
823
@cache
def is_k_diffusion_version(operation: str, version: str):
    """
    Checks how the installed k-diffusion version relates to a reference version.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string
    """
    # Without k-diffusion installed no version comparison is meaningful.
    if _k_diffusion_available:
        return compare_versions(parse(_k_diffusion_version), operation, version)
    return False


Charles's avatar
Charles committed
839
@cache
def is_optimum_quanto_version(operation: str, version: str):
    """
    Compares the current optimum-quanto version to a given reference with an operation.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string
    """
    # Without optimum-quanto installed there is no version to compare against.
    if not _optimum_quanto_available:
        return False
    return compare_versions(parse(_optimum_quanto_version), operation, version)


Charles's avatar
Charles committed
855
@cache
def is_nvidia_modelopt_version(operation: str, version: str):
    """
    Checks how the installed Nvidia ModelOpt version relates to a reference version.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string
    """
    # A missing ModelOpt install always compares as False.
    return compare_versions(parse(_nvidia_modelopt_version), operation, version) if _nvidia_modelopt_available else False


Charles's avatar
Charles committed
871
@cache
def is_xformers_version(operation: str, version: str):
    """
    Checks how the installed xformers version relates to a reference version.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string
    """
    # Without xformers installed no version comparison is meaningful.
    if _xformers_available:
        return compare_versions(parse(_xformers_version), operation, version)
    return False


Charles's avatar
Charles committed
887
@cache
def is_sageattention_version(operation: str, version: str):
    """
    Checks how the installed sageattention version relates to a reference version.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string
    """
    # A missing sageattention install always compares as False.
    return compare_versions(parse(_sageattention_version), operation, version) if _sageattention_available else False


Charles's avatar
Charles committed
903
@cache
def is_flash_attn_version(operation: str, version: str):
    """
    Checks how the installed flash-attention version relates to a reference version.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string
    """
    # Without flash-attention installed no version comparison is meaningful.
    if _flash_attn_available:
        return compare_versions(parse(_flash_attn_version), operation, version)
    return False


919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
@cache
def is_aiter_version(operation: str, version: str):
    """
    Checks how the installed aiter version relates to a reference version.

    Args:
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`
        version (`str`):
            A version string
    """
    # A missing aiter install always compares as False.
    return compare_versions(parse(_aiter_version), operation, version) if _aiter_available else False


Dhruv Nair's avatar
Dhruv Nair committed
935
936
937
def get_objects_from_module(module):
    """
    Collects every public attribute of a module into a dictionary, skipping private/internal names.

    Args:
        module (ModuleType):
            Module to extract the objects from.

    Returns:
        dict: Dictionary of object names and corresponding values
    """
    # Anything whose name starts with an underscore is considered internal and excluded.
    return {attr: getattr(module, attr) for attr in dir(module) if not attr.startswith("_")}


956
# NOTE(review): subclasses BaseException rather than Exception, presumably so that broad
# `except Exception` handlers in import machinery do not swallow it — confirm before changing.
class OptionalDependencyNotAvailable(BaseException):
    """
    An error indicating that an optional dependency of Diffusers was not found in the environment.
    """
Dhruv Nair's avatar
Dhruv Nair committed
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019


class _LazyModule(ModuleType):
    """
    Module class that surfaces all objects but only performs associated imports when the objects are requested.
    """

    # Very heavily inspired by optuna.integration._IntegrationModule
    # https://github.com/optuna/optuna/blob/master/optuna/integration/__init__.py
    def __init__(self, name, module_file, import_structure, module_spec=None, extra_objects=None):
        super().__init__(name)
        self._modules = set(import_structure.keys())
        self._class_to_module = {}
        for key, values in import_structure.items():
            for value in values:
                self._class_to_module[value] = key
        # Needed for autocompletion in an IDE
        self.__all__ = list(import_structure.keys()) + list(chain(*import_structure.values()))
        self.__file__ = module_file
        self.__spec__ = module_spec
        self.__path__ = [os.path.dirname(module_file)]
        self._objects = {} if extra_objects is None else extra_objects
        self._name = name
        self._import_structure = import_structure

    # Needed for autocompletion in an IDE
    def __dir__(self):
        result = super().__dir__()
        # The elements of self.__all__ that are submodules may or may not be in the dir already, depending on whether
        # they have been accessed or not. So we only add the elements of self.__all__ that are not already in the dir.
        for attr in self.__all__:
            if attr not in result:
                result.append(attr)
        return result

    def __getattr__(self, name: str) -> Any:
        if name in self._objects:
            return self._objects[name]
        if name in self._modules:
            value = self._get_module(name)
        elif name in self._class_to_module.keys():
            module = self._get_module(self._class_to_module[name])
            value = getattr(module, name)
        else:
            raise AttributeError(f"module {self.__name__} has no attribute {name}")

        setattr(self, name, value)
        return value

    def _get_module(self, module_name: str):
        try:
            return importlib.import_module("." + module_name, self.__name__)
        except Exception as e:
            raise RuntimeError(
                f"Failed to import {self.__name__}.{module_name} because of the following error (look up to see its"
                f" traceback):\n{e}"
            ) from e

    def __reduce__(self):
        return (self.__class__, (self._name, self.__file__, self._import_structure))