Unverified Commit 144c3a8b authored by Patrick von Platen, committed by GitHub

[Imports] Fix many import bugs and make sure that doc builder CI test works correctly (#5176)

* [Doc builder] Ensure slow import for doc builder

* Apply suggestions from code review

* env for doc builder

* fix more

* [Diffusers] Set import to slow as env variable

* fix docs

* fix docs

* Apply suggestions from code review

* Apply suggestions from code review

* fix docs

* fix docs
parent 30a512ea
 from typing import TYPE_CHECKING
 from ...utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     get_objects_from_module,
@@ -26,7 +27,7 @@ else:
     _import_structure["pipeline_text_to_video_zero"] = ["TextToVideoZeroPipeline"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     try:
         if not (is_transformers_available() and is_torch_available()):
             raise OptionalDependencyNotAvailable()
...
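The same one-line change repeats in each of the pipeline `__init__.py` hunks in this commit: the public objects stay registered in `_import_structure` and are normally resolved lazily through `_LazyModule`, but when the new `DIFFUSERS_SLOW_IMPORT` flag is set, the `if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:` branch imports everything eagerly, so the doc builder sees real objects and broken imports fail immediately in CI. Below is a minimal, self-contained sketch of the idea, using `json` as a stand-in submodule and a module-level `__getattr__` (PEP 562) in place of the real `_LazyModule` helper; it is illustrative only, not the diffusers implementation.

# Illustrative sketch only -- not the actual diffusers implementation.
import importlib
import os
from typing import TYPE_CHECKING

# Mirrors the flag added in import_utils.py: any truthy value forces eager imports.
DIFFUSERS_SLOW_IMPORT = os.environ.get("DIFFUSERS_SLOW_IMPORT", "FALSE").upper() in {"1", "TRUE", "YES", "ON"}

# Submodule name -> public objects; in diffusers this dict is handed to _LazyModule.
_import_structure = {"json": ["dumps", "loads"]}

if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
    # Eager branch: type checkers and the doc-builder CI get real objects at import time.
    from json import dumps, loads  # noqa: F401
else:
    # Lazy branch: resolve a public name only when it is first accessed.
    def __getattr__(name):
        for module_name, objects in _import_structure.items():
            if name in objects:
                return getattr(importlib.import_module(module_name), name)
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")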
 from typing import TYPE_CHECKING
 from ...utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     is_torch_available,
@@ -27,7 +28,7 @@ else:
     _import_structure["text_proj"] = ["UnCLIPTextProjModel"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     try:
         if not (is_transformers_available() and is_torch_available() and is_transformers_version(">=", "4.25.0")):
             raise OptionalDependencyNotAvailable()
...
 from typing import TYPE_CHECKING
 from ...utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     is_torch_available,
@@ -29,7 +30,7 @@ else:
     _import_structure["pipeline_unidiffuser"] = ["ImageTextPipelineOutput", "UniDiffuserPipeline"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     try:
         if not (is_transformers_available() and is_torch_available()):
             raise OptionalDependencyNotAvailable()
...
 from typing import TYPE_CHECKING
 from ...utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     is_torch_available,
@@ -39,7 +40,7 @@ else:
     _import_structure["pipeline_versatile_diffusion_text_to_image"] = ["VersatileDiffusionTextToImagePipeline"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     try:
         if not (is_transformers_available() and is_torch_available() and is_transformers_version(">=", "4.25.0")):
             raise OptionalDependencyNotAvailable()
...
 from typing import TYPE_CHECKING
 from ...utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     is_torch_available,
@@ -30,7 +31,7 @@ else:
     _import_structure["pipeline_vq_diffusion"] = ["LearnedClassifierFreeSamplingEmbeddings", "VQDiffusionPipeline"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     try:
         if not (is_transformers_available() and is_torch_available()):
             raise OptionalDependencyNotAvailable()
...
 from typing import TYPE_CHECKING
 from ...utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     get_objects_from_module,
@@ -28,7 +29,7 @@ else:
     _import_structure["pipeline_wuerstchen_prior"] = ["DEFAULT_STAGE_C_TIMESTEPS", "WuerstchenPriorPipeline"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     try:
         if not (is_transformers_available() and is_torch_available()):
             raise OptionalDependencyNotAvailable()
...
@@ -15,6 +15,7 @@
 from typing import TYPE_CHECKING
 from ..utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     get_objects_from_module,
@@ -111,7 +112,7 @@ except OptionalDependencyNotAvailable:
 else:
     _import_structure["scheduling_dpmsolver_sde"] = ["DPMSolverSDEScheduler"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     from ..utils import (
         OptionalDependencyNotAvailable,
         is_flax_available,
@@ -162,6 +163,7 @@ if TYPE_CHECKING:
         from .scheduling_ddim_flax import FlaxDDIMScheduler
         from .scheduling_ddpm_flax import FlaxDDPMScheduler
         from .scheduling_dpmsolver_multistep_flax import FlaxDPMSolverMultistepScheduler
+        from .scheduling_euler_discrete_flax import FlaxEulerDiscreteScheduler
         from .scheduling_karras_ve_flax import FlaxKarrasVeScheduler
         from .scheduling_lms_discrete_flax import FlaxLMSDiscreteScheduler
         from .scheduling_pndm_flax import FlaxPNDMScheduler
...
@@ -45,6 +45,7 @@ from .hub_utils import (
 )
 from .import_utils import (
     BACKENDS_MAPPING,
+    DIFFUSERS_SLOW_IMPORT,
     ENV_VARS_TRUE_AND_AUTO_VALUES,
     ENV_VARS_TRUE_VALUES,
     USE_JAX,
...
@@ -46,6 +46,8 @@ USE_TF = os.environ.get("USE_TF", "AUTO").upper()
 USE_TORCH = os.environ.get("USE_TORCH", "AUTO").upper()
 USE_JAX = os.environ.get("USE_FLAX", "AUTO").upper()
 USE_SAFETENSORS = os.environ.get("USE_SAFETENSORS", "AUTO").upper()
+DIFFUSERS_SLOW_IMPORT = os.environ.get("DIFFUSERS_SLOW_IMPORT", "FALSE").upper()
+DIFFUSERS_SLOW_IMPORT = DIFFUSERS_SLOW_IMPORT in ENV_VARS_TRUE_VALUES
 STR_OPERATION_TO_FUNC = {">": op.gt, ">=": op.ge, "==": op.eq, "!=": op.ne, "<=": op.le, "<": op.lt}
...
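With the two added lines, the flag is parsed once at import time: the raw environment value is uppercased and then checked against `ENV_VARS_TRUE_VALUES`, so `DIFFUSERS_SLOW_IMPORT` ends up as a plain boolean. A small usage sketch follows; the variable has to be set before `diffusers` is imported, because the flag is read only once.

# Usage sketch: force slow (eager) imports the way the doc-builder job does.
import os

os.environ["DIFFUSERS_SLOW_IMPORT"] = "1"  # must be set before importing diffusers

import diffusers  # noqa: E402  # pipelines and schedulers are imported eagerly now

print(diffusers.utils.DIFFUSERS_SLOW_IMPORT)  # True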
@@ -17,11 +17,10 @@ PEFT utilities: Utilities related to peft library
 from .import_utils import is_torch_available
-if is_torch_available():
-    import torch
 def recurse_remove_peft_layers(model):
+    if is_torch_available():
+        import torch
     r"""
     Recursively replace all instances of `LoraLayer` with corresponding new layers in `model`.
     """
...
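The peft_utils hunk applies the usual deferred-import pattern: the module can be imported even when torch is not installed, and torch is only pulled in once a function that really needs it runs (the real code guards this with `is_torch_available()`). Below is a hypothetical helper showing the shape of the pattern; `_strip_adapter_layers` is a made-up name, not diffusers API.

# Deferred-import sketch; not part of diffusers.
def _strip_adapter_layers(model):
    try:
        import torch  # imported lazily so this module loads without torch installed
    except ImportError:
        return model  # nothing to strip when torch is unavailable

    with torch.no_grad():
        # a real implementation would swap adapter layers for their base layers here
        return model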