Unverified Commit 144c3a8b authored by Patrick von Platen, committed by GitHub

[Imports] Fix many import bugs and make sure that doc builder CI test works correctly (#5176)

* [Doc builder] Ensure slow import for doc builder

* Apply suggestions from code review

* env for doc builder

* fix more

* [Diffusers] Set import to slow as env variable

* fix docs

* fix docs

* Apply suggestions from code review

* Apply suggestions from code review

* fix docs

* fix docs
parent 30a512ea
@@ -67,30 +67,30 @@ By default, `tqdm` progress bars are displayed during model download. [`logging.
 ## Base setters
-[[autodoc]] logging.set_verbosity_error
+[[autodoc]] utils.logging.set_verbosity_error
-[[autodoc]] logging.set_verbosity_warning
+[[autodoc]] utils.logging.set_verbosity_warning
-[[autodoc]] logging.set_verbosity_info
+[[autodoc]] utils.logging.set_verbosity_info
-[[autodoc]] logging.set_verbosity_debug
+[[autodoc]] utils.logging.set_verbosity_debug
 ## Other functions
-[[autodoc]] logging.get_verbosity
+[[autodoc]] utils.logging.get_verbosity
-[[autodoc]] logging.set_verbosity
+[[autodoc]] utils.logging.set_verbosity
-[[autodoc]] logging.get_logger
+[[autodoc]] utils.logging.get_logger
-[[autodoc]] logging.enable_default_handler
+[[autodoc]] utils.logging.enable_default_handler
-[[autodoc]] logging.disable_default_handler
+[[autodoc]] utils.logging.disable_default_handler
-[[autodoc]] logging.enable_explicit_format
+[[autodoc]] utils.logging.enable_explicit_format
-[[autodoc]] logging.reset_format
+[[autodoc]] utils.logging.reset_format
-[[autodoc]] logging.enable_progress_bar
+[[autodoc]] utils.logging.enable_progress_bar
-[[autodoc]] logging.disable_progress_bar
+[[autodoc]] utils.logging.disable_progress_bar
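For reference, the functions documented above live under `diffusers.utils.logging` (hence the path change); a brief usage sketch, assuming a standard diffusers install:

```python
from diffusers.utils import logging

# Raise the verbosity threshold so only errors are emitted.
logging.set_verbosity_error()

# Per-module logger that shares the library's handler and format.
logger = logging.get_logger(__name__)
logger.warning("this is filtered out at ERROR verbosity")

# Hide the tqdm bars shown during model downloads.
logging.disable_progress_bar()
```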
@@ -34,13 +34,7 @@ Make sure to check out the Schedulers [guide](/using-diffusers/schedulers) to le
   - load_lora_weights
   - save_lora_weights
-## StableDiffusionPipelineOutput
-[[autodoc]] pipelines.stable_diffusion.StableDiffusionPipelineOutput
 ## StableDiffusionXLInstructPix2PixPipeline
 [[autodoc]] StableDiffusionXLInstructPix2PixPipeline
   - __call__
   - all
-## StableDiffusionXLPipelineOutput
-[[autodoc]] pipelines.stable_diffusion_xl.StableDiffusionXLPipelineOutput
\ No newline at end of file
@@ -31,5 +31,5 @@ Make sure to check out the Schedulers [guide](/using-diffusers/schedulers) to le
   - __call__
 ## StableDiffusionSafePipelineOutput
-[[autodoc]] pipelines.semantic_stable_diffusion.SemanticStableDiffusionPipelineOutput
+[[autodoc]] pipelines.semantic_stable_diffusion.pipeline_output.SemanticStableDiffusionPipelineOutput
   - all
\ No newline at end of file
@@ -3,6 +3,7 @@ __version__ = "0.22.0.dev0"
 from typing import TYPE_CHECKING
 from .utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     is_flax_available,
@@ -414,7 +415,7 @@ except OptionalDependencyNotAvailable:
 else:
     _import_structure["pipelines"].extend(["MidiProcessor"])
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     from .configuration_utils import ConfigMixin
     try:
...
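With the flag wired through the top-level `__init__.py` above, the doc-builder job (or a local debugging session) can opt into the eager path by setting the environment variable before the first import. A hedged sketch; that `"1"` is accepted as truthy is an assumption:

```python
import os

# Must be set before the first `import diffusers`, since the flag is read at import time.
os.environ["DIFFUSERS_SLOW_IMPORT"] = "1"

import diffusers  # noqa: E402  # submodules resolve eagerly, so autodoc sees real objects
```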
@@ -14,7 +14,7 @@
 from typing import TYPE_CHECKING
-from ..utils import _LazyModule, is_flax_available, is_torch_available
+from ..utils import DIFFUSERS_SLOW_IMPORT, _LazyModule, is_flax_available, is_torch_available
 _import_structure = {}
@@ -43,7 +43,7 @@ if is_flax_available():
     _import_structure["vae_flax"] = ["FlaxAutoencoderKL"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     if is_torch_available():
         from .adapter import MultiAdapter, T2IAdapter
         from .autoencoder_asym_kl import AsymmetricAutoencoderKL
...
 from typing import TYPE_CHECKING
 from ..utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     get_objects_from_module,
@@ -252,7 +253,7 @@ except OptionalDependencyNotAvailable:
 else:
     _import_structure["spectrogram_diffusion"] = ["MidiProcessor", "SpectrogramDiffusionPipeline"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     try:
         if not is_torch_available():
             raise OptionalDependencyNotAvailable()
...
 from typing import TYPE_CHECKING
 from ...utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     get_objects_from_module,
@@ -26,7 +27,7 @@ else:
     _import_structure["pipeline_output"] = ["AltDiffusionPipelineOutput"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     try:
         if not (is_transformers_available() and is_torch_available()):
             raise OptionalDependencyNotAvailable()
...
@@ -29,7 +29,8 @@ from ...utils import deprecate, logging, replace_example_docstring
 from ...utils.torch_utils import randn_tensor
 from ..pipeline_utils import DiffusionPipeline
 from ..stable_diffusion.safety_checker import StableDiffusionSafetyChecker
-from . import AltDiffusionPipelineOutput, RobertaSeriesModelWithTransformation
+from .modeling_roberta_series import RobertaSeriesModelWithTransformation
+from .pipeline_output import AltDiffusionPipelineOutput
 logger = logging.get_logger(__name__)  # pylint: disable=invalid-name
...
@@ -31,7 +31,8 @@ from ...utils import PIL_INTERPOLATION, deprecate, logging, replace_example_docs
 from ...utils.torch_utils import randn_tensor
 from ..pipeline_utils import DiffusionPipeline
 from ..stable_diffusion.safety_checker import StableDiffusionSafetyChecker
-from . import AltDiffusionPipelineOutput, RobertaSeriesModelWithTransformation
+from .modeling_roberta_series import RobertaSeriesModelWithTransformation
+from .pipeline_output import AltDiffusionPipelineOutput
 logger = logging.get_logger(__name__)  # pylint: disable=invalid-name
...
 from typing import TYPE_CHECKING
-from ...utils import _LazyModule
+from ...utils import DIFFUSERS_SLOW_IMPORT, _LazyModule
 _import_structure = {
@@ -8,7 +8,7 @@ _import_structure = {
     "pipeline_audio_diffusion": ["AudioDiffusionPipeline"],
 }
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     from .mel import Mel
     from .pipeline_audio_diffusion import AudioDiffusionPipeline
...
 from typing import TYPE_CHECKING
 from ...utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     is_torch_available,
@@ -25,7 +26,7 @@ else:
     _import_structure["pipeline_audioldm"] = ["AudioLDMPipeline"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     try:
         if not (is_transformers_available() and is_torch_available() and is_transformers_version(">=", "4.27.0")):
             raise OptionalDependencyNotAvailable()
...
 from typing import TYPE_CHECKING
 from ...utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     get_objects_from_module,
@@ -25,7 +26,7 @@ else:
     _import_structure["pipeline_audioldm2"] = ["AudioLDM2Pipeline"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     try:
         if not (is_transformers_available() and is_torch_available() and is_transformers_version(">=", "4.27.0")):
             raise OptionalDependencyNotAvailable()
...
 from typing import TYPE_CHECKING
 from ...utils import (
+    DIFFUSERS_SLOW_IMPORT,
     _LazyModule,
 )
 _import_structure = {"pipeline_consistency_models": ["ConsistencyModelPipeline"]}
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     from .pipeline_consistency_models import ConsistencyModelPipeline
 else:
...
 from typing import TYPE_CHECKING
 from ...utils import (
+    DIFFUSERS_SLOW_IMPORT,
     OptionalDependencyNotAvailable,
     _LazyModule,
     get_objects_from_module,
@@ -40,7 +41,7 @@ else:
     _import_structure["pipeline_flax_controlnet"] = ["FlaxStableDiffusionControlNetPipeline"]
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     try:
         if not (is_transformers_available() and is_torch_available()):
             raise OptionalDependencyNotAvailable()
...
@@ -34,7 +34,7 @@ from ...utils import (
 )
 from ...utils.torch_utils import is_compiled_module, randn_tensor
 from ..pipeline_utils import DiffusionPipeline
-from ..stable_diffusion import StableDiffusionPipelineOutput
+from ..stable_diffusion.pipeline_output import StableDiffusionPipelineOutput
 from ..stable_diffusion.safety_checker import StableDiffusionSafetyChecker
 from .multicontrolnet import MultiControlNetModel
...
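The pipeline modules in the remaining hunks switch to importing output dataclasses from the submodule that defines them rather than from the package `__init__`, presumably so the import does not depend on what the lazy `__init__` re-exports. Illustrated with the class from the hunk above (the dotted path mirrors the diff):

```python
# Previously: resolved through the package __init__, whose exports are lazy.
# from diffusers.pipelines.stable_diffusion import StableDiffusionPipelineOutput

# Now: import straight from the defining submodule.
from diffusers.pipelines.stable_diffusion.pipeline_output import StableDiffusionPipelineOutput
```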
@@ -21,8 +21,6 @@ import torch
 import torch.nn.functional as F
 from transformers import CLIPTextModel, CLIPTextModelWithProjection, CLIPTokenizer
-from diffusers.pipelines.stable_diffusion_xl import StableDiffusionXLPipelineOutput
 from ...image_processor import PipelineImageInput, VaeImageProcessor
 from ...loaders import FromSingleFileMixin, StableDiffusionXLLoraLoaderMixin, TextualInversionLoaderMixin
 from ...models import AutoencoderKL, ControlNetModel, UNet2DConditionModel
@@ -41,6 +39,7 @@ from ...utils import (
 )
 from ...utils.torch_utils import is_compiled_module, randn_tensor
 from ..pipeline_utils import DiffusionPipeline
+from ..stable_diffusion_xl.pipeline_output import StableDiffusionXLPipelineOutput
 from .multicontrolnet import MultiControlNetModel
...
@@ -41,7 +41,7 @@ from ...utils import (
 )
 from ...utils.torch_utils import is_compiled_module, randn_tensor
 from ..pipeline_utils import DiffusionPipeline
-from ..stable_diffusion_xl import StableDiffusionXLPipelineOutput
+from ..stable_diffusion_xl.pipeline_output import StableDiffusionXLPipelineOutput
 if is_invisible_watermark_available():
...
@@ -41,7 +41,7 @@ from ...utils import (
 )
 from ...utils.torch_utils import is_compiled_module, randn_tensor
 from ..pipeline_utils import DiffusionPipeline
-from ..stable_diffusion_xl import StableDiffusionXLPipelineOutput
+from ..stable_diffusion_xl.pipeline_output import StableDiffusionXLPipelineOutput
 if is_invisible_watermark_available():
...
 from typing import TYPE_CHECKING
-from ...utils import _LazyModule
+from ...utils import DIFFUSERS_SLOW_IMPORT, _LazyModule
 _import_structure = {"pipeline_dance_diffusion": ["DanceDiffusionPipeline"]}
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     from .pipeline_dance_diffusion import DanceDiffusionPipeline
 else:
     import sys
...
 from typing import TYPE_CHECKING
-from ...utils import _LazyModule
+from ...utils import DIFFUSERS_SLOW_IMPORT, _LazyModule
 _import_structure = {"pipeline_ddim": ["DDIMPipeline"]}
-if TYPE_CHECKING:
+if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     from .pipeline_ddim import DDIMPipeline
 else:
     import sys
...