Unverified Commit b37dc3b3 authored by Patrick von Platen, committed by GitHub

Fix all missing optional import statements from pipeline folders (#4272)

* fix circular import

* fix imports when watermark not specified

* fix all pipelines
parent ff8f5808
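Every hunk below applies the same guarded-import pattern: probe the optional backends, raise OptionalDependencyNotAvailable when one is missing, and fall back to dummy placeholder objects instead of importing the real pipeline classes. A minimal, self-contained sketch of that pattern; the helper and class bodies are simplified stand-ins, not the actual diffusers implementations:

import importlib.util


class OptionalDependencyNotAvailable(Exception):
    """Signals that an optional backend is missing."""


def is_transformers_available() -> bool:
    # diffusers ships its own helper of the same name; find_spec is a simple equivalent.
    return importlib.util.find_spec("transformers") is not None


try:
    if not is_transformers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    # diffusers instead does `from ...utils.dummy_torch_and_transformers_objects import *`,
    # pulling in placeholders; a minimal placeholder looks like this:
    class AltDiffusionPipeline:
        def __init__(self, *args, **kwargs):
            raise ImportError("AltDiffusionPipeline requires the `transformers` backend.")
else:
    # Backend present: the real pipeline module would be imported here, e.g.
    # `from .pipeline_alt_diffusion import AltDiffusionPipeline`.
    pass

The point of the extra try/except (instead of a plain `if`) is that the module-level imports themselves never run when a backend is absent, so importing the package cannot fail at import time.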
@@ -25,7 +25,6 @@ import torch.nn.functional as F
 from huggingface_hub import hf_hub_download
 from torch import nn

-from .models.lora import LoRACompatibleConv, LoRACompatibleLinear, LoRAConv2dLayer, LoRALinearLayer
 from .utils import (
     DIFFUSERS_CACHE,
     HF_HUB_OFFLINE,
@@ -69,7 +68,7 @@ CUSTOM_DIFFUSION_WEIGHT_NAME_SAFE = "pytorch_custom_diffusion_weights.safetensor
 class PatchedLoraProjection(nn.Module):
     def __init__(self, regular_linear_layer, lora_scale=1, network_alpha=None, rank=4, dtype=None):
         super().__init__()
-        from .models.attention_processor import LoRALinearLayer
+        from .models.lora import LoRALinearLayer

         self.regular_linear_layer = regular_linear_layer
@@ -244,6 +243,7 @@ class UNet2DConditionLoadersMixin:
             SlicedAttnAddedKVProcessor,
             XFormersAttnProcessor,
         )
+        from .models.lora import LoRACompatibleConv, LoRACompatibleLinear, LoRAConv2dLayer, LoRALinearLayer

         cache_dir = kwargs.pop("cache_dir", DIFFUSERS_CACHE)
         force_download = kwargs.pop("force_download", False)
...
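The loaders.py hunks above implement the "fix circular import" bullet: the module-level `from .models.lora import ...` is dropped, and the import is re-done inside the methods that need it (the `+` lines above). Deferring an import this way breaks an import cycle, because the inner import only runs when the function is called, after both modules have finished loading. A generic two-file sketch of the idea; file names, symbols, and the direction of the cycle are illustrative, not copied from diffusers:

# loaders_demo.py : must NOT import lora_demo at module level, or importing
# either file would fail with a "partially initialized module" error.
LORA_WEIGHT_NAME = "pytorch_lora_weights.bin"


def load_lora(path):
    from lora_demo import LoRALinearLayer  # deferred: resolved at call time, not import time
    return LoRALinearLayer(), path


# lora_demo.py : imports loaders_demo at module level; the cycle is harmless
# because loaders_demo defers its half of it into the function above.
from loaders_demo import LORA_WEIGHT_NAME


class LoRALinearLayer:
    weight_name = LORA_WEIGHT_NAME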
@@ -5,7 +5,7 @@ import numpy as np
 import PIL
 from PIL import Image

-from ...utils import BaseOutput, is_torch_available, is_transformers_available
+from ...utils import BaseOutput, OptionalDependencyNotAvailable, is_torch_available, is_transformers_available

 @dataclass
@@ -27,7 +27,12 @@ class AltDiffusionPipelineOutput(BaseOutput):
     nsfw_content_detected: Optional[List[bool]]

-if is_transformers_available() and is_torch_available():
+try:
+    if not (is_transformers_available() and is_torch_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ...utils.dummy_torch_and_transformers_objects import ShapEPipeline
+else:
     from .modeling_roberta_series import RobertaSeriesModelWithTransformation
     from .pipeline_alt_diffusion import AltDiffusionPipeline
     from .pipeline_alt_diffusion_img2img import AltDiffusionImg2ImgPipeline
@@ -7,7 +7,12 @@ from ...utils import (
 )

-if is_transformers_available() and is_torch_available() and is_invisible_watermark_available():
+try:
+    if not (is_transformers_available() and is_torch_available() and is_invisible_watermark_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ...utils.dummy_torch_and_transformers_and_invisible_watermark_objects import *  # noqa F403
+else:
     from .pipeline_controlnet_sd_xl import StableDiffusionXLControlNetPipeline
...
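This package (like the Stable Diffusion XL package further down) additionally needs the invisible_watermark backend, which is what the "fix imports when watermark not specified" bullet refers to: the guard also probes is_invisible_watermark_available() and falls back to the watermark-aware dummy module. A caller-side sketch of the same probe; the watermark helper's module path is assumed from the diffusers 0.19-era layout, and the fallback branch is illustrative:

from diffusers.utils import is_invisible_watermark_available

if is_invisible_watermark_available():
    # Only import the SD-XL watermark helper when the optional backend is installed.
    from diffusers.pipelines.stable_diffusion_xl.watermark import StableDiffusionXLWatermarker

    watermarker = StableDiffusionXLWatermarker()
else:
    watermarker = None  # skip watermarking rather than crashing on import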
@@ -2,7 +2,6 @@ from ...utils import (
     OptionalDependencyNotAvailable,
     is_torch_available,
     is_transformers_available,
-    is_transformers_version,
 )

@@ -10,7 +9,7 @@ try:
     if not (is_transformers_available() and is_torch_available()):
         raise OptionalDependencyNotAvailable()
 except OptionalDependencyNotAvailable:
-    from ...utils.dummy_torch_and_transformers_objects import KandinskyPipeline, KandinskyPriorPipeline
+    from ...utils.dummy_torch_and_transformers_objects import *
 else:
     from .pipeline_kandinsky import KandinskyPipeline
     from .pipeline_kandinsky_img2img import KandinskyImg2ImgPipeline
...
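Note the second change in this hunk: the fallback previously imported only the KandinskyPipeline and KandinskyPriorPipeline dummies, so other public names were simply missing when torch or transformers was absent; the wildcard import pulls in a placeholder for every class. A rough sketch of what such a dummies module contains; the real dummy_torch_and_transformers_objects.py is generated by repo tooling, and the class bodies here are simplified:

# dummy_objects.py (sketch): one placeholder per public pipeline class, so that
# `from .dummy_objects import *` keeps every name importable without the backends.
class _DummyPipeline:
    _required_backends = ("torch", "transformers")

    def __init__(self, *args, **kwargs):
        raise ImportError(
            "This pipeline requires the following backends: "
            + ", ".join(self._required_backends)
        )


class KandinskyPipeline(_DummyPipeline):
    pass


class KandinskyPriorPipeline(_DummyPipeline):
    pass


class KandinskyImg2ImgPipeline(_DummyPipeline):
    pass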
-from .pipeline_kandinsky2_2 import KandinskyV22Pipeline
-from .pipeline_kandinsky2_2_controlnet import KandinskyV22ControlnetPipeline
-from .pipeline_kandinsky2_2_controlnet_img2img import KandinskyV22ControlnetImg2ImgPipeline
-from .pipeline_kandinsky2_2_img2img import KandinskyV22Img2ImgPipeline
-from .pipeline_kandinsky2_2_inpainting import KandinskyV22InpaintPipeline
-from .pipeline_kandinsky2_2_prior import KandinskyV22PriorPipeline
-from .pipeline_kandinsky2_2_prior_emb2emb import KandinskyV22PriorEmb2EmbPipeline
+from ...utils import (
+    OptionalDependencyNotAvailable,
+    is_torch_available,
+    is_transformers_available,
+)
+
+try:
+    if not (is_transformers_available() and is_torch_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ...utils.dummy_torch_and_transformers_objects import *
+else:
+    from .pipeline_kandinsky2_2 import KandinskyV22Pipeline
+    from .pipeline_kandinsky2_2_controlnet import KandinskyV22ControlnetPipeline
+    from .pipeline_kandinsky2_2_controlnet_img2img import KandinskyV22ControlnetImg2ImgPipeline
+    from .pipeline_kandinsky2_2_img2img import KandinskyV22Img2ImgPipeline
+    from .pipeline_kandinsky2_2_inpainting import KandinskyV22InpaintPipeline
+    from .pipeline_kandinsky2_2_prior import KandinskyV22PriorPipeline
+    from .pipeline_kandinsky2_2_prior_emb2emb import KandinskyV22PriorEmb2EmbPipeline
-from ...utils import is_transformers_available
+from ...utils import OptionalDependencyNotAvailable, is_torch_available, is_transformers_available
 from .pipeline_latent_diffusion_superresolution import LDMSuperResolutionPipeline

-if is_transformers_available():
+try:
+    if not (is_transformers_available() and is_torch_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ...utils.dummy_torch_and_transformers_objects import ShapEPipeline
+else:
     from .pipeline_latent_diffusion import LDMBertModel, LDMTextToImagePipeline
@@ -5,9 +5,14 @@ import numpy as np
 import PIL
 from PIL import Image

-from ...utils import is_torch_available, is_transformers_available
+from ...utils import OptionalDependencyNotAvailable, is_torch_available, is_transformers_available

-if is_transformers_available() and is_torch_available():
+try:
+    if not (is_transformers_available() and is_torch_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ...utils.dummy_torch_and_transformers_objects import ShapEPipeline
+else:
     from .image_encoder import PaintByExampleImageEncoder
     from .pipeline_paint_by_example import PaintByExamplePipeline
@@ -6,7 +6,7 @@ import numpy as np
 import PIL
 from PIL import Image

-from ...utils import BaseOutput, is_torch_available, is_transformers_available
+from ...utils import BaseOutput, OptionalDependencyNotAvailable, is_torch_available, is_transformers_available

 @dataclass
@@ -27,5 +27,10 @@ class SemanticStableDiffusionPipelineOutput(BaseOutput):
     nsfw_content_detected: Optional[List[bool]]

-if is_transformers_available() and is_torch_available():
+try:
+    if not (is_transformers_available() and is_torch_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ...utils.dummy_torch_and_transformers_objects import *  # noqa F403
+else:
     from .pipeline_semantic_stable_diffusion import SemanticStableDiffusionPipeline
@@ -6,7 +6,7 @@ import numpy as np
 import PIL
 from PIL import Image

-from ...utils import BaseOutput, is_torch_available, is_transformers_available
+from ...utils import BaseOutput, OptionalDependencyNotAvailable, is_torch_available, is_transformers_available

 @dataclass
@@ -66,6 +66,11 @@ class StableDiffusionSafePipelineOutput(BaseOutput):
     applied_safety_concept: Optional[str]

-if is_transformers_available() and is_torch_available():
+try:
+    if not (is_transformers_available() and is_torch_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ...utils.dummy_torch_and_transformers_objects import *
+else:
     from .pipeline_stable_diffusion_safe import StableDiffusionPipelineSafe
     from .safety_checker import SafeStableDiffusionSafetyChecker
@@ -4,7 +4,13 @@ from typing import List, Optional, Union
 import numpy as np
 import PIL

-from ...utils import BaseOutput, is_invisible_watermark_available, is_torch_available, is_transformers_available
+from ...utils import (
+    BaseOutput,
+    OptionalDependencyNotAvailable,
+    is_invisible_watermark_available,
+    is_torch_available,
+    is_transformers_available,
+)

 @dataclass
@@ -21,7 +27,12 @@ class StableDiffusionXLPipelineOutput(BaseOutput):
     images: Union[List[PIL.Image.Image], np.ndarray]

-if is_transformers_available() and is_torch_available() and is_invisible_watermark_available():
+try:
+    if not (is_transformers_available() and is_torch_available() and is_invisible_watermark_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ...utils.dummy_torch_and_transformers_and_invisible_watermark_objects import *  # noqa F403
+else:
     from .pipeline_stable_diffusion_xl import StableDiffusionXLPipeline
     from .pipeline_stable_diffusion_xl_img2img import StableDiffusionXLImg2ImgPipeline
     from .pipeline_stable_diffusion_xl_inpaint import StableDiffusionXLInpaintPipeline
...
-from ...utils import is_torch_available, is_transformers_available
+from ...utils import OptionalDependencyNotAvailable, is_torch_available, is_transformers_available

-if is_transformers_available() and is_torch_available():
+try:
+    if not (is_transformers_available() and is_torch_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ...utils.dummy_torch_and_transformers_objects import *
+else:
     from .pipeline_vq_diffusion import LearnedClassifierFreeSamplingEmbeddings, VQDiffusionPipeline
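With every pipeline package guarded this way, importing diffusers in an environment that lacks an optional backend no longer fails at import time; the failure is deferred to the point of use. A hedged usage sketch: it only behaves as described when the backends are actually missing, and the error text is paraphrased rather than quoted:

# In an environment WITHOUT torch/transformers installed:
from diffusers import VQDiffusionPipeline  # succeeds: resolves to the dummy placeholder

# Using the placeholder is what fails, with a message naming the missing backends
# (paraphrased; the exact wording comes from diffusers' requires_backends helper):
pipe = VQDiffusionPipeline.from_pretrained("microsoft/vq-diffusion-ithq")
# ImportError: VQDiffusionPipeline requires the PyTorch and transformers libraries ...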