Unverified commit 2e8668f0, authored by Patrick von Platen and committed by GitHub

Correct controlnet out of list error (#3928)

* Correct controlnet out of list error

* Apply suggestions from code review

* correct tests

* correct tests

* fix

* test all

* Apply suggestions from code review

* test all

* test all

* Apply suggestions from code review

* Apply suggestions from code review

* fix more tests

* Fix more

* Apply suggestions from code review

* finish

* Apply suggestions from code review

* Update src/diffusers/schedulers/scheduling_k_dpm_2_ancestral_discrete.py

* finish
parent b298484f
@@ -30,7 +30,7 @@ from diffusers.utils import load_numpy, skip_mps, slow
 from diffusers.utils.testing_utils import require_torch_gpu
 from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
-from ..test_pipelines_common import PipelineLatentTesterMixin, PipelineTesterMixin
+from ..test_pipelines_common import PipelineKarrasSchedulerTesterMixin, PipelineLatentTesterMixin, PipelineTesterMixin
 torch.backends.cuda.matmul.allow_tf32 = False
@@ -38,7 +38,7 @@ torch.backends.cuda.matmul.allow_tf32 = False
 @skip_mps
 class StableDiffusionAttendAndExcitePipelineFastTests(
-    PipelineLatentTesterMixin, PipelineTesterMixin, unittest.TestCase
+    PipelineLatentTesterMixin, PipelineKarrasSchedulerTesterMixin, PipelineTesterMixin, unittest.TestCase
 ):
     pipeline_class = StableDiffusionAttendAndExcitePipeline
     test_attention_slicing = False
......
@@ -57,14 +57,16 @@ from ..pipeline_params import (
     TEXT_GUIDED_IMAGE_VARIATION_PARAMS,
     TEXT_TO_IMAGE_IMAGE_PARAMS,
 )
-from ..test_pipelines_common import PipelineLatentTesterMixin, PipelineTesterMixin
+from ..test_pipelines_common import PipelineKarrasSchedulerTesterMixin, PipelineLatentTesterMixin, PipelineTesterMixin
 enable_full_determinism()
 @skip_mps
-class StableDiffusionDepth2ImgPipelineFastTests(PipelineLatentTesterMixin, PipelineTesterMixin, unittest.TestCase):
+class StableDiffusionDepth2ImgPipelineFastTests(
+    PipelineLatentTesterMixin, PipelineKarrasSchedulerTesterMixin, PipelineTesterMixin, unittest.TestCase
+):
     pipeline_class = StableDiffusionDepth2ImgPipeline
     test_save_load_optional_components = False
     params = TEXT_GUIDED_IMAGE_VARIATION_PARAMS - {"height", "width"}
......
@@ -27,13 +27,15 @@ from diffusers.utils import floats_tensor, load_image, load_numpy, torch_device
 from diffusers.utils.testing_utils import enable_full_determinism, require_torch_gpu, slow
 from ..pipeline_params import TEXT_GUIDED_IMAGE_INPAINTING_BATCH_PARAMS, TEXT_GUIDED_IMAGE_INPAINTING_PARAMS
-from ..test_pipelines_common import PipelineLatentTesterMixin, PipelineTesterMixin
+from ..test_pipelines_common import PipelineKarrasSchedulerTesterMixin, PipelineLatentTesterMixin, PipelineTesterMixin
 enable_full_determinism()
-class StableDiffusion2InpaintPipelineFastTests(PipelineLatentTesterMixin, PipelineTesterMixin, unittest.TestCase):
+class StableDiffusion2InpaintPipelineFastTests(
+    PipelineLatentTesterMixin, PipelineKarrasSchedulerTesterMixin, PipelineTesterMixin, unittest.TestCase
+):
     pipeline_class = StableDiffusionInpaintPipeline
     params = TEXT_GUIDED_IMAGE_INPAINTING_PARAMS
     batch_params = TEXT_GUIDED_IMAGE_INPAINTING_BATCH_PARAMS
......
@@ -21,6 +21,7 @@ import numpy as np
 import torch
 from transformers import CLIPTextConfig, CLIPTextModel, CLIPTokenizer
+import diffusers
 from diffusers import (
     AutoencoderKL,
     EulerDiscreteScheduler,
@@ -28,17 +29,25 @@ from diffusers import (
     StableDiffusionPipeline,
     UNet2DConditionModel,
 )
+from diffusers.schedulers import KarrasDiffusionSchedulers
 from diffusers.utils import floats_tensor, load_image, load_numpy, slow, torch_device
 from diffusers.utils.testing_utils import enable_full_determinism, require_torch_gpu
 from ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS
-from ..test_pipelines_common import PipelineLatentTesterMixin, PipelineTesterMixin
+from ..test_pipelines_common import PipelineKarrasSchedulerTesterMixin, PipelineLatentTesterMixin, PipelineTesterMixin
 enable_full_determinism()
-class StableDiffusionLatentUpscalePipelineFastTests(PipelineLatentTesterMixin, PipelineTesterMixin, unittest.TestCase):
+def check_same_shape(tensor_list):
+    shapes = [tensor.shape for tensor in tensor_list]
+    return all(shape == shapes[0] for shape in shapes[1:])
+class StableDiffusionLatentUpscalePipelineFastTests(
+    PipelineLatentTesterMixin, PipelineKarrasSchedulerTesterMixin, PipelineTesterMixin, unittest.TestCase
+):
     pipeline_class = StableDiffusionLatentUpscalePipeline
     params = TEXT_GUIDED_IMAGE_VARIATION_PARAMS - {
         "height",
@@ -185,6 +194,42 @@ class StableDiffusionLatentUpscalePipelineFastTests(PipelineLatentTesterMixin, P
     def test_save_load_optional_components(self):
         super().test_save_load_optional_components(expected_max_difference=3e-3)
+    def test_karras_schedulers_shape(self):
+        skip_schedulers = [
+            "DDIMScheduler",
+            "DDPMScheduler",
+            "PNDMScheduler",
+            "HeunDiscreteScheduler",
+            "EulerAncestralDiscreteScheduler",
+            "KDPM2DiscreteScheduler",
+            "KDPM2AncestralDiscreteScheduler",
+            "DPMSolverSDEScheduler",
+        ]
+        components = self.get_dummy_components()
+        pipe = self.pipeline_class(**components)
+        # make sure that PNDM does not need warm-up
+        pipe.scheduler.register_to_config(skip_prk_steps=True)
+        pipe.to(torch_device)
+        pipe.set_progress_bar_config(disable=None)
+        inputs = self.get_dummy_inputs(torch_device)
+        inputs["num_inference_steps"] = 2
+        outputs = []
+        for scheduler_enum in KarrasDiffusionSchedulers:
+            if scheduler_enum.name in skip_schedulers:
+                # these schedulers are not supported by this pipeline
+                continue
+            scheduler_cls = getattr(diffusers, scheduler_enum.name)
+            pipe.scheduler = scheduler_cls.from_config(pipe.scheduler.config)
+            output = pipe(**inputs)[0]
+            outputs.append(output)
+        assert check_same_shape(outputs)
 @require_torch_gpu
 @slow
......
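Aside: the latent-upscale tests above override test_karras_schedulers_shape with their own skip list because several Karras schedulers cannot drive this pipeline; the remaining schedulers are only checked for producing outputs of a consistent shape via the check_same_shape helper added in the same file. A minimal, self-contained sketch of that shape check (the dummy tensors below are illustrative stand-ins for pipeline outputs and are not part of this commit):

import torch

def check_same_shape(tensor_list):
    # True when every tensor has the same shape as the first one
    shapes = [tensor.shape for tensor in tensor_list]
    return all(shape == shapes[0] for shape in shapes[1:])

outputs = [torch.randn(1, 3, 64, 64) for _ in range(3)]
assert check_same_shape(outputs)  # identical shapes pass
assert not check_same_shape(outputs + [torch.randn(1, 3, 32, 32)])  # a mismatched shape is detected
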
@@ -16,13 +16,20 @@ from diffusers.pipelines.stable_diffusion.stable_unclip_image_normalizer import
 from diffusers.utils.testing_utils import enable_full_determinism, load_numpy, require_torch_gpu, slow, torch_device
 from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
-from ..test_pipelines_common import PipelineLatentTesterMixin, PipelineTesterMixin, assert_mean_pixel_difference
+from ..test_pipelines_common import (
+    PipelineKarrasSchedulerTesterMixin,
+    PipelineLatentTesterMixin,
+    PipelineTesterMixin,
+    assert_mean_pixel_difference,
+)
 enable_full_determinism()
-class StableUnCLIPPipelineFastTests(PipelineLatentTesterMixin, PipelineTesterMixin, unittest.TestCase):
+class StableUnCLIPPipelineFastTests(
+    PipelineLatentTesterMixin, PipelineKarrasSchedulerTesterMixin, PipelineTesterMixin, unittest.TestCase
+):
     pipeline_class = StableUnCLIPPipeline
     params = TEXT_TO_IMAGE_PARAMS
     batch_params = TEXT_TO_IMAGE_BATCH_PARAMS
......
@@ -30,6 +30,7 @@ from diffusers.utils.testing_utils import (
 from ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS
 from ..test_pipelines_common import (
+    PipelineKarrasSchedulerTesterMixin,
     PipelineLatentTesterMixin,
     PipelineTesterMixin,
     assert_mean_pixel_difference,
@@ -39,7 +40,9 @@ from ..test_pipelines_common import (
 enable_full_determinism()
-class StableUnCLIPImg2ImgPipelineFastTests(PipelineLatentTesterMixin, PipelineTesterMixin, unittest.TestCase):
+class StableUnCLIPImg2ImgPipelineFastTests(
+    PipelineLatentTesterMixin, PipelineKarrasSchedulerTesterMixin, PipelineTesterMixin, unittest.TestCase
+):
     pipeline_class = StableUnCLIPImg2ImgPipeline
     params = TEXT_GUIDED_IMAGE_VARIATION_PARAMS
     batch_params = TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS
......
@@ -14,6 +14,7 @@ import torch
 import diffusers
 from diffusers import DiffusionPipeline
 from diffusers.image_processor import VaeImageProcessor
+from diffusers.schedulers import KarrasDiffusionSchedulers
 from diffusers.utils import logging
 from diffusers.utils.import_utils import is_accelerate_available, is_accelerate_version, is_xformers_available
 from diffusers.utils.testing_utils import require_torch, torch_device
@@ -26,6 +27,11 @@ def to_np(tensor):
     return tensor
+def check_same_shape(tensor_list):
+    shapes = [tensor.shape for tensor in tensor_list]
+    return all(shape == shapes[0] for shape in shapes[1:])
 class PipelineLatentTesterMixin:
     """
     This mixin is designed to be used with PipelineTesterMixin and unittest.TestCase classes.
@@ -155,6 +161,46 @@ class PipelineLatentTesterMixin:
         self.assertLess(max_diff, 1e-4, "passing latents as image input generate different result from passing image")
+@require_torch
+class PipelineKarrasSchedulerTesterMixin:
+    """
+    This mixin is designed to be used with unittest.TestCase classes.
+    It provides a common test for every PyTorch pipeline that makes use of KarrasDiffusionSchedulers:
+    all supported Karras schedulers must produce outputs of the same shape.
+    """
+    def test_karras_schedulers_shape(self):
+        components = self.get_dummy_components()
+        pipe = self.pipeline_class(**components)
+        # make sure that PNDM does not need warm-up
+        pipe.scheduler.register_to_config(skip_prk_steps=True)
+        pipe.to(torch_device)
+        pipe.set_progress_bar_config(disable=None)
+        inputs = self.get_dummy_inputs(torch_device)
+        inputs["num_inference_steps"] = 2
+        if "strength" in inputs:
+            inputs["num_inference_steps"] = 4
+            inputs["strength"] = 0.5
+        outputs = []
+        for scheduler_enum in KarrasDiffusionSchedulers:
+            if "KDPM2" in scheduler_enum.name:
+                inputs["num_inference_steps"] = 5
+            scheduler_cls = getattr(diffusers, scheduler_enum.name)
+            pipe.scheduler = scheduler_cls.from_config(pipe.scheduler.config)
+            output = pipe(**inputs)[0]
+            outputs.append(output)
+            if "KDPM2" in scheduler_enum.name:
+                inputs["num_inference_steps"] = 2
+        assert check_same_shape(outputs)
 @require_torch
 class PipelineTesterMixin:
     """
......
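Taken together, the hunks above let any pipeline fast-test class opt into the shared shape test simply by adding PipelineKarrasSchedulerTesterMixin to its bases, since test_karras_schedulers_shape only relies on the get_dummy_components/get_dummy_inputs fixtures those classes already define. The core mechanism, instantiating every Karras scheduler from an existing config via from_config, can also be exercised outside the test harness; a rough sketch, assuming diffusers plus the optional scheduler dependencies (e.g. scipy, torchsde) are installed and using DDIMScheduler's default config purely as an illustrative starting point:

import diffusers
from diffusers import DDIMScheduler
from diffusers.schedulers import KarrasDiffusionSchedulers

# any scheduler config works as a starting point; keys a scheduler does not know are ignored by from_config
base_config = DDIMScheduler().config

for scheduler_enum in KarrasDiffusionSchedulers:
    # the enum member names match the top-level diffusers class names
    scheduler_cls = getattr(diffusers, scheduler_enum.name)
    scheduler = scheduler_cls.from_config(base_config)
    print(scheduler_enum.name, "->", type(scheduler).__name__)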