Unverified Commit 7aa6af11 authored by Dhruv Nair, committed by GitHub

[Refactor] Move testing utils out of src (#12238)

* update

* update

* update

* update

* update

* merge main

* Revert "merge main"

This reverts commit 65efbcead58644b31596ed2d714f7cee0e0238d3.
parent 87b800e1
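The pattern repeated across every hunk below is a single import-path change: test helpers that used to be imported from the installed package (diffusers.utils.testing_utils) are now imported relatively from a testing_utils module inside the tests tree. A minimal sketch of the before/after in a pipeline test module is shown here; the exact target path (e.g. tests/testing_utils.py, three package levels above tests/pipelines/<name>/test_*.py) is an assumption inferred from the three-dot relative import and is not stated in this diff.

# Before: helpers lived inside the installed package (src/diffusers/utils/testing_utils.py).
#   from diffusers.utils.testing_utils import enable_full_determinism, torch_device

# After: the same helpers are imported relatively from the tests tree
# (assumed location: tests/testing_utils.py -- hypothetical, not shown in the diff).
from ...testing_utils import enable_full_determinism, torch_device

enable_full_determinism()  # seed RNGs and enable deterministic kernels for the test run
print(torch_device)        # resolved accelerator for this machine, e.g. "cuda" or "cpu"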
@@ -24,10 +24,10 @@ from diffusers import (
     SkyReelsV2Transformer3DModel,
     UniPCMultistepScheduler,
 )
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     enable_full_determinism,
 )
 from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
 from ..test_pipelines_common import (
     PipelineTesterMixin,
...
@@ -28,8 +28,8 @@ from diffusers import (
     SkyReelsV2Transformer3DModel,
     UniPCMultistepScheduler,
 )
-from diffusers.utils.testing_utils import enable_full_determinism
+from ...testing_utils import enable_full_determinism
 from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
 from ..test_pipelines_common import PipelineTesterMixin
...
@@ -26,11 +26,11 @@ from diffusers import (
     SkyReelsV2Transformer3DModel,
     UniPCMultistepScheduler,
 )
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     enable_full_determinism,
     torch_device,
 )
 from ..pipeline_params import TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
 from ..test_pipelines_common import (
     PipelineTesterMixin,
...
@@ -31,8 +31,8 @@ from diffusers import (
     SkyReelsV2Transformer3DModel,
     UniPCMultistepScheduler,
 )
-from diffusers.utils.testing_utils import enable_full_determinism
+from ...testing_utils import enable_full_determinism
 from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
 from ..test_pipelines_common import PipelineTesterMixin
...
@@ -32,7 +32,8 @@ from diffusers import (
     StableAudioProjectionModel,
 )
 from diffusers.utils import is_xformers_available
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     Expectations,
     backend_empty_cache,
     enable_full_determinism,
@@ -40,7 +41,6 @@ from diffusers.utils.testing_utils import (
     require_torch_accelerator,
     torch_device,
 )
 from ..pipeline_params import TEXT_TO_AUDIO_BATCH_PARAMS
 from ..test_pipelines_common import PipelineTesterMixin
...
@@ -22,8 +22,8 @@ from transformers import CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokeni
 from diffusers import DDPMWuerstchenScheduler, StableCascadeCombinedPipeline
 from diffusers.models import StableCascadeUNet
 from diffusers.pipelines.wuerstchen import PaellaVQModel
-from diffusers.utils.testing_utils import enable_full_determinism, require_torch_accelerator, torch_device
+from ...testing_utils import enable_full_determinism, require_torch_accelerator, torch_device
 from ..test_pipelines_common import PipelineTesterMixin
...
@@ -23,7 +23,9 @@ from transformers import CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokeni
 from diffusers import DDPMWuerstchenScheduler, StableCascadeDecoderPipeline
 from diffusers.models import StableCascadeUNet
 from diffusers.pipelines.wuerstchen import PaellaVQModel
-from diffusers.utils.testing_utils import (
+from diffusers.utils.torch_utils import randn_tensor
+from ...testing_utils import (
     backend_empty_cache,
     enable_full_determinism,
     load_numpy,
@@ -34,8 +36,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
-from diffusers.utils.torch_utils import randn_tensor
 from ..test_pipelines_common import PipelineTesterMixin
...
@@ -23,7 +23,8 @@ from transformers import CLIPTextConfig, CLIPTextModelWithProjection, CLIPTokeni
 from diffusers import DDPMWuerstchenScheduler, StableCascadePriorPipeline
 from diffusers.models import StableCascadeUNet
 from diffusers.utils.import_utils import is_peft_available
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     enable_full_determinism,
     load_numpy,
...
@@ -27,8 +27,8 @@ from diffusers import (
     OnnxStableDiffusionPipeline,
     PNDMScheduler,
 )
-from diffusers.utils.testing_utils import is_onnx_available, nightly, require_onnxruntime, require_torch_gpu
+from ...testing_utils import is_onnx_available, nightly, require_onnxruntime, require_torch_gpu
 from ..test_pipelines_onnx_common import OnnxPipelineTesterMixin
...
@@ -26,7 +26,8 @@ from diffusers import (
     OnnxStableDiffusionImg2ImgPipeline,
     PNDMScheduler,
 )
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     floats_tensor,
     is_onnx_available,
     load_image,
@@ -34,7 +35,6 @@ from diffusers.utils.testing_utils import (
     require_onnxruntime,
     require_torch_gpu,
 )
 from ..test_pipelines_onnx_common import OnnxPipelineTesterMixin
...
@@ -18,14 +18,14 @@ import unittest
 import numpy as np
 from diffusers import LMSDiscreteScheduler, OnnxStableDiffusionInpaintPipeline
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     is_onnx_available,
     load_image,
     nightly,
     require_onnxruntime,
     require_torch_gpu,
 )
 from ..test_pipelines_onnx_common import OnnxPipelineTesterMixin
...
@@ -26,7 +26,8 @@ from diffusers import (
     OnnxStableDiffusionUpscalePipeline,
     PNDMScheduler,
 )
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     floats_tensor,
     is_onnx_available,
     load_image,
@@ -34,7 +35,6 @@ from diffusers.utils.testing_utils import (
     require_onnxruntime,
     require_torch_gpu,
 )
 from ..test_pipelines_onnx_common import OnnxPipelineTesterMixin
...
@@ -41,7 +41,8 @@ from diffusers import (
     UNet2DConditionModel,
     logging,
 )
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     CaptureLogger,
     backend_empty_cache,
     backend_max_memory_allocated,
@@ -58,7 +59,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import (
     TEXT_TO_IMAGE_BATCH_PARAMS,
     TEXT_TO_IMAGE_CALLBACK_CFG_PARAMS,
...
@@ -33,7 +33,8 @@ from diffusers import (
     StableDiffusionImg2ImgPipeline,
     UNet2DConditionModel,
 )
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     backend_max_memory_allocated,
     backend_reset_max_memory_allocated,
@@ -48,7 +49,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import (
     IMAGE_TO_IMAGE_IMAGE_PARAMS,
     TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS,
...
@@ -35,7 +35,8 @@ from diffusers import (
     StableDiffusionInpaintPipeline,
     UNet2DConditionModel,
 )
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     Expectations,
     backend_empty_cache,
     backend_max_memory_allocated,
@@ -50,7 +51,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import (
     TEXT_GUIDED_IMAGE_INPAINTING_BATCH_PARAMS,
     TEXT_GUIDED_IMAGE_INPAINTING_PARAMS,
...
@@ -32,7 +32,8 @@ from diffusers import (
     UNet2DConditionModel,
 )
 from diffusers.image_processor import VaeImageProcessor
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     backend_max_memory_allocated,
     backend_reset_max_memory_allocated,
@@ -44,7 +45,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import (
     IMAGE_TO_IMAGE_IMAGE_PARAMS,
     TEXT_GUIDED_IMAGE_INPAINTING_BATCH_PARAMS,
...
@@ -31,7 +31,8 @@ from diffusers import (
     UNet2DConditionModel,
     logging,
 )
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     CaptureLogger,
     backend_empty_cache,
     backend_max_memory_allocated,
@@ -45,7 +46,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import (
     TEXT_TO_IMAGE_BATCH_PARAMS,
     TEXT_TO_IMAGE_CALLBACK_CFG_PARAMS,
...
@@ -36,7 +36,8 @@ from diffusers import (
     StableDiffusionDepth2ImgPipeline,
     UNet2DConditionModel,
 )
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     enable_full_determinism,
     floats_tensor,
@@ -50,7 +51,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import (
     IMAGE_TO_IMAGE_IMAGE_PARAMS,
     TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS,
...
@@ -23,7 +23,8 @@ from PIL import Image
 from transformers import CLIPTextConfig, CLIPTextModel, CLIPTokenizer
 from diffusers import AutoencoderKL, PNDMScheduler, StableDiffusionInpaintPipeline, UNet2DConditionModel
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     backend_max_memory_allocated,
     backend_reset_max_memory_allocated,
@@ -36,7 +37,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import (
     TEXT_GUIDED_IMAGE_INPAINTING_BATCH_PARAMS,
     TEXT_GUIDED_IMAGE_INPAINTING_PARAMS,
...
@@ -30,7 +30,8 @@ from diffusers import (
     UNet2DConditionModel,
 )
 from diffusers.schedulers import KarrasDiffusionSchedulers
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     enable_full_determinism,
     floats_tensor,
@@ -40,7 +41,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS
 from ..test_pipelines_common import PipelineKarrasSchedulerTesterMixin, PipelineLatentTesterMixin, PipelineTesterMixin
...