Unverified Commit 7aa6af11 authored by Dhruv Nair, committed by GitHub

[Refactor] Move testing utils out of src (#12238)

* update

* update

* update

* update

* update

* merge main

* Revert "merge main"

This reverts commit 65efbcead58644b31596ed2d714f7cee0e0238d3.
parent 87b800e1
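
The changes that follow are mechanical: every pipeline test that pulled helpers such as enable_full_determinism, torch_device, slow, or backend_empty_cache from diffusers.utils.testing_utils (inside src/) now imports them through a three-dot relative import from a testing_utils module in the test tree itself, while runtime utilities such as randn_tensor keep coming from diffusers.utils.torch_utils. A minimal sketch of the rewrite as a regex pass, assuming the files sit under tests/pipelines/<pipeline_name>/ (the helper below is illustrative only, not part of this PR):

import re

# Hypothetical helper showing the import rewrite applied throughout the diff:
# test-only helpers stop being imported from the installed library and are
# taken from the test suite's own testing_utils module instead.
def rewrite_testing_utils_import(source: str, package_depth: int = 3) -> str:
    dots = "." * package_depth  # "..." for files under tests/pipelines/<pipeline_name>/
    return re.sub(
        r"from diffusers\.utils\.testing_utils import",
        f"from {dots}testing_utils import",
        source,
    )

if __name__ == "__main__":
    before = "from diffusers.utils.testing_utils import enable_full_determinism, torch_device\n"
    print(rewrite_testing_utils_import(before), end="")
    # -> from ...testing_utils import enable_full_determinism, torch_device
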
@@ -26,12 +26,12 @@ from diffusers import (
     StableDiffusion3ControlNetInpaintingPipeline,
 )
 from diffusers.models import SD3ControlNetModel
-from diffusers.utils.testing_utils import (
+from diffusers.utils.torch_utils import randn_tensor
+from ...testing_utils import (
     enable_full_determinism,
     torch_device,
 )
-from diffusers.utils.torch_utils import randn_tensor
 from ..test_pipelines_common import PipelineTesterMixin
...
@@ -29,7 +29,9 @@ from diffusers import (
 )
 from diffusers.models import SD3ControlNetModel, SD3MultiControlNetModel
 from diffusers.utils import load_image
-from diffusers.utils.testing_utils import (
+from diffusers.utils.torch_utils import randn_tensor
+from ...testing_utils import (
     backend_empty_cache,
     enable_full_determinism,
     numpy_cosine_similarity_distance,
@@ -37,8 +39,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
-from diffusers.utils.torch_utils import randn_tensor
 from ..test_pipelines_common import PipelineTesterMixin
...
@@ -23,8 +23,8 @@ import torch
 from transformers import AutoTokenizer, T5EncoderModel
 from diffusers import AutoencoderKLCosmos, CosmosTextToWorldPipeline, CosmosTransformer3DModel, EDMEulerScheduler
-from diffusers.utils.testing_utils import enable_full_determinism, torch_device
+from ...testing_utils import enable_full_determinism, torch_device
 from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
 from ..test_pipelines_common import PipelineTesterMixin, to_np
 from .cosmos_guardrail import DummyCosmosSafetyChecker
...
@@ -28,8 +28,8 @@ from diffusers import (
     CosmosTransformer3DModel,
     FlowMatchEulerDiscreteScheduler,
 )
-from diffusers.utils.testing_utils import enable_full_determinism, torch_device
+from ...testing_utils import enable_full_determinism, torch_device
 from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
 from ..test_pipelines_common import PipelineTesterMixin, to_np
 from .cosmos_guardrail import DummyCosmosSafetyChecker
...
@@ -29,8 +29,8 @@ from diffusers import (
     CosmosTransformer3DModel,
     FlowMatchEulerDiscreteScheduler,
 )
-from diffusers.utils.testing_utils import enable_full_determinism, torch_device
+from ...testing_utils import enable_full_determinism, torch_device
 from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
 from ..test_pipelines_common import PipelineTesterMixin, to_np
 from .cosmos_guardrail import DummyCosmosSafetyChecker
...
@@ -24,8 +24,8 @@ import torch
 from transformers import AutoTokenizer, T5EncoderModel
 from diffusers import AutoencoderKLCosmos, CosmosTransformer3DModel, CosmosVideoToWorldPipeline, EDMEulerScheduler
-from diffusers.utils.testing_utils import enable_full_determinism, torch_device
+from ...testing_utils import enable_full_determinism, torch_device
 from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
 from ..test_pipelines_common import PipelineTesterMixin, to_np
 from .cosmos_guardrail import DummyCosmosSafetyChecker
...
@@ -19,8 +19,8 @@ import numpy as np
 import torch
 from diffusers import DDIMPipeline, DDIMScheduler, UNet2DModel
-from diffusers.utils.testing_utils import enable_full_determinism, require_torch_accelerator, slow, torch_device
+from ...testing_utils import enable_full_determinism, require_torch_accelerator, slow, torch_device
 from ..pipeline_params import UNCONDITIONAL_IMAGE_GENERATION_BATCH_PARAMS, UNCONDITIONAL_IMAGE_GENERATION_PARAMS
 from ..test_pipelines_common import PipelineTesterMixin
...
@@ -19,7 +19,8 @@ import numpy as np
 import torch
 from diffusers import DDPMPipeline, DDPMScheduler, UNet2DModel
-from diffusers.utils.testing_utils import enable_full_determinism, require_torch_accelerator, slow, torch_device
+from ...testing_utils import enable_full_determinism, require_torch_accelerator, slow, torch_device
 enable_full_determinism()
...
@@ -7,8 +7,8 @@ from transformers import AutoTokenizer, T5EncoderModel
 from diffusers import DDPMScheduler, UNet2DConditionModel
 from diffusers.models.attention_processor import AttnAddedKVProcessor
 from diffusers.pipelines.deepfloyd_if import IFWatermarker
-from diffusers.utils.testing_utils import torch_device
+from ...testing_utils import torch_device
 from ..test_pipelines_common import to_np
...
@@ -23,7 +23,8 @@ from diffusers import (
 )
 from diffusers.models.attention_processor import AttnAddedKVProcessor
 from diffusers.utils.import_utils import is_xformers_available
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     backend_max_memory_allocated,
     backend_reset_max_memory_allocated,
@@ -37,7 +38,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_PARAMS
 from ..test_pipelines_common import PipelineTesterMixin, assert_mean_pixel_difference
 from . import IFPipelineTesterMixin
...
@@ -22,7 +22,8 @@ import torch
 from diffusers import IFImg2ImgPipeline
 from diffusers.models.attention_processor import AttnAddedKVProcessor
 from diffusers.utils.import_utils import is_xformers_available
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     backend_max_memory_allocated,
     backend_reset_max_memory_allocated,
@@ -37,7 +38,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import (
     TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS,
     TEXT_GUIDED_IMAGE_VARIATION_PARAMS,
...
@@ -22,7 +22,8 @@ import torch
 from diffusers import IFImg2ImgSuperResolutionPipeline
 from diffusers.models.attention_processor import AttnAddedKVProcessor
 from diffusers.utils.import_utils import is_xformers_available
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     backend_max_memory_allocated,
     backend_reset_max_memory_allocated,
@@ -37,7 +38,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import (
     TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS,
     TEXT_GUIDED_IMAGE_VARIATION_PARAMS,
...
@@ -22,7 +22,8 @@ import torch
 from diffusers import IFInpaintingPipeline
 from diffusers.models.attention_processor import AttnAddedKVProcessor
 from diffusers.utils.import_utils import is_xformers_available
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     backend_max_memory_allocated,
     backend_reset_max_memory_allocated,
@@ -37,7 +38,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import (
     TEXT_GUIDED_IMAGE_INPAINTING_BATCH_PARAMS,
     TEXT_GUIDED_IMAGE_INPAINTING_PARAMS,
...
@@ -22,7 +22,8 @@ import torch
 from diffusers import IFInpaintingSuperResolutionPipeline
 from diffusers.models.attention_processor import AttnAddedKVProcessor
 from diffusers.utils.import_utils import is_xformers_available
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     backend_max_memory_allocated,
     backend_reset_max_memory_allocated,
@@ -37,7 +38,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import (
     TEXT_GUIDED_IMAGE_INPAINTING_BATCH_PARAMS,
     TEXT_GUIDED_IMAGE_INPAINTING_PARAMS,
...
@@ -22,7 +22,8 @@ import torch
 from diffusers import IFSuperResolutionPipeline
 from diffusers.models.attention_processor import AttnAddedKVProcessor
 from diffusers.utils.import_utils import is_xformers_available
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     backend_max_memory_allocated,
     backend_reset_max_memory_allocated,
@@ -37,7 +38,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS
 from ..test_pipelines_common import PipelineTesterMixin, assert_mean_pixel_difference
 from . import IFPipelineTesterMixin
...
@@ -21,7 +21,8 @@ import torch
 from diffusers import AutoencoderKL, DDIMScheduler, DiTPipeline, DiTTransformer2DModel, DPMSolverMultistepScheduler
 from diffusers.utils import is_xformers_available
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     enable_full_determinism,
     load_numpy,
@@ -30,7 +31,6 @@ from diffusers.utils.testing_utils import (
     require_torch_accelerator,
     torch_device,
 )
 from ..pipeline_params import (
     CLASS_CONDITIONED_IMAGE_GENERATION_BATCH_PARAMS,
     CLASS_CONDITIONED_IMAGE_GENERATION_PARAMS,
...
@@ -26,7 +26,8 @@ from diffusers import (
     EasyAnimateTransformer3DModel,
     FlowMatchEulerDiscreteScheduler,
 )
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     enable_full_determinism,
     numpy_cosine_similarity_distance,
@@ -34,7 +35,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
 from ..test_pipelines_common import PipelineTesterMixin, to_np
...
@@ -13,7 +13,8 @@ from diffusers import (
     FluxPipeline,
     FluxTransformer2DModel,
 )
-from diffusers.utils.testing_utils import (
+from ...testing_utils import (
     backend_empty_cache,
     nightly,
     numpy_cosine_similarity_distance,
@@ -21,7 +22,6 @@ from diffusers.utils.testing_utils import (
     slow,
     torch_device,
 )
 from ..test_pipelines_common import (
     FasterCacheTesterMixin,
     FirstBlockCacheTesterMixin,
...
@@ -6,8 +6,8 @@ from PIL import Image
 from transformers import AutoTokenizer, CLIPTextConfig, CLIPTextModel, CLIPTokenizer, T5EncoderModel
 from diffusers import AutoencoderKL, FlowMatchEulerDiscreteScheduler, FluxControlPipeline, FluxTransformer2DModel
-from diffusers.utils.testing_utils import torch_device
+from ...testing_utils import torch_device
 from ..test_pipelines_common import PipelineTesterMixin, check_qkv_fused_layers_exist
...
@@ -11,8 +11,8 @@ from diffusers import (
     FluxControlImg2ImgPipeline,
     FluxTransformer2DModel,
 )
-from diffusers.utils.testing_utils import enable_full_determinism, torch_device
+from ...testing_utils import enable_full_determinism, torch_device
 from ..test_pipelines_common import PipelineTesterMixin
...
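
For reference, the three leading dots resolve against the importing test module's package: each dot beyond the first strips one trailing package component before the target name is appended. A test module in a package like tests.pipelines.<pipeline_name> therefore ends up importing tests.testing_utils, i.e. a testing_utils module at the root of the test tree. A short sketch of that resolution rule (resolve_relative is a made-up helper and the package name is assumed purely for illustration):

# Illustration only: mirrors Python's relative-import rule, not a diffusers API.
def resolve_relative(module: str, package: str, level: int) -> str:
    base = package.rsplit(".", level - 1)[0]  # drop level-1 trailing components
    return f"{base}.{module}"

# A pipeline test in the package tests.pipelines.cosmos resolves
# "from ...testing_utils import ..." to:
print(resolve_relative("testing_utils", "tests.pipelines.cosmos", level=3))
# -> tests.testing_utils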