Unverified Commit 7aa6af11 authored by Dhruv Nair's avatar Dhruv Nair Committed by GitHub
Browse files

[Refactor] Move testing utils out of src (#12238)

* update

* update

* update

* update

* update

* merge main

* Revert "merge main"

This reverts commit 65efbcead58644b31596ed2d714f7cee0e0238d3.
parent 87b800e1
......@@ -10,14 +10,14 @@ from transformers import AutoTokenizer, CLIPTextConfig, CLIPTextModel, CLIPToken
import diffusers
from diffusers import AutoencoderKL, FlowMatchEulerDiscreteScheduler, FluxTransformer2DModel, VisualClozePipeline
from diffusers.utils import logging
from diffusers.utils.testing_utils import (
from ...testing_utils import (
CaptureLogger,
enable_full_determinism,
floats_tensor,
require_accelerator,
torch_device,
)
from ..test_pipelines_common import PipelineTesterMixin, to_np
......
......@@ -15,14 +15,14 @@ from diffusers import (
VisualClozeGenerationPipeline,
)
from diffusers.utils import logging
from diffusers.utils.testing_utils import (
from ...testing_utils import (
CaptureLogger,
enable_full_determinism,
floats_tensor,
require_accelerator,
torch_device,
)
from ..test_pipelines_common import PipelineTesterMixin, to_np
......
......@@ -21,14 +21,14 @@ import torch
from transformers import AutoTokenizer, T5EncoderModel
from diffusers import AutoencoderKLWan, FlowMatchEulerDiscreteScheduler, WanPipeline, WanTransformer3DModel
from diffusers.utils.testing_utils import (
from ...testing_utils import (
backend_empty_cache,
enable_full_determinism,
require_torch_accelerator,
slow,
torch_device,
)
from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
from ..test_pipelines_common import PipelineTesterMixin
......
......@@ -20,11 +20,11 @@ import torch
from transformers import AutoTokenizer, T5EncoderModel
from diffusers import AutoencoderKLWan, UniPCMultistepScheduler, WanPipeline, WanTransformer3DModel
from diffusers.utils.testing_utils import (
from ...testing_utils import (
enable_full_determinism,
torch_device,
)
from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
from ..test_pipelines_common import PipelineTesterMixin
......
......@@ -21,11 +21,11 @@ from PIL import Image
from transformers import AutoTokenizer, T5EncoderModel
from diffusers import AutoencoderKLWan, UniPCMultistepScheduler, WanImageToVideoPipeline, WanTransformer3DModel
from diffusers.utils.testing_utils import (
from ...testing_utils import (
enable_full_determinism,
torch_device,
)
from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
from ..test_pipelines_common import PipelineTesterMixin
......
......@@ -27,8 +27,8 @@ from transformers import (
)
from diffusers import AutoencoderKLWan, FlowMatchEulerDiscreteScheduler, WanImageToVideoPipeline, WanTransformer3DModel
from diffusers.utils.testing_utils import enable_full_determinism, torch_device
from ...testing_utils import enable_full_determinism, torch_device
from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
from ..test_pipelines_common import PipelineTesterMixin
......
......@@ -20,8 +20,8 @@ from PIL import Image
from transformers import AutoTokenizer, T5EncoderModel
from diffusers import AutoencoderKLWan, FlowMatchEulerDiscreteScheduler, WanVACEPipeline, WanVACETransformer3DModel
from diffusers.utils.testing_utils import enable_full_determinism
from ...testing_utils import enable_full_determinism
from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
from ..test_pipelines_common import PipelineTesterMixin
......
......@@ -19,10 +19,10 @@ from PIL import Image
from transformers import AutoTokenizer, T5EncoderModel
from diffusers import AutoencoderKLWan, UniPCMultistepScheduler, WanTransformer3DModel, WanVideoToVideoPipeline
from diffusers.utils.testing_utils import (
from ...testing_utils import (
enable_full_determinism,
)
from ..pipeline_params import TEXT_TO_IMAGE_IMAGE_PARAMS, TEXT_TO_IMAGE_PARAMS
from ..test_pipelines_common import (
PipelineTesterMixin,
......
......@@ -32,7 +32,8 @@ from diffusers import (
)
from diffusers.quantizers import PipelineQuantizationConfig
from diffusers.utils import is_accelerate_version, logging
from diffusers.utils.testing_utils import (
from ...testing_utils import (
CaptureLogger,
backend_empty_cache,
is_bitsandbytes_available,
......@@ -50,7 +51,6 @@ from diffusers.utils.testing_utils import (
slow,
torch_device,
)
from ..test_torch_compile_utils import QuantCompileTests
......
......@@ -32,7 +32,8 @@ from diffusers import (
)
from diffusers.quantizers import PipelineQuantizationConfig
from diffusers.utils import is_accelerate_version
from diffusers.utils.testing_utils import (
from ...testing_utils import (
CaptureLogger,
backend_empty_cache,
is_bitsandbytes_available,
......@@ -51,7 +52,6 @@ from diffusers.utils.testing_utils import (
slow,
torch_device,
)
from ..test_torch_compile_utils import QuantCompileTests
......
......@@ -20,7 +20,8 @@ from diffusers import (
WanVACETransformer3DModel,
)
from diffusers.utils import load_image
from diffusers.utils.testing_utils import (
from ...testing_utils import (
Expectations,
backend_empty_cache,
backend_max_memory_allocated,
......@@ -38,7 +39,6 @@ from diffusers.utils.testing_utils import (
require_torch_version_greater,
torch_device,
)
from ..test_torch_compile_utils import QuantCompileTests
......
......@@ -5,7 +5,8 @@ import unittest
from diffusers import FluxPipeline, FluxTransformer2DModel, QuantoConfig
from diffusers.models.attention_processor import Attention
from diffusers.utils import is_optimum_quanto_available, is_torch_available
from diffusers.utils.testing_utils import (
from ...testing_utils import (
backend_empty_cache,
backend_reset_peak_memory_stats,
enable_full_determinism,
......
......@@ -22,7 +22,8 @@ from parameterized import parameterized
from diffusers import BitsAndBytesConfig, DiffusionPipeline, QuantoConfig
from diffusers.quantizers import PipelineQuantizationConfig
from diffusers.utils import logging
from diffusers.utils.testing_utils import (
from ..testing_utils import (
CaptureLogger,
is_transformers_available,
require_accelerate,
......
......@@ -18,7 +18,8 @@ import inspect
import torch
from diffusers import DiffusionPipeline
from diffusers.utils.testing_utils import backend_empty_cache, require_torch_accelerator, slow, torch_device
from ..testing_utils import backend_empty_cache, require_torch_accelerator, slow, torch_device
@require_torch_accelerator
......
......@@ -31,7 +31,8 @@ from diffusers import (
)
from diffusers.models.attention_processor import Attention
from diffusers.quantizers import PipelineQuantizationConfig
from diffusers.utils.testing_utils import (
from ...testing_utils import (
backend_empty_cache,
backend_synchronize,
enable_full_determinism,
......@@ -45,7 +46,6 @@ from diffusers.utils.testing_utils import (
slow,
torch_device,
)
from ..test_torch_compile_utils import QuantCompileTests
......
from diffusers.utils import is_torch_available
from diffusers.utils.testing_utils import (
from ..testing_utils import (
backend_empty_cache,
backend_max_memory_allocated,
backend_reset_peak_memory_stats,
......
......@@ -30,13 +30,14 @@ from diffusers.utils.constants import (
from diffusers.utils.remote_utils import (
remote_decode,
)
from diffusers.utils.testing_utils import (
from diffusers.video_processor import VideoProcessor
from ..testing_utils import (
enable_full_determinism,
slow,
torch_all_close,
torch_device,
)
from diffusers.video_processor import VideoProcessor
enable_full_determinism()
......
......@@ -31,7 +31,8 @@ from diffusers.utils.remote_utils import (
remote_decode,
remote_encode,
)
from diffusers.utils.testing_utils import (
from ..testing_utils import (
enable_full_determinism,
slow,
)
......
import torch
from diffusers import DPMSolverSDEScheduler
from diffusers.utils.testing_utils import require_torchsde, torch_device
from ..testing_utils import require_torchsde, torch_device
from .test_schedulers import SchedulerCommonTest
......
import torch
from diffusers import EulerDiscreteScheduler
from diffusers.utils.testing_utils import torch_device
from ..testing_utils import torch_device
from .test_schedulers import SchedulerCommonTest
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment