Unverified Commit 2e86a3f0 authored by Sayak Paul, committed by GitHub

[Tests] skip nan lora tests on PyTorch 2.5.1 CPU. (#9975)

* skip nan lora tests on PyTorch 2.5.1 CPU.

* cog

* use xfail

* correct xfail

* add condition

* tests
parent cd6ca9df
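
The hunks below make the same change in three places (the CogVideoX LoRA tests, the Mochi LoRA tests, and the shared PeftLoraLoaderMixinTests): `test_lora_fuse_nan` is decorated with a conditional `pytest.mark.xfail` so it is treated as an expected failure only on CPU with PyTorch >= 2.5. Here is a minimal, self-contained sketch of that pattern, using the `torch_device` and `is_torch_version` helpers from `diffusers.utils.testing_utils` that the diff imports; the test body below is only a placeholder, not the real test.

```python
# Minimal sketch of the conditional-xfail pattern added in each hunk below.
# `torch_device` and `is_torch_version` are the diffusers testing helpers the
# diff imports; the test body is a placeholder for the real NaN-fusion checks.
import pytest
import torch

from diffusers.utils.testing_utils import is_torch_version, torch_device


@pytest.mark.xfail(
    condition=torch.device(torch_device).type == "cpu" and is_torch_version(">=", "2.5"),
    reason="Test currently fails on CPU and PyTorch 2.5.1 but not on PyTorch 2.4.1.",
    strict=True,
)
def test_lora_fuse_nan():
    ...  # placeholder: the real tests fuse LoRA weights containing NaNs
```
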
@@ -16,6 +16,7 @@ import sys
import unittest
import numpy as np
import pytest
import torch
from transformers import AutoTokenizer, T5EncoderModel
@@ -29,6 +30,7 @@ from diffusers import (
from diffusers.utils.testing_utils import (
floats_tensor,
is_peft_available,
is_torch_version,
require_peft_backend,
skip_mps,
torch_device,
@@ -126,6 +128,11 @@ class CogVideoXLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
return noise, input_ids, pipeline_inputs
@skip_mps
@pytest.mark.xfail(
condition=torch.device(torch_device).type == "cpu" and is_torch_version(">=", "2.5"),
reason="Test currently fails on CPU and PyTorch 2.5.1 but not on PyTorch 2.4.1.",
strict=True,
)
def test_lora_fuse_nan(self):
for scheduler_cls in self.scheduler_classes:
components, text_lora_config, denoiser_lora_config = self.get_dummy_components(scheduler_cls)
......
@@ -16,6 +16,7 @@ import sys
import unittest
import numpy as np
import pytest
import torch
from transformers import AutoTokenizer, T5EncoderModel
@@ -23,6 +24,7 @@ from diffusers import AutoencoderKLMochi, FlowMatchEulerDiscreteScheduler, Mochi
from diffusers.utils.testing_utils import (
floats_tensor,
is_peft_available,
is_torch_version,
require_peft_backend,
skip_mps,
torch_device,
@@ -105,6 +107,11 @@ class MochiLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
return noise, input_ids, pipeline_inputs
@pytest.mark.xfail(
condition=torch.device(torch_device).type == "cpu" and is_torch_version(">=", "2.5"),
reason="Test currently fails on CPU and PyTorch 2.5.1 but not on PyTorch 2.4.1.",
strict=True,
)
def test_lora_fuse_nan(self):
for scheduler_cls in self.scheduler_classes:
components, text_lora_config, denoiser_lora_config = self.get_dummy_components(scheduler_cls)
......
@@ -19,6 +19,7 @@ import unittest
from itertools import product
import numpy as np
import pytest
import torch
from diffusers import (
@@ -32,6 +33,7 @@ from diffusers.utils.import_utils import is_peft_available
from diffusers.utils.testing_utils import (
CaptureLogger,
floats_tensor,
is_torch_version,
require_peft_backend,
require_peft_version_greater,
require_transformers_version_greater,
@@ -1510,6 +1512,11 @@ class PeftLoraLoaderMixinTests:
)
@skip_mps
@pytest.mark.xfail(
condition=torch.device(torch_device).type == "cpu" and is_torch_version(">=", "2.5"),
reason="Test currently fails on CPU and PyTorch 2.5.1 but not on PyTorch 2.4.1.",
strict=True,
)
def test_lora_fuse_nan(self):
for scheduler_cls in self.scheduler_classes:
components, text_lora_config, denoiser_lora_config = self.get_dummy_components(scheduler_cls)
......
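
Because the marks use `strict=True`, pytest reports a failing `test_lora_fuse_nan` as XFAIL only while the condition holds (CPU device and PyTorch >= 2.5); if the test unexpectedly passes under that condition, the run fails with XPASS(strict). On other devices or older PyTorch versions the mark is ignored and the test runs normally.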