Unverified Commit 0b8e2928 authored by UmerHA, committed by GitHub

Skip `test_lora_fuse_nan` on mps (#7481)



Skipping `test_lora_fuse_nan` on mps
Co-authored-by: Sayak Paul <spsayakpaul@gmail.com>
parent ab38ddf6
@@ -31,6 +31,7 @@ from diffusers.utils.testing_utils import (
     floats_tensor,
     require_peft_backend,
     require_peft_version_greater,
+    skip_mps,
     torch_device,
 )
@@ -923,6 +924,7 @@ class PeftLoraLoaderMixinTests:
             "output with no lora and output with lora disabled should give same results",
         )
+    @skip_mps
     def test_lora_fuse_nan(self):
         for scheduler_cls in [DDIMScheduler, LCMScheduler]:
             components, text_lora_config, unet_lora_config = self.get_dummy_components(scheduler_cls)
...
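
For context, `skip_mps` from `diffusers.utils.testing_utils` is a decorator that marks a test as skipped when the resolved `torch_device` is `"mps"` (Apple's Metal backend). Below is a minimal sketch of the pattern, assuming the standard `unittest.skipIf` approach; the device-resolution order and the skip message string are illustrative assumptions, not the library's exact code:

```python
import unittest

import torch

# Resolve the test device roughly the way diffusers' testing utils do:
# prefer CUDA, fall back to Apple's Metal backend (mps), else CPU.
# (Assumed resolution order for illustration.)
if torch.cuda.is_available():
    torch_device = "cuda"
elif torch.backends.mps.is_available():
    torch_device = "mps"
else:
    torch_device = "cpu"

# A no-op decorator unless the resolved device is mps, in which case
# unittest reports the decorated test as skipped instead of running it.
# The reason string here is an assumption.
skip_mps = unittest.skipIf(torch_device == "mps", "test requires a non-mps device")
```

Applied as in the diff above, `@skip_mps` makes `test_lora_fuse_nan` show up as skipped on mps runners rather than failing, while leaving CUDA and CPU runs unchanged.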