Commit 02afa546 authored by lijian6

Disable flash attention (FA) for the VAE because its 512-dim attention is not supported.


Signed-off-by: lijian <lijian6@sugon.com>
parent 36cdd148
@@ -1042,6 +1042,7 @@ class StableDiffusionPipeline(
                     step_idx = i // getattr(self.scheduler, "order", 1)
                     callback(step_idx, t, latents)
+        self.disable_xformers_memory_efficient_attention()
         if not output_type == "latent":
             image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False, generator=generator)[
                 0
...
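A minimal usage sketch of the pattern this commit automates, not part of the diff itself: it assumes the public diffusers StableDiffusionPipeline API and the hypothetical "runwayml/stable-diffusion-v1-5" checkpoint. The UNet denoising loop runs with xformers memory-efficient attention enabled, then the pipeline falls back to the default attention before the VAE decode, whose 512-dim attention the flash-attention kernel does not support.

import torch
from diffusers import StableDiffusionPipeline

# Load a Stable Diffusion pipeline in half precision on the GPU.
pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
).to("cuda")

# Fast path for the denoising loop; the UNet attention dims are supported.
pipe.enable_xformers_memory_efficient_attention()
latents = pipe("a photo of an astronaut", output_type="latent").images

# The VAE attention block uses a 512-dim attention, so switch memory-efficient
# attention off before decoding (this is what the added pipeline line does).
pipe.disable_xformers_memory_efficient_attention()
with torch.no_grad():
    image = pipe.vae.decode(
        latents / pipe.vae.config.scaling_factor, return_dict=False
    )[0]
image = pipe.image_processor.postprocess(image, output_type="pil")[0]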