Unverified Commit 5fb3a985 authored by fancy45daddy's avatar fancy45daddy Committed by GitHub
Browse files

Update pipeline_controlnet.py add support for pytorch_xla (#10222)



* Update pipeline_controlnet.py

* make style

---------
Co-authored-by: hlky <hlky@hlky.ac>
parent aace1f41
...@@ -31,6 +31,7 @@ from ...schedulers import KarrasDiffusionSchedulers ...@@ -31,6 +31,7 @@ from ...schedulers import KarrasDiffusionSchedulers
from ...utils import ( from ...utils import (
USE_PEFT_BACKEND, USE_PEFT_BACKEND,
deprecate, deprecate,
is_torch_xla_available,
logging, logging,
replace_example_docstring, replace_example_docstring,
scale_lora_layers, scale_lora_layers,
...@@ -42,6 +43,13 @@ from ..stable_diffusion.pipeline_output import StableDiffusionPipelineOutput ...@@ -42,6 +43,13 @@ from ..stable_diffusion.pipeline_output import StableDiffusionPipelineOutput
from ..stable_diffusion.safety_checker import StableDiffusionSafetyChecker from ..stable_diffusion.safety_checker import StableDiffusionSafetyChecker
if is_torch_xla_available():
import torch_xla.core.xla_model as xm
XLA_AVAILABLE = True
else:
XLA_AVAILABLE = False
logger = logging.get_logger(__name__) # pylint: disable=invalid-name logger = logging.get_logger(__name__) # pylint: disable=invalid-name
...@@ -1323,6 +1331,8 @@ class StableDiffusionControlNetPipeline( ...@@ -1323,6 +1331,8 @@ class StableDiffusionControlNetPipeline(
step_idx = i // getattr(self.scheduler, "order", 1) step_idx = i // getattr(self.scheduler, "order", 1)
callback(step_idx, t, latents) callback(step_idx, t, latents)
if XLA_AVAILABLE:
xm.mark_step()
# If we do sequential model offloading, let's offload unet and controlnet # If we do sequential model offloading, let's offload unet and controlnet
# manually for max memory savings # manually for max memory savings
if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None: if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment