Unverified Commit a0198676 authored by Bagheera's avatar Bagheera Committed by GitHub
Browse files

Remove logger.info statement from Unet2DCondition code to ensure torch compile...


Remove logger.info statement from Unet2DCondition code to ensure torch compile reliably succeeds (#4982)

* Remove logger.info statement from Unet2DCondition code to ensure torch compile reliably succeeds

* Convert logging statement to a comment for future archaeologists

* Update src/diffusers/models/unet_2d_condition.py
Co-authored-by: Patrick von Platen <patrick.v.platen@gmail.com>

---------
Co-authored-by: bghira <bghira@users.github.com>
Co-authored-by: Patrick von Platen <patrick.v.platen@gmail.com>
parent abc47dec
@@ -784,7 +784,7 @@ class UNet2DConditionModel(ModelMixin, ConfigMixin, UNet2DConditionLoadersMixin)
         upsample_size = None
         if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]):
-            logger.info("Forward upsample size to force interpolation output size.")
+            # Forward upsample size to force interpolation output size.
             forward_upsample_size = True
         # ensure attention_mask is a bias, and give it a singleton query_tokens dimension
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment