Unverified Commit 09fab561 authored by Jackmin801, committed by GitHub

[fix] `network_alpha` when loading unet lora from old format (#4221)

fix: missing `network_alpha` when loading LoRA from the old format
parent 2e53936c
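For context, `network_alpha` controls how strongly the LoRA update is applied (commonly by scaling the low-rank delta by `network_alpha / rank`), so dropping it on this code path silently changed the effective LoRA strength for old-format checkpoints. Below is a minimal sketch of that scaling convention, not the exact diffusers implementation:

```python
import torch
import torch.nn.functional as F

def lora_delta(x, down_weight, up_weight, network_alpha=None):
    # down_weight: (rank, in_features), up_weight: (out_features, rank)
    rank = down_weight.shape[0]
    delta = F.linear(F.linear(x, down_weight), up_weight)
    if network_alpha is not None:
        # alpha/rank scaling; if network_alpha is never forwarded, this factor is skipped
        delta = delta * (network_alpha / rank)
    return delta
```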
@@ -1011,7 +1011,7 @@ class LoraLoaderMixin:
         elif not all(
             key.startswith(cls.unet_name) or key.startswith(cls.text_encoder_name) for key in state_dict.keys()
         ):
-            unet.load_attn_procs(state_dict)
+            unet.load_attn_procs(state_dict, network_alpha=network_alpha)
             warn_message = "You have saved the LoRA weights using the old format. To convert the old LoRA weights to the new format, you can first load them in a dictionary and then create a new dictionary like the following: `new_state_dict = {f'unet'.{module_name}: params for module_name, params in old_state_dict.items()}`."
             warnings.warn(warn_message)
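The warning above suggests converting old-format weights by prefixing every key with `unet.`. A minimal sketch of that conversion, assuming the state dict is stored in a plain PyTorch file and that the intended key expression is `f'unet.{module_name}'` (the snippet quoted in the warning appears to misplace the closing quote of the f-string):

```python
import torch

# Hypothetical file names; substitute your own checkpoint paths.
old_state_dict = torch.load("old_lora_weights.bin", map_location="cpu")

# Prefix every module name with "unet." so the new loader can route the keys.
new_state_dict = {f"unet.{module_name}": params for module_name, params in old_state_dict.items()}

torch.save(new_state_dict, "lora_weights_new_format.bin")
```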