Unverified Commit 09675934 authored by Jonathan Yin, committed by GitHub

Fix NoneType attribute error when loading multiple Flux LoRAs (#10182)

Fix NoneType attribute error
parent 43534a8d
@@ -2313,7 +2313,7 @@ class FluxLoraLoaderMixin(LoraBaseMixin):
         for name, module in transformer.named_modules():
             if isinstance(module, torch.nn.Linear):
                 module_weight = module.weight.data
-                module_bias = module.bias.data if hasattr(module, "bias") else None
+                module_bias = module.bias.data if module.bias is not None else None
                 bias = module_bias is not None
                 lora_A_weight_name = f"{name}.lora_A.weight"
...
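The root cause: `torch.nn.Linear` always exposes a `bias` attribute, even when the layer is constructed with `bias=False`; in that case the attribute exists but is `None`, so the old `hasattr(module, "bias")` check passes and the subsequent `.data` access raises `AttributeError: 'NoneType' object has no attribute 'data'`. A minimal standalone sketch (not part of the commit) reproducing the failure mode and the corrected check:

```python
import torch

# A Linear layer created with bias=False still registers a `bias` attribute,
# but its value is None.
linear_no_bias = torch.nn.Linear(4, 4, bias=False)

print(hasattr(linear_no_bias, "bias"))  # True, even though the bias is disabled
print(linear_no_bias.bias)              # None

# Old check: hasattr() passes, then `.data` is read from None and crashes:
#   module_bias = linear_no_bias.bias.data if hasattr(linear_no_bias, "bias") else None
#   -> AttributeError: 'NoneType' object has no attribute 'data'

# Fixed check from the commit: test the attribute's value, not its existence.
module_bias = linear_no_bias.bias.data if linear_no_bias.bias is not None else None
print(module_bias)                      # None, no crash
```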