Commit bf9ccffb authored by comfyanonymous

Small fix for SD2.x loras.

parent 678105fa
@@ -153,8 +153,8 @@ def model_lora_keys(model, key_map={}):
                 key_map[lora_key] = (k, 0)
             k = "model.transformer.resblocks.{}.attn.in_proj_weight".format(b)
             if k in sdk:
-                key_map[text_model_lora_key.format(b, "self_attn_k_proj")] = (k, 0)
-                key_map[text_model_lora_key.format(b, "self_attn_q_proj")] = (k, 1)
+                key_map[text_model_lora_key.format(b, "self_attn_q_proj")] = (k, 0)
+                key_map[text_model_lora_key.format(b, "self_attn_k_proj")] = (k, 1)
                 key_map[text_model_lora_key.format(b, "self_attn_v_proj")] = (k, 2)
     return key_map
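In SD2.x the CLIP text encoder stores the q, k, and v projections in a single fused in_proj_weight, so each self_attn_{q,k,v}_proj LoRA key maps to a (key, index) pair where the index selects which third of the fused tensor the patch applies to. Before this commit the q and k indices were swapped (k_proj at 0, q_proj at 1), so q LoRA weights landed on the k slice and vice versa; the fix simply swaps the two indices. The sketch below is a minimal illustration of how such an index could be used, assuming a row-stacked fused weight and a standard up @ down LoRA update; apply_lora_to_fused_slice is a hypothetical helper, not ComfyUI's actual loader code.

```python
import torch

def apply_lora_to_fused_slice(weight, index, lora_down, lora_up, alpha=1.0):
    # Hypothetical helper (not ComfyUI's loader): the fused in_proj_weight
    # stacks q, k, v along dim 0, so `index` (0=q, 1=k, 2=v) selects which
    # third of the rows receives the LoRA update.
    out_dim = weight.shape[0] // 3
    start = index * out_dim
    # Standard LoRA update on that slice: W_slice += alpha * (up @ down)
    weight[start:start + out_dim] += alpha * (lora_up @ lora_down)
    return weight

# Toy example: hidden size 8, fused q/k/v weight of shape (24, 8), rank-4 LoRA.
in_proj_weight = torch.zeros(24, 8)
lora_down = torch.randn(4, 8)   # hidden -> rank
lora_up = torch.randn(8, 4)     # rank -> slice rows
# Index 0 patches the q_proj slice, matching the corrected mapping above.
apply_lora_to_fused_slice(in_proj_weight, 0, lora_down, lora_up)
```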