chenpangpang / ComfyUI · Commits

Commit 2f360ae8
authored Jun 22, 2024 by comfyanonymous
parent 4ef1479d

    Support OneTrainer SD3 lora format.

Showing 1 changed file with 13 additions and 1 deletion.

comfy/lora.py (+13, -1)
@@ -218,12 +218,21 @@ def model_lora_keys_clip(model, key_map={}):
             lora_key = "lora_prior_te_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #cascade lora: TODO put lora key prefix in the model config
             key_map[lora_key] = k
 
+    for k in sdk: #OneTrainer SD3 lora
+        if k.startswith("t5xxl.transformer.") and k.endswith(".weight"):
+            l_key = k[len("t5xxl.transformer."):-len(".weight")]
+            lora_key = "lora_te3_{}".format(l_key.replace(".", "_"))
+            key_map[lora_key] = k
+
     k = "clip_g.transformer.text_projection.weight"
     if k in sdk:
         key_map["lora_prior_te_text_projection"] = k #cascade lora?
         # key_map["text_encoder.text_projection"] = k #TODO: check if other lora have the text_projection too
-        # key_map["lora_te_text_projection"] = k
+        key_map["lora_te2_text_projection"] = k #OneTrainer SD3 lora
+
+    k = "clip_l.transformer.text_projection.weight"
+    if k in sdk:
+        key_map["lora_te1_text_projection"] = k #OneTrainer SD3 lora, not necessary but omits warning
 
     return key_map
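Not part of the commit, but as a quick illustration of what the added t5xxl branch does: it strips the "t5xxl.transformer." prefix and ".weight" suffix from a checkpoint key, swaps dots for underscores, and prepends "lora_te3_", producing the name under which an OneTrainer SD3 lora stores its T5-XXL weights. A minimal standalone sketch in Python; the sample key is a made-up example of the key shape, not a value from the diff:

# Standalone sketch of the mapping added in the hunk above.
def onetrainer_t5xxl_to_comfy_key(k):
    if k.startswith("t5xxl.transformer.") and k.endswith(".weight"):
        l_key = k[len("t5xxl.transformer."):-len(".weight")]
        return "lora_te3_{}".format(l_key.replace(".", "_"))
    return None

# Hypothetical key, shaped like a T5-XXL weight name:
k = "t5xxl.transformer.encoder.block.0.layer.0.SelfAttention.q.weight"
print(onetrainer_t5xxl_to_comfy_key(k))
# prints: lora_te3_encoder_block_0_layer_0_SelfAttention_q

The lora_te1/lora_te2 text_projection entries added below the loop do the same job for the clip_l and clip_g encoders: they register the fixed names OneTrainer uses for the text projection weights, so loading an OneTrainer SD3 lora neither misses those weights nor emits a spurious warning.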
@@ -262,4 +271,7 @@ def model_lora_keys_unet(model, key_map={}):
                 key_lora = "base_model.model.{}".format(k[:-len(".weight")]) #format for flash-sd3 lora and others?
                 key_map[key_lora] = to
 
+                key_lora = "lora_transformer_{}".format(k[:-len(".weight")].replace(".", "_")) #OneTrainer lora
+                key_map[key_lora] = to
+
     return key_map
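Likewise not part of the commit: the unet-side addition registers a second lookup name for each diffusion-model weight, so that OneTrainer's lora_transformer_* naming and the existing base_model.model.* naming both resolve to the same target tensor "to". A minimal sketch, assuming a hypothetical SD3 diffusion-model key name for illustration:

# Standalone sketch of the alias pair built in the hunk above.
def unet_lora_aliases(k):
    stem = k[:-len(".weight")]
    return [
        "base_model.model.{}".format(stem),                    # flash-sd3 style
        "lora_transformer_{}".format(stem.replace(".", "_")),  # OneTrainer style
    ]

# Hypothetical diffusion-model key, for illustration only:
for alias in unet_lora_aliases("joint_blocks.0.x_block.attn.qkv.weight"):
    print(alias)
# prints: base_model.model.joint_blocks.0.x_block.attn.qkv
# prints: lora_transformer_joint_blocks_0_x_block_attn_qkv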