chenpangpang / ComfyUI

Commit 5a9ddf94, authored Jun 29, 2023 by comfyanonymous
Parent: 6e9f2840

LoraLoader node now caches the lora file between executions.
Showing 2 changed files with 18 additions and 5 deletions:

    comfy/sd.py    +3 -4
    nodes.py       +15 -1
comfy/sd.py

@@ -89,8 +89,7 @@ LORA_UNET_MAP_RESNET = {
     "skip_connection": "resnets_{}_conv_shortcut"
 }
 
-def load_lora(path, to_load):
-    lora = utils.load_torch_file(path, safe_load=True)
+def load_lora(lora, to_load):
     patch_dict = {}
     loaded_keys = set()
     for x in to_load:
@@ -501,10 +500,10 @@ class ModelPatcher:
         self.backup = {}
 
 
-def load_lora_for_models(model, clip, lora_path, strength_model, strength_clip):
+def load_lora_for_models(model, clip, lora, strength_model, strength_clip):
     key_map = model_lora_keys(model.model)
     key_map = model_lora_keys(clip.cond_stage_model, key_map)
-    loaded = load_lora(lora_path, key_map)
+    loaded = load_lora(lora, key_map)
     new_modelpatcher = model.clone()
     k = new_modelpatcher.add_patches(loaded, strength_model)
     new_clip = clip.clone()
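The net effect in comfy/sd.py: load_lora and load_lora_for_models no longer take a file path but an already-loaded state dict, so the caller decides when the file is actually read from disk. A minimal sketch of the new call contract, assuming model and clip come from a checkpoint loader elsewhere (apply_lora is a hypothetical helper, not part of the commit):

import comfy.sd
import comfy.utils

def apply_lora(model, clip, lora_path, strength=1.0):
    # Hypothetical wrapper around the refactored API: the caller reads the
    # file once, then hands the loaded tensors to load_lora_for_models.
    lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
    return comfy.sd.load_lora_for_models(model, clip, lora, strength, strength)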
nodes.py

@@ -434,6 +434,9 @@ class CLIPSetLastLayer:
         return (clip,)
 
 class LoraLoader:
+    def __init__(self):
+        self.loaded_lora = None
+
     @classmethod
     def INPUT_TYPES(s):
         return {"required": { "model": ("MODEL",),
@@ -452,7 +455,18 @@ class LoraLoader:
             return (model, clip)
 
         lora_path = folder_paths.get_full_path("loras", lora_name)
-        model_lora, clip_lora = comfy.sd.load_lora_for_models(model, clip, lora_path, strength_model, strength_clip)
+        lora = None
+        if self.loaded_lora is not None:
+            if self.loaded_lora[0] == lora_path:
+                lora = self.loaded_lora[1]
+            else:
+                del self.loaded_lora
+
+        if lora is None:
+            lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
+            self.loaded_lora = (lora_path, lora)
+
+        model_lora, clip_lora = comfy.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip)
         return (model_lora, clip_lora)
 
 class VAELoader:
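The addition to LoraLoader is a single-entry cache keyed on the resolved file path: re-running a workflow with the same LoRA reuses the tensors already in memory, while switching to a different file drops the old entry before reading the new one. The same pattern in isolation, independent of ComfyUI (class and parameter names here are illustrative):

class CachedFileLoader:
    """Single-entry cache: remembers only the most recently loaded file,
    mirroring the LoraLoader change above."""

    def __init__(self):
        self.loaded = None  # (path, data) once something has been read

    def load(self, path, read_fn):
        data = None
        if self.loaded is not None:
            if self.loaded[0] == path:
                data = self.loaded[1]   # cache hit: skip the disk read
            else:
                del self.loaded         # free the old data before reloading
                self.loaded = None
        if data is None:
            data = read_fn(path)        # cache miss: read from disk
            self.loaded = (path, data)
        return data

Keeping a single entry bounds memory to one file's worth of tensors, which fits the node's usage (one LoRA file per node instance), and dropping the stale reference before the new read avoids briefly holding two files in memory at once.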