chenpangpang / ComfyUI · Commit 09386a36
Authored Jul 21, 2023 by comfyanonymous · parent 58b2364f

Fix issue with lora in some cases when combined with model merging.
Showing 2 changed files with 11 additions and 10 deletions:

    comfy/sd.py     +11  -9
    execution.py     +0  -1
comfy/sd.py

@@ -202,6 +202,14 @@ def model_lora_keys_unet(model, key_map={}):
         key_map["lora_unet_{}".format(key_lora)] = "diffusion_model.{}".format(diffusers_keys[k])
     return key_map
 
+def set_attr(obj, attr, value):
+    attrs = attr.split(".")
+    for name in attrs[:-1]:
+        obj = getattr(obj, name)
+    prev = getattr(obj, attrs[-1])
+    setattr(obj, attrs[-1], torch.nn.Parameter(value))
+    del prev
+
 class ModelPatcher:
     def __init__(self, model, load_device, offload_device, size=0):
         self.size = size
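The new module-level helper resolves a dotted attribute path (the same form as the model's state_dict keys) against the live module tree and rebinds the leaf attribute to a freshly constructed torch.nn.Parameter. A minimal standalone sketch of that behaviour; the helper body is copied from the hunk above, while the toy Sequential model and values are purely illustrative:

    import torch

    def set_attr(obj, attr, value):
        # Walk "a.b.c" down to the parent object, then rebind the leaf
        # attribute to a new torch.nn.Parameter wrapping `value`.
        attrs = attr.split(".")
        for name in attrs[:-1]:
            obj = getattr(obj, name)
        prev = getattr(obj, attrs[-1])
        setattr(obj, attrs[-1], torch.nn.Parameter(value))
        del prev  # drops only the local reference to the old Parameter

    model = torch.nn.Sequential(torch.nn.Linear(4, 4))
    old_weight = model[0].weight

    set_attr(model, "0.weight", torch.zeros(4, 4))

    print(model[0].weight is old_weight)  # False: a new Parameter object was installed
    print(old_weight.abs().sum() > 0)     # tensor(True): the old tensor keeps its values

Because the replacement happens at the attribute level, anything else still holding the previous tensor keeps seeing its unmodified values, which is the property the next hunk relies on.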
@@ -340,10 +348,11 @@ class ModelPatcher:
             weight = model_sd[key]
 
             if key not in self.backup:
-                self.backup[key] = weight.to(self.offload_device, copy=True)
+                self.backup[key] = weight.to(self.offload_device)
 
             temp_weight = weight.to(torch.float32, copy=True)
-            weight[:] = self.calculate_weight(self.patches[key], temp_weight, key).to(weight.dtype)
+            out_weight = self.calculate_weight(self.patches[key], temp_weight, key).to(weight.dtype)
+            set_attr(self.model, key, out_weight)
             del temp_weight
 
         return self.model
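This is the core of the fix: patch_model used to write the patched values into the existing tensor with `weight[:] = ...`. After model merging, another model can hold a reference to that same underlying tensor, so the in-place write patched it as well. Computing the result into `out_weight` and installing it as a new Parameter via set_attr leaves any shared tensor untouched, which is presumably also why the backup no longer needs to force `copy=True`. A hedged toy illustration of the aliasing difference, using bare nn.Linear modules rather than ComfyUI's ModelPatcher:

    import torch

    def patch_in_place(module, new_value):
        # Old behaviour: mutate the tensor that the existing Parameter wraps.
        with torch.no_grad():
            module.weight[:] = new_value

    def patch_by_replacement(module, new_value):
        # New behaviour: bind a brand-new Parameter; the old tensor is left alone.
        setattr(module, "weight", torch.nn.Parameter(new_value))

    a = torch.nn.Linear(2, 2, bias=False)
    alias = a.weight                      # stands in for a merged model sharing the tensor
    patch_in_place(a, torch.zeros(2, 2))
    print(bool((alias == 0).all()))       # True: the shared tensor was patched too

    b = torch.nn.Linear(2, 2, bias=False)
    alias = b.weight
    patch_by_replacement(b, torch.zeros(2, 2))
    print(b.weight is alias)              # False: b now holds a different Parameter
    print(bool((alias == 0).all()))       # False: the shared tensor kept its original values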
@@ -439,13 +448,6 @@ class ModelPatcher:
     def unpatch_model(self):
         keys = list(self.backup.keys())
 
-        def set_attr(obj, attr, value):
-            attrs = attr.split(".")
-            for name in attrs[:-1]:
-                obj = getattr(obj, name)
-            prev = getattr(obj, attrs[-1])
-            setattr(obj, attrs[-1], torch.nn.Parameter(value))
-            del prev
 
         for k in keys:
             set_attr(self.model, k, self.backup[k])
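With the helper now defined at module level, unpatch_model drops its private nested copy of set_attr and restores each backed-up tensor through the same function that patch_model uses to install the patched weights.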
execution.py

@@ -6,7 +6,6 @@ import threading
 import heapq
 import traceback
-import gc
 import time
 
 import torch
 import nodes