chenpangpang/ComfyUI · Commit 199d7336

Fix ControlLora on lowvram.

Authored Aug 21, 2023 by comfyanonymous
Parent: d08e53de
Showing 1 changed file with 18 additions and 5 deletions.

comfy/sd.py (+18, -5)
@@ -243,6 +243,13 @@ def set_attr(obj, attr, value):
     setattr(obj, attrs[-1], torch.nn.Parameter(value))
     del prev
 
+def get_attr(obj, attr):
+    attrs = attr.split(".")
+    for name in attrs:
+        obj = getattr(obj, name)
+    return obj
+
 class ModelPatcher:
     def __init__(self, model, load_device, offload_device, size=0, current_device=None):
         self.size = size
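The new get_attr mirrors the existing set_attr: both walk a dotted state-dict key (e.g. "input_blocks.0.0.weight") through nested submodules, one getattr per segment. A minimal sketch of how the pair behaves on a toy module; the Sequential below is illustrative and not from the commit:

import torch

def set_attr(obj, attr, value):
    attrs = attr.split(".")
    for name in attrs[:-1]:
        obj = getattr(obj, name)
    prev = getattr(obj, attrs[-1])
    setattr(obj, attrs[-1], torch.nn.Parameter(value))
    del prev

def get_attr(obj, attr):
    attrs = attr.split(".")
    for name in attrs:
        obj = getattr(obj, name)
    return obj

net = torch.nn.Sequential(torch.nn.Linear(4, 4), torch.nn.ReLU())
w = get_attr(net, "0.weight")                  # same object as net[0].weight
set_attr(net, "0.weight", torch.zeros(4, 4))   # replace it in place
assert torch.equal(net[0].weight, torch.zeros(4, 4))

Note that nn.Module lookup makes getattr(net, "0") resolve through the module's child registry, which is why a plain getattr walk works on Sequential indices as well as named attributes.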
@@ -856,9 +863,9 @@ class ControlLoraOps:
         def forward(self, input):
             if self.up is not None:
-                return torch.nn.functional.linear(input, self.weight + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(self.weight.dtype), self.bias)
+                return torch.nn.functional.linear(input, self.weight.to(input.device) + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), self.bias)
             else:
-                return torch.nn.functional.linear(input, self.weight, self.bias)
+                return torch.nn.functional.linear(input, self.weight.to(input.device), self.bias)
 
     class Conv2d(torch.nn.Module):
         def __init__(
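The Linear change does two things: .to(input.device) pulls the base weight onto the activation's device, since under lowvram the stored weight may be offloaded elsewhere, and the cast target switches from self.weight.dtype to input.dtype so the reconstructed weight matches the activation. The effective weight is the base weight plus the low-rank product of the up and down factors. A standalone sketch of that reconstruction, with arbitrary illustrative shapes:

import torch

out_features, in_features, rank = 8, 16, 4
weight = torch.randn(out_features, in_features)
up = torch.randn(out_features, rank)
down = torch.randn(rank, in_features)

x = torch.randn(2, in_features)

# Effective weight: base plus the rank-4 delta, matching the diff's expression.
w_eff = weight + torch.mm(up.flatten(start_dim=1), down.flatten(start_dim=1)).reshape(weight.shape)
y = torch.nn.functional.linear(x, w_eff.type(x.dtype))

# Equivalent: apply the base weight and the low-rank delta separately.
y_ref = torch.nn.functional.linear(x, weight) + x @ (up @ down).t()
assert torch.allclose(y, y_ref, atol=1e-5)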
@@ -895,9 +902,9 @@ class ControlLoraOps:
         def forward(self, input):
             if self.up is not None:
-                return torch.nn.functional.conv2d(input, self.weight + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(self.weight.dtype), self.bias, self.stride, self.padding, self.dilation, self.groups)
+                return torch.nn.functional.conv2d(input, self.weight.to(input.device) + (torch.mm(self.up.flatten(start_dim=1), self.down.flatten(start_dim=1))).reshape(self.weight.shape).type(input.dtype), self.bias, self.stride, self.padding, self.dilation, self.groups)
             else:
-                return torch.nn.functional.conv2d(input, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups)
+                return torch.nn.functional.conv2d(input, self.weight.to(input.device), self.bias, self.stride, self.padding, self.dilation, self.groups)
 
     def conv_nd(self, dims, *args, **kwargs):
         if dims == 2:
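The Conv2d path is the same fix applied to a 4-D weight: flatten(start_dim=1) turns the down factor into a matrix so torch.mm works, and the product is reshaped back to (out_channels, in_channels, kh, kw). A sketch with made-up shapes:

import torch

out_ch, in_ch, kh, kw, rank = 8, 4, 3, 3, 2
weight = torch.randn(out_ch, in_ch, kh, kw)
up = torch.randn(out_ch, rank)           # flatten(start_dim=1) -> (out_ch, rank)
down = torch.randn(rank, in_ch, kh, kw)  # flatten(start_dim=1) -> (rank, in_ch*kh*kw)

delta = torch.mm(up.flatten(start_dim=1), down.flatten(start_dim=1)).reshape(weight.shape)

x = torch.randn(1, in_ch, 16, 16)
# Under lowvram the stored weight may sit on another device, hence .to(input.device).
y = torch.nn.functional.conv2d(x, (weight + delta).to(x.device).type(x.dtype), None, 1, 1, 1, 1)
print(y.shape)  # torch.Size([1, 8, 16, 16])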
@@ -927,8 +934,14 @@ class ControlLora(ControlNet):
         cm = self.control_model.state_dict()
 
         for k in sd:
+            weight = sd[k]
+            if weight.device == torch.device("meta"):  #lowvram NOTE: this depends on the inner working of the accelerate library so it might break.
+                key_split = k.split('.')  # I have no idea why they don't just leave the weight there instead of using the meta device.
+                op = get_attr(diffusion_model, '.'.join(key_split[:-1]))
+                weight = op._hf_hook.weights_map[key_split[-1]]
+
             try:
-                set_attr(self.control_model, k, sd[k])
+                set_attr(self.control_model, k, weight)
             except:
                 pass
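This hunk is the heart of the lowvram fix: when accelerate has offloaded a module, the tensor seen in its state dict is a placeholder on the "meta" device, and the real weight has to be fetched from the module's accelerate hook instead. As the commit's own comment warns, _hf_hook and weights_map are accelerate internals and may break. The sketch below just factors the diff's lookup into a helper; resolve_weight is a hypothetical name, not in the commit:

import torch

def get_attr(obj, attr):  # the helper added at the top of this commit
    for name in attr.split("."):
        obj = getattr(obj, name)
    return obj

def resolve_weight(diffusion_model, k, sd):
    # Hypothetical wrapper mirroring the diff's logic.
    weight = sd[k]
    if weight.device == torch.device("meta"):
        key_split = k.split('.')
        op = get_attr(diffusion_model, '.'.join(key_split[:-1]))  # owning module
        weight = op._hf_hook.weights_map[key_split[-1]]           # real offloaded tensor
    return weight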