chenpangpang / ComfyUI · Commits
"...git@developer.sourcefind.cn:chenpangpang/diffusers.git" did not exist on "c806f2fad6040d50b3d291076cab0195863ba328"
Commit c6de09b0, authored Mar 24, 2024 by comfyanonymous
Parent: 6a32c06f

Optimize the memory unload strategy for better performance.
Showing 1 changed file (comfy/model_management.py) with 16 additions and 9 deletions.
comfy/model_management.py (view file @ c6de09b0)

...
@@ -349,20 +349,27 @@ def unload_model_clones(model, unload_weights_only=True, force_unload=True):
     return unload_weight
 
 def free_memory(memory_required, device, keep_loaded=[]):
-    unloaded_model = False
+    unloaded_model = []
+    can_unload = []
+
     for i in range(len(current_loaded_models) -1, -1, -1):
-        if not DISABLE_SMART_MEMORY:
-            if get_free_memory(device) > memory_required:
-                break
         shift_model = current_loaded_models[i]
         if shift_model.device == device:
             if shift_model not in keep_loaded:
-                m = current_loaded_models.pop(i)
-                m.model_unload()
-                del m
-                unloaded_model = True
+                can_unload.append((sys.getrefcount(shift_model.model), shift_model.model_memory(), i))
 
-    if unloaded_model:
+    for x in sorted(can_unload):
+        i = x[-1]
+        if not DISABLE_SMART_MEMORY:
+            if get_free_memory(device) > memory_required:
+                break
+        current_loaded_models[i].model_unload()
+        unloaded_model.append(i)
+
+    for i in sorted(unloaded_model, reverse=True):
+        current_loaded_models.pop(i)
+
+    if len(unloaded_model) > 0:
         soft_empty_cache()
     else:
         if vram_state != VRAMState.HIGH_VRAM:
...
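The change collects each unloadable model as a (sys.getrefcount(model), model_memory(), index) tuple and then frees candidates in sorted order, so models with the fewest outstanding references are unloaded first and ties fall to the smaller memory footprint. A minimal standalone sketch of that ordering, using made-up refcounts and sizes rather than real ComfyUI model objects:

# Toy illustration of the ordering used by the new free_memory():
# tuples sort by refcount first, then model memory, then list index,
# so lightly referenced, smaller models become the first unload targets.
candidates = [
    (4, 2_000_000_000, 0),  # 2 GB model still referenced elsewhere
    (2, 1_500_000_000, 1),  # 1.5 GB model with few references
    (2,   500_000_000, 2),  # 0.5 GB model with few references
]
for refcount, memory, index in sorted(candidates):
    print(f"would unload slot {index} (refcount={refcount}, bytes={memory})")
# Prints slot 2, then slot 1, then slot 0.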
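The other structural change is that models are no longer popped from current_loaded_models inside the scan loop; the new code records the indices it unloads and removes them afterwards in reverse order so the remaining indices stay valid. A small generic sketch of that pattern, with placeholder list contents:

# Toy illustration of deferred removal by index in descending order,
# mirroring the new "for i in sorted(unloaded_model, reverse=True)" loop.
loaded = ["model_a", "model_b", "model_c", "model_d"]
unloaded = [1, 3]              # indices that were unloaded during the scan
for i in sorted(unloaded, reverse=True):
    loaded.pop(i)              # pop 3 first, then 1; index 1 is still valid
print(loaded)                  # ['model_a', 'model_c']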