Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
ComfyUI
Commits
104fcea0
Commit
104fcea0
authored
Jun 05, 2024
by
comfyanonymous
Browse files
Add function to get the list of currently loaded models.
parent
b1fd26fe
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
13 additions
and
0 deletions
+13
-0
comfy/model_management.py
comfy/model_management.py
+13
-0
No files found.
comfy/model_management.py
View file @
104fcea0
...
@@ -276,6 +276,7 @@ class LoadedModel:
...
@@ -276,6 +276,7 @@ class LoadedModel:
self
.
device
=
model
.
load_device
self
.
device
=
model
.
load_device
self
.
weights_loaded
=
False
self
.
weights_loaded
=
False
self
.
real_model
=
None
self
.
real_model
=
None
self
.
currently_used
=
True
def model_memory(self):
    """Return the memory footprint of the wrapped model.

    Delegates to the underlying model object's ``model_size()``;
    the result is whatever unit that method reports (presumably
    bytes — confirm against the model patcher implementation).
    """
    footprint = self.model.model_size()
    return footprint
...
@@ -365,6 +366,7 @@ def free_memory(memory_required, device, keep_loaded=[]):
...
@@ -365,6 +366,7 @@ def free_memory(memory_required, device, keep_loaded=[]):
if
shift_model
.
device
==
device
:
if
shift_model
.
device
==
device
:
if
shift_model
not
in
keep_loaded
:
if
shift_model
not
in
keep_loaded
:
can_unload
.
append
((
sys
.
getrefcount
(
shift_model
.
model
),
shift_model
.
model_memory
(),
i
))
can_unload
.
append
((
sys
.
getrefcount
(
shift_model
.
model
),
shift_model
.
model_memory
(),
i
))
shift_model
.
currently_used
=
False
for
x
in
sorted
(
can_unload
):
for
x
in
sorted
(
can_unload
):
i
=
x
[
-
1
]
i
=
x
[
-
1
]
...
@@ -410,6 +412,7 @@ def load_models_gpu(models, memory_required=0, force_patch_weights=False):
...
@@ -410,6 +412,7 @@ def load_models_gpu(models, memory_required=0, force_patch_weights=False):
current_loaded_models
.
pop
(
loaded_model_index
).
model_unload
(
unpatch_weights
=
True
)
current_loaded_models
.
pop
(
loaded_model_index
).
model_unload
(
unpatch_weights
=
True
)
loaded
=
None
loaded
=
None
else
:
else
:
loaded
.
currently_used
=
True
models_already_loaded
.
append
(
loaded
)
models_already_loaded
.
append
(
loaded
)
if
loaded
is
None
:
if
loaded
is
None
:
...
@@ -466,6 +469,16 @@ def load_models_gpu(models, memory_required=0, force_patch_weights=False):
...
@@ -466,6 +469,16 @@ def load_models_gpu(models, memory_required=0, force_patch_weights=False):
def load_model_gpu(model):
    """Convenience wrapper: load exactly one model onto its device.

    Equivalent to calling ``load_models_gpu`` with a single-element
    list; returns whatever that call returns.
    """
    batch = [model]
    return load_models_gpu(batch)
def loaded_models(only_currently_used=False):
    """List the model objects tracked by the global loader.

    Args:
        only_currently_used: when True, entries whose ``currently_used``
            flag is False are omitted.

    Returns:
        The underlying ``model`` object of each entry in
        ``current_loaded_models``, in tracking order.
    """
    if only_currently_used:
        return [entry.model for entry in current_loaded_models if entry.currently_used]
    return [entry.model for entry in current_loaded_models]
def
cleanup_models
(
keep_clone_weights_loaded
=
False
):
def
cleanup_models
(
keep_clone_weights_loaded
=
False
):
to_delete
=
[]
to_delete
=
[]
for
i
in
range
(
len
(
current_loaded_models
)):
for
i
in
range
(
len
(
current_loaded_models
)):
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment