Commit 6a32c06f authored by comfyanonymous

Move cleanup_models to improve performance.

parent a28a9dc8
@@ -368,7 +368,6 @@ class PromptExecutor:
                 d = self.outputs_ui.pop(x)
                 del d

-            comfy.model_management.cleanup_models()
             self.add_message("execution_cached",
                           { "nodes": list(current_outputs) , "prompt_id": prompt_id},
                           broadcast=False)
@@ -139,6 +139,7 @@ def prompt_worker(q, server):
         if need_gc:
             current_time = time.perf_counter()
             if (current_time - last_gc_collect) > gc_collect_interval:
+                comfy.model_management.cleanup_models()
                 gc.collect()
                 comfy.model_management.soft_empty_cache()
                 last_gc_collect = current_time
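Taken together, the two hunks move the comfy.model_management.cleanup_models() call out of the per-prompt cached-output path in PromptExecutor and into the throttled garbage-collection branch of prompt_worker, so unused models are only cleaned up when the periodic GC pass runs rather than on every prompt. Below is a minimal, standalone sketch of that throttling pattern; the cleanup_models stub and the gc_collect_interval value are placeholders for illustration, not the real comfy.model_management implementation, and the soft_empty_cache() call from the actual code is omitted.

import gc
import time

# Assumed value for illustration; ComfyUI defines its own interval.
gc_collect_interval = 10.0


def cleanup_models():
    # Stand-in stub for comfy.model_management.cleanup_models(), which in
    # ComfyUI releases models that are no longer needed by loaded workflows.
    print("cleaning up unused models")


def maybe_collect(need_gc, last_gc_collect):
    # Mirror of the prompt_worker branch above: run the expensive cleanup
    # only when flagged and only if the interval has elapsed, then return
    # the timestamp so the caller can carry it into the next loop iteration.
    if need_gc:
        current_time = time.perf_counter()
        if (current_time - last_gc_collect) > gc_collect_interval:
            cleanup_models()   # moved here from the per-prompt path
            gc.collect()       # reclaim Python-level garbage afterwards
            last_gc_collect = current_time
    return last_gc_collect


if __name__ == "__main__":
    last = float("-inf")  # force the first eligible pass to run immediately
    last = maybe_collect(need_gc=True, last_gc_collect=last)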