Move cleanup_models to improve performance.

comfyanonymous 2024-03-23 17:27:10 -04:00
parent a28a9dc836
commit 6a32c06f06
2 changed files with 1 addition and 1 deletion


@@ -368,7 +368,6 @@ class PromptExecutor:
                 d = self.outputs_ui.pop(x)
                 del d
-        comfy.model_management.cleanup_models()
         self.add_message("execution_cached",
                       { "nodes": list(current_outputs) , "prompt_id": prompt_id},
                       broadcast=False)


@@ -139,6 +139,7 @@ def prompt_worker(q, server):
         if need_gc:
             current_time = time.perf_counter()
             if (current_time - last_gc_collect) > gc_collect_interval:
+                comfy.model_management.cleanup_models()
                 gc.collect()
                 comfy.model_management.soft_empty_cache()
                 last_gc_collect = current_time
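
For context, the net effect of this commit is that cleanup_models() is no longer run on every cached-prompt execution; it is batched onto the worker's periodic garbage-collection pass. The sketch below is a minimal standalone illustration of that pattern, not the actual ComfyUI code: worker_loop, get_task, run_task, and do_heavy_cleanup are hypothetical stand-ins, and only the need_gc / gc_collect_interval timing logic mirrors the diff above.

import gc
import time

gc_collect_interval = 10.0  # seconds between heavy cleanup passes (assumed value)

def worker_loop(get_task, run_task, do_heavy_cleanup):
    # Hypothetical worker loop: defer expensive cleanup to a periodic pass
    # instead of running it after every task.
    last_gc_collect = 0.0
    need_gc = False
    while True:
        task = get_task()
        if task is not None:
            run_task(task)
            need_gc = True  # mark that a cleanup pass is pending
        if need_gc:
            current_time = time.perf_counter()
            if (current_time - last_gc_collect) > gc_collect_interval:
                do_heavy_cleanup()  # stand-in for comfy.model_management.cleanup_models()
                gc.collect()        # then run Python's garbage collector
                last_gc_collect = current_time
                need_gc = False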