
Move cleanup_models to improve performance.

pull/3069/merge
comfyanonymous, 8 months ago
commit 6a32c06f06
Changed files:
  execution.py (1 change)
  main.py (1 change)

execution.py (1 change)

@@ -368,7 +368,6 @@ class PromptExecutor:
                 d = self.outputs_ui.pop(x)
                 del d
-        comfy.model_management.cleanup_models()
         self.add_message("execution_cached",
                          { "nodes": list(current_outputs) , "prompt_id": prompt_id},
                          broadcast=False)

main.py (1 change)

@@ -139,6 +139,7 @@ def prompt_worker(q, server):
         if need_gc:
             current_time = time.perf_counter()
             if (current_time - last_gc_collect) > gc_collect_interval:
+                comfy.model_management.cleanup_models()
                 gc.collect()
                 comfy.model_management.soft_empty_cache()
                 last_gc_collect = current_time
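
For context, a minimal, runnable sketch of the pattern this commit relies on: model cleanup now happens only inside prompt_worker's time-throttled GC block instead of on every cached execution in PromptExecutor. The names need_gc, gc_collect_interval, last_gc_collect, gc.collect(), cleanup_models() and soft_empty_cache() come from the diff above; the stub implementations, the job loop, and the 10-second interval are assumptions for illustration, not ComfyUI's actual code.

# Sketch of the time-throttled cleanup loop (assumed surrounding code).
import gc
import time

gc_collect_interval = 10.0  # seconds between heavy cleanup passes (assumed value)

def cleanup_models():
    # Stand-in for comfy.model_management.cleanup_models():
    # unloads models that are no longer referenced.
    pass

def soft_empty_cache():
    # Stand-in for comfy.model_management.soft_empty_cache():
    # asks the backend to release cached memory.
    pass

def prompt_worker(jobs):
    last_gc_collect = 0.0
    for job in jobs:
        job()              # execute one prompt
        need_gc = True     # after this commit, cleanup is deferred to the GC block below
        if need_gc:
            current_time = time.perf_counter()
            if (current_time - last_gc_collect) > gc_collect_interval:
                cleanup_models()   # moved here from PromptExecutor (see execution.py diff above)
                gc.collect()
                soft_empty_cache()
                last_gc_collect = current_time

prompt_worker([lambda: None] * 3)

The effect is that the relatively expensive cleanup runs at most once per gc_collect_interval rather than after every prompt that hits the cache, which is where the performance gain in the commit message comes from.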
