Merge branch 'improved_memory' into patch_hooks_improved_memory

This commit is contained in:
Jedrzej Kosinski 2024-11-11 11:26:36 -06:00
commit 9330745f27
2 changed files with 2 additions and 1 deletion

View File

@ -331,7 +331,7 @@ class LoadedModel:
use_more_vram = lowvram_model_memory
if use_more_vram == 0:
use_more_vram = 1e32
self.model_use_more_vram(use_more_vram)
self.model_use_more_vram(use_more_vram, force_patch_weights=force_patch_weights)
real_model = self.model.model
if is_intel_xpu() and not args.disable_ipex_optimize and 'ipex' in globals() and real_model is not None:

View File

@ -480,6 +480,7 @@ class PromptExecutor:
if self.caches.outputs.get(node_id) is not None:
cached_nodes.append(node_id)
comfy.model_management.cleanup_models_gc()
self.add_message("execution_cached",
{ "nodes": cached_nodes, "prompt_id": prompt_id},
broadcast=False)