To be really simple, CheckpointLoaderSimple should pick the right type automatically.

This commit is contained in:
comfyanonymous 2023-03-03 11:07:10 -05:00
parent 94bb0375b0
commit fed315a76a
3 changed files with 29 additions and 7 deletions

View File

@@ -173,6 +173,30 @@ def maximum_batch_area():
memory_free = get_free_memory() / (1024 * 1024)
area = ((memory_free - 1024) * 0.9) / (0.6)
return int(max(area, 0))
def cpu_mode():
global vram_state
return vram_state == CPU
def should_use_fp16():
if cpu_mode():
return False #TODO ?
if torch.cuda.is_bf16_supported():
return True
props = torch.cuda.get_device_properties()
if props.major < 7:
return False
#FP32 is faster on those cards?
nvidia_16_series = ["1660", "1650", "1630"]
for x in nvidia_16_series:
if x in props.name:
return False
return True
#TODO: might be cleaner to put this somewhere else
import threading

View File

@@ -656,12 +656,14 @@ def load_checkpoint(config_path, ckpt_path, output_vae=True, output_clip=True, e
return (ModelPatcher(model), clip, vae)
def load_checkpoint_guess_config(ckpt_path, fp16=False, output_vae=True, output_clip=True, embedding_directory=None):
def load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, embedding_directory=None):
sd = load_torch_file(ckpt_path)
sd_keys = sd.keys()
clip = None
vae = None
fp16 = model_management.should_use_fp16()
class WeightsLoader(torch.nn.Module):
pass

View File

@@ -209,19 +209,15 @@ class CheckpointLoaderSimple:
@classmethod
def INPUT_TYPES(s):
return {"required": { "ckpt_name": (filter_files_extensions(recursive_search(s.ckpt_dir), supported_ckpt_extensions), ),
"type": (["fp16", "fp32"],),
"stop_at_clip_layer": ("INT", {"default": -1, "min": -24, "max": -1, "step": 1}),
}}
RETURN_TYPES = ("MODEL", "CLIP", "VAE")
FUNCTION = "load_checkpoint"
CATEGORY = "_for_testing"
def load_checkpoint(self, ckpt_name, type, stop_at_clip_layer, output_vae=True, output_clip=True):
def load_checkpoint(self, ckpt_name, output_vae=True, output_clip=True):
ckpt_path = os.path.join(self.ckpt_dir, ckpt_name)
out = comfy.sd.load_checkpoint_guess_config(ckpt_path, type=="fp16", output_vae=True, output_clip=True, embedding_directory=CheckpointLoader.embedding_directory)
if out[1] is not None:
out[1].clip_layer(stop_at_clip_layer)
out = comfy.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, embedding_directory=CheckpointLoader.embedding_directory)
return out
class LoraLoader: