import folder_paths
import comfy.sd
import comfy.model_sampling
import comfy.latent_formats
import nodes
import torch


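# LCM (Latent Consistency Model) prediction: the eps-style model output is first
# converted to an x0 estimate, then blended with the input via the consistency
# function's skip/output scalings computed in calculate_denoised below.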
class LCM(comfy.model_sampling.EPS):
    def calculate_denoised(self, sigma, model_output, model_input):
        timestep = self.timestep(sigma).view(sigma.shape[:1] + (1,) * (model_output.ndim - 1))
        sigma = sigma.view(sigma.shape[:1] + (1,) * (model_output.ndim - 1))
        x0 = model_input - model_output * sigma

        sigma_data = 0.5
        scaled_timestep = timestep * 10.0  # timestep_scaling

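        # Consistency-model boundary conditions: as the scaled timestep goes to 0,
        # c_skip -> 1 and c_out -> 0, so the output reduces to the input itself.
        # sigma_data = 0.5 and the timestep scaling of 10.0 above are the defaults
        # from the LCM paper.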
        c_skip = sigma_data**2 / (scaled_timestep**2 + sigma_data**2)
        c_out = scaled_timestep / (scaled_timestep**2 + sigma_data**2) ** 0.5

        return c_out * x0 + c_skip * model_input


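# X0: the model output is interpreted directly as the denoised sample.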
class X0(comfy.model_sampling.EPS):
    def calculate_denoised(self, sigma, model_output, model_input):
        return model_output


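# Discrete schedule for distilled (LCM-style) models: the full num_timesteps
# schedule is subsampled down to original_timesteps evenly spaced sigmas, and
# timestep()/sigma() map between the reduced schedule and the original
# timestep indices using skip_steps.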
class ModelSamplingDiscreteDistilled(comfy.model_sampling.ModelSamplingDiscrete):
    original_timesteps = 50

    def __init__(self, model_config=None, zsnr=None):
        super().__init__(model_config, zsnr=zsnr)

        self.skip_steps = self.num_timesteps // self.original_timesteps

        sigmas_valid = torch.zeros((self.original_timesteps), dtype=torch.float32)
        for x in range(self.original_timesteps):
            sigmas_valid[self.original_timesteps - 1 - x] = self.sigmas[self.num_timesteps - 1 - x * self.skip_steps]

        self.set_sigmas(sigmas_valid)

    def timestep(self, sigma):
        log_sigma = sigma.log()
        dists = log_sigma.to(self.log_sigmas.device) - self.log_sigmas[:, None]
        return (dists.abs().argmin(dim=0).view(sigma.shape) * self.skip_steps + (self.skip_steps - 1)).to(sigma.device)

    def sigma(self, timestep):
        t = torch.clamp(((timestep.float().to(self.log_sigmas.device) - (self.skip_steps - 1)) / self.skip_steps).float(), min=0, max=(len(self.sigmas) - 1))
        low_idx = t.floor().long()
        high_idx = t.ceil().long()
        w = t.frac()
        log_sigma = (1 - w) * self.log_sigmas[low_idx] + w * self.log_sigmas[high_idx]
        return log_sigma.exp().to(timestep.device)


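# Node that swaps a model's prediction type (eps, v-prediction, LCM, x0) by
# composing a new model_sampling object and patching it onto a clone of the model.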
class ModelSamplingDiscrete:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "sampling": (["eps", "v_prediction", "lcm", "x0"],),
                              "zsnr": ("BOOLEAN", {"default": False}),
                              }}

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "advanced/model"

    def patch(self, model, sampling, zsnr):
        m = model.clone()

        sampling_base = comfy.model_sampling.ModelSamplingDiscrete
        if sampling == "eps":
            sampling_type = comfy.model_sampling.EPS
        elif sampling == "v_prediction":
            sampling_type = comfy.model_sampling.V_PREDICTION
        elif sampling == "lcm":
            sampling_type = LCM
            sampling_base = ModelSamplingDiscreteDistilled
        elif sampling == "x0":
            sampling_type = X0

        class ModelSamplingAdvanced(sampling_base, sampling_type):
            pass

        model_sampling = ModelSamplingAdvanced(model.model.model_config, zsnr=zsnr)

        m.add_object_patch("model_sampling", model_sampling)
        return (m, )


class ModelSamplingStableCascade:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "shift": ("FLOAT", {"default": 2.0, "min": 0.0, "max": 100.0, "step": 0.01}),
                              }}

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "advanced/model"

    def patch(self, model, shift):
        m = model.clone()

        sampling_base = comfy.model_sampling.StableCascadeSampling
        sampling_type = comfy.model_sampling.EPS

        class ModelSamplingAdvanced(sampling_base, sampling_type):
            pass

        model_sampling = ModelSamplingAdvanced(model.model.model_config)
        model_sampling.set_parameters(shift)
        m.add_object_patch("model_sampling", model_sampling)
        return (m, )


class ModelSamplingSD3:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "shift": ("FLOAT", {"default": 3.0, "min": 0.0, "max": 100.0, "step": 0.01}),
                              }}

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "advanced/model"

    def patch(self, model, shift, multiplier=1000):
        m = model.clone()

        sampling_base = comfy.model_sampling.ModelSamplingDiscreteFlow
        sampling_type = comfy.model_sampling.CONST

        class ModelSamplingAdvanced(sampling_base, sampling_type):
            pass

        model_sampling = ModelSamplingAdvanced(model.model.model_config)
        model_sampling.set_parameters(shift=shift, multiplier=multiplier)
        m.add_object_patch("model_sampling", model_sampling)
        return (m, )


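# AuraFlow reuses the SD3 discrete-flow patch but with multiplier=1.0 instead of
# the SD3 default of 1000.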
class ModelSamplingAuraFlow(ModelSamplingSD3):
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "shift": ("FLOAT", {"default": 1.73, "min": 0.0, "max": 100.0, "step": 0.01}),
                              }}

    FUNCTION = "patch_aura"

    def patch_aura(self, model, shift):
        return self.patch(model, shift, multiplier=1.0)


class ModelSamplingFlux:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "max_shift": ("FLOAT", {"default": 1.15, "min": 0.0, "max": 100.0, "step": 0.01}),
                              "base_shift": ("FLOAT", {"default": 0.5, "min": 0.0, "max": 100.0, "step": 0.01}),
                              "width": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}),
                              "height": ("INT", {"default": 1024, "min": 16, "max": nodes.MAX_RESOLUTION, "step": 8}),
                              }}

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "advanced/model"

    def patch(self, model, max_shift, base_shift, width, height):
        m = model.clone()

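        # Interpolate the shift linearly with the number of image tokens:
        # width * height / (8 * 8 * 2 * 2) counts 2x2 latent patches (assuming the
        # usual 8x VAE downscale and 2x2 patchify), so base_shift applies at
        # 256 tokens (~256x256 images) and max_shift at 4096 tokens (~1024x1024).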
        x1 = 256
        x2 = 4096
        mm = (max_shift - base_shift) / (x2 - x1)
        b = base_shift - mm * x1
        shift = (width * height / (8 * 8 * 2 * 2)) * mm + b

        sampling_base = comfy.model_sampling.ModelSamplingFlux
        sampling_type = comfy.model_sampling.CONST

        class ModelSamplingAdvanced(sampling_base, sampling_type):
            pass

        model_sampling = ModelSamplingAdvanced(model.model.model_config)
        model_sampling.set_parameters(shift=shift)
        m.add_object_patch("model_sampling", model_sampling)
        return (m, )


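# Continuous EDM sampling with a configurable sigma range. The
# "edm_playground_v2.5" option also switches sigma_data to 0.5 and swaps in the
# Playground v2.5 latent format.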
class ModelSamplingContinuousEDM:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "sampling": (["v_prediction", "edm_playground_v2.5", "eps"],),
                              "sigma_max": ("FLOAT", {"default": 120.0, "min": 0.0, "max": 1000.0, "step": 0.001, "round": False}),
                              "sigma_min": ("FLOAT", {"default": 0.002, "min": 0.0, "max": 1000.0, "step": 0.001, "round": False}),
                              }}

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "advanced/model"

    def patch(self, model, sampling, sigma_max, sigma_min):
        m = model.clone()

        latent_format = None
        sigma_data = 1.0
        if sampling == "eps":
            sampling_type = comfy.model_sampling.EPS
        elif sampling == "v_prediction":
            sampling_type = comfy.model_sampling.V_PREDICTION
        elif sampling == "edm_playground_v2.5":
            sampling_type = comfy.model_sampling.EDM
            sigma_data = 0.5
            latent_format = comfy.latent_formats.SDXL_Playground_2_5()

        class ModelSamplingAdvanced(comfy.model_sampling.ModelSamplingContinuousEDM, sampling_type):
            pass

        model_sampling = ModelSamplingAdvanced(model.model.model_config)
        model_sampling.set_parameters(sigma_min, sigma_max, sigma_data)
        m.add_object_patch("model_sampling", model_sampling)
        if latent_format is not None:
            m.add_object_patch("latent_format", latent_format)
        return (m, )


class ModelSamplingContinuousV:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "sampling": (["v_prediction"],),
                              "sigma_max": ("FLOAT", {"default": 500.0, "min": 0.0, "max": 1000.0, "step": 0.001, "round": False}),
                              "sigma_min": ("FLOAT", {"default": 0.03, "min": 0.0, "max": 1000.0, "step": 0.001, "round": False}),
                              }}

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "advanced/model"

    def patch(self, model, sampling, sigma_max, sigma_min):
        m = model.clone()

        latent_format = None
        sigma_data = 1.0
        if sampling == "v_prediction":
            sampling_type = comfy.model_sampling.V_PREDICTION

        class ModelSamplingAdvanced(comfy.model_sampling.ModelSamplingContinuousV, sampling_type):
            pass

        model_sampling = ModelSamplingAdvanced(model.model.model_config)
        model_sampling.set_parameters(sigma_min, sigma_max, sigma_data)
        m.add_object_patch("model_sampling", model_sampling)
        return (m, )


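# RescaleCFG: rescaled classifier-free guidance. The guided prediction's
# per-sample standard deviation is matched to that of the positive prediction
# and blended back with the plain CFG result by `multiplier` (the rescale
# technique from "Common Diffusion Noise Schedules and Sample Steps are Flawed").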
class RescaleCFG:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "multiplier": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0, "step": 0.01}),
                              }}

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "patch"

    CATEGORY = "advanced/model"

    def patch(self, model, multiplier):
        def rescale_cfg(args):
            cond = args["cond"]
            uncond = args["uncond"]
            cond_scale = args["cond_scale"]
            sigma = args["sigma"]
            sigma = sigma.view(sigma.shape[:1] + (1,) * (cond.ndim - 1))
            x_orig = args["input"]

            # rescale cfg has to be done on v-pred model output
            x = x_orig / (sigma * sigma + 1.0)
            cond = ((x - (x_orig - cond)) * (sigma ** 2 + 1.0) ** 0.5) / (sigma)
            uncond = ((x - (x_orig - uncond)) * (sigma ** 2 + 1.0) ** 0.5) / (sigma)

            # rescale cfg
            x_cfg = uncond + cond_scale * (cond - uncond)
            ro_pos = torch.std(cond, dim=(1, 2, 3), keepdim=True)
            ro_cfg = torch.std(x_cfg, dim=(1, 2, 3), keepdim=True)

            x_rescaled = x_cfg * (ro_pos / ro_cfg)
            x_final = multiplier * x_rescaled + (1.0 - multiplier) * x_cfg

            return x_orig - (x - x_final * sigma / (sigma * sigma + 1.0) ** 0.5)

        m = model.clone()
        m.set_model_sampler_cfg_function(rescale_cfg)
        return (m, )


NODE_CLASS_MAPPINGS = {
    "ModelSamplingDiscrete": ModelSamplingDiscrete,
    "ModelSamplingContinuousEDM": ModelSamplingContinuousEDM,
    "ModelSamplingContinuousV": ModelSamplingContinuousV,
    "ModelSamplingStableCascade": ModelSamplingStableCascade,
    "ModelSamplingSD3": ModelSamplingSD3,
    "ModelSamplingAuraFlow": ModelSamplingAuraFlow,
    "ModelSamplingFlux": ModelSamplingFlux,
    "RescaleCFG": RescaleCFG,
}