2023-03-11 18:09:28 +00:00
|
|
|
import os
|
|
|
|
from comfy_extras.chainner_models import model_loading
|
2023-04-15 22:55:17 +00:00
|
|
|
from comfy import model_management
|
2023-03-11 18:09:28 +00:00
|
|
|
import torch
|
2023-03-11 19:04:13 +00:00
|
|
|
import comfy.utils
|
2023-03-17 21:57:57 +00:00
|
|
|
import folder_paths
|
2023-05-02 18:18:07 +00:00
|
|
|
from tqdm.auto import tqdm
|
2023-03-11 18:09:28 +00:00
|
|
|
|
|
|
|
class UpscaleModelLoader:
    """Node that loads an upscaling model checkpoint from the 'upscale_models' folder."""

    @classmethod
    def INPUT_TYPES(s):
        # Offer every checkpoint file found in the configured upscale_models paths.
        return {"required": { "model_name": (folder_paths.get_filename_list("upscale_models"), ),
                              }}
    RETURN_TYPES = ("UPSCALE_MODEL",)
    FUNCTION = "load_model"

    CATEGORY = "loaders"

    def load_model(self, model_name):
        """Resolve *model_name* to a full path, load its weights, and build the model.

        Returns a one-tuple holding the model in eval mode (inference behavior
        for dropout / norm layers), as ComfyUI expects outputs as tuples.
        """
        model_path = folder_paths.get_full_path("upscale_models", model_name)
        state_dict = comfy.utils.load_torch_file(model_path)
        upscale_model = model_loading.load_state_dict(state_dict).eval()
        return (upscale_model, )
|
|
|
|
|
|
|
|
|
|
|
|
class ImageUpscaleWithModel:
    """Node that upscales an IMAGE batch with a loaded UPSCALE_MODEL, tile by tile."""

    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "upscale_model": ("UPSCALE_MODEL",),
                              "image": ("IMAGE",),
                              }}
    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "upscale"

    CATEGORY = "image/upscaling"

    def upscale(self, upscale_model, image):
        """Run *upscale_model* over *image* in overlapping tiles.

        Tiling keeps peak GPU memory bounded regardless of image size.
        Returns a one-tuple with the upscaled image, values clamped to [0, 1].
        """
        device = model_management.get_torch_device()
        upscale_model.to(device)
        # ComfyUI images are NHWC; the model expects NCHW.
        in_img = image.movedim(-1,-3).to(device)

        tile = 128 + 64
        overlap = 8
        # Ceil-divide (-(a // -b)) the height and width by the effective tile
        # stride to count how many tiles tiled_scale will process.
        its = -(in_img.shape[2] // -(tile - overlap)) * -(in_img.shape[3] // -(tile - overlap))
        pbar = tqdm(total=its)
        try:
            s = comfy.utils.tiled_scale(in_img, lambda a: upscale_model(a), tile_x=tile, tile_y=tile, overlap=overlap, upscale_amount=upscale_model.scale, pbar=pbar)
        finally:
            # Always move the model off the GPU and close the progress bar,
            # even when tiled_scale raises (e.g. CUDA out-of-memory) —
            # otherwise the weights stay resident and the bar dangles.
            upscale_model.cpu()
            pbar.close()
        s = torch.clamp(s.movedim(-3,-1), min=0, max=1.0)
        return (s,)
|
|
|
|
|
|
|
|
# Registry consumed by ComfyUI at load time: node type name -> node class.
NODE_CLASS_MAPPINGS = {
    "UpscaleModelLoader": UpscaleModelLoader,
    "ImageUpscaleWithModel": ImageUpscaleWithModel,
}
|