2024-08-07 01:59:34 +00:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2023-03-17 21:57:57 +00:00
|
|
|
import os
|
2023-06-02 20:34:47 +00:00
|
|
|
import time
|
2024-09-11 06:00:07 +00:00
|
|
|
import mimetypes
|
2024-03-29 07:07:13 +00:00
|
|
|
import logging
|
2024-09-11 06:00:07 +00:00
|
|
|
from typing import Set, List, Dict, Tuple, Literal
|
2024-08-07 01:59:34 +00:00
|
|
|
from collections.abc import Collection
|
2023-03-17 21:57:57 +00:00
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
# Extensions recognized as PyTorch-style model files.
supported_pt_extensions: set[str] = {'.ckpt', '.pt', '.bin', '.pth', '.safetensors', '.pkl', '.sft'}

# Registry mapping a logical folder name to (search paths, allowed extensions).
folder_names_and_paths: dict[str, tuple[list[str], set[str]]] = {}

base_path = os.path.dirname(os.path.realpath(__file__))
models_dir = os.path.join(base_path, "models")

folder_names_and_paths["checkpoints"] = ([os.path.join(models_dir, "checkpoints")], supported_pt_extensions)
# Fix: extension containers are declared as set[str]; these were lists.
folder_names_and_paths["configs"] = ([os.path.join(models_dir, "configs")], {".yaml"})

folder_names_and_paths["loras"] = ([os.path.join(models_dir, "loras")], supported_pt_extensions)
folder_names_and_paths["vae"] = ([os.path.join(models_dir, "vae")], supported_pt_extensions)
folder_names_and_paths["clip"] = ([os.path.join(models_dir, "clip")], supported_pt_extensions)
# "unet" is the legacy location; see map_legacy().
folder_names_and_paths["diffusion_models"] = ([os.path.join(models_dir, "unet"), os.path.join(models_dir, "diffusion_models")], supported_pt_extensions)
folder_names_and_paths["clip_vision"] = ([os.path.join(models_dir, "clip_vision")], supported_pt_extensions)
folder_names_and_paths["style_models"] = ([os.path.join(models_dir, "style_models")], supported_pt_extensions)
folder_names_and_paths["embeddings"] = ([os.path.join(models_dir, "embeddings")], supported_pt_extensions)
# "folder" is a sentinel: diffusers models are whole directories, not files.
folder_names_and_paths["diffusers"] = ([os.path.join(models_dir, "diffusers")], {"folder"})
folder_names_and_paths["vae_approx"] = ([os.path.join(models_dir, "vae_approx")], supported_pt_extensions)

folder_names_and_paths["controlnet"] = ([os.path.join(models_dir, "controlnet"), os.path.join(models_dir, "t2i_adapter")], supported_pt_extensions)
folder_names_and_paths["gligen"] = ([os.path.join(models_dir, "gligen")], supported_pt_extensions)

folder_names_and_paths["upscale_models"] = ([os.path.join(models_dir, "upscale_models")], supported_pt_extensions)

folder_names_and_paths["custom_nodes"] = ([os.path.join(base_path, "custom_nodes")], set())

folder_names_and_paths["hypernetworks"] = ([os.path.join(models_dir, "hypernetworks")], supported_pt_extensions)

folder_names_and_paths["photomaker"] = ([os.path.join(models_dir, "photomaker")], supported_pt_extensions)

# {""} matches extension-less files only.
folder_names_and_paths["classifiers"] = ([os.path.join(models_dir, "classifiers")], {""})

# These all live next to this file; base_path already equals
# os.path.dirname(os.path.realpath(__file__)), so reuse it.
output_directory = os.path.join(base_path, "output")
temp_directory = os.path.join(base_path, "temp")
input_directory = os.path.join(base_path, "input")
user_directory = os.path.join(base_path, "user")

# folder name -> (sorted filenames, {dir: mtime}, perf_counter timestamp).
filename_list_cache: dict[str, tuple[list[str], dict[str, float], float]] = {}
|
2023-05-29 15:26:57 +00:00
|
|
|
|
2024-09-11 06:00:07 +00:00
|
|
|
# Maps a file extension (without the dot) to the top-level MIME content type
# ("image", "video", "audio", ...). Filled lazily by filter_files_content_types().
# "webp" is pre-seeded — presumably because some platforms' mimetypes databases
# lack an entry for it (TODO confirm).
extension_mimetypes_cache = {
    "webp" : "image",
}
|
|
|
|
|
2024-08-18 01:28:36 +00:00
|
|
|
def map_legacy(folder_name: str) -> str:
    """Translate a legacy folder name (e.g. "unet") to its current name."""
    return {"unet": "diffusion_models"}.get(folder_name, folder_name)
|
|
|
|
|
2023-04-05 18:01:01 +00:00
|
|
|
# Best-effort creation of the default input directory at import time.
if not os.path.exists(input_directory):
    try:
        os.makedirs(input_directory)
    # Fix: was a bare `except:`, which also swallows SystemExit/KeyboardInterrupt.
    except Exception:
        logging.error("Failed to create input directory")
|
2023-04-05 18:01:01 +00:00
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def set_output_directory(output_dir: str) -> None:
    """Replace the module-wide output directory with *output_dir*."""
    global output_directory
    output_directory = output_dir
|
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def set_temp_directory(temp_dir: str) -> None:
    """Replace the module-wide temp directory with *temp_dir*."""
    global temp_directory
    temp_directory = temp_dir
|
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def set_input_directory(input_dir: str) -> None:
    """Replace the module-wide input directory with *input_dir*."""
    global input_directory
    input_directory = input_dir
|
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def get_output_directory() -> str:
    """Return the current output directory."""
    # Reading a module global needs no `global` declaration.
    return output_directory
|
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def get_temp_directory() -> str:
    """Return the current temp directory."""
    # Reading a module global needs no `global` declaration.
    return temp_directory
|
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def get_input_directory() -> str:
    """Return the current input directory."""
    # Reading a module global needs no `global` declaration.
    return input_directory
|
|
|
|
|
2024-09-12 12:10:27 +00:00
|
|
|
def get_user_directory() -> str:
    """Return the current user directory."""
    return user_directory
|
|
|
|
|
|
|
|
def set_user_directory(user_dir: str) -> None:
    """Replace the module-wide user directory with *user_dir*."""
    global user_directory
    user_directory = user_dir
|
|
|
|
|
2023-04-05 18:01:01 +00:00
|
|
|
|
|
|
|
#NOTE: used in http server so don't put folders that should not be accessed remotely
|
2024-08-07 01:59:34 +00:00
|
|
|
def get_directory_by_type(type_name: str) -> str | None:
    """Map a directory type name ("output", "temp" or "input") to its
    configured path; any other name yields None."""
    return (
        get_output_directory() if type_name == "output"
        else get_temp_directory() if type_name == "temp"
        else get_input_directory() if type_name == "input"
        else None
    )
|
|
|
|
|
2024-09-11 06:00:07 +00:00
|
|
|
def filter_files_content_types(files: List[str], content_types: List[Literal["image", "video", "audio"]]) -> List[str]:
    """
    Return the subset of *files* whose MIME content type (the part before the
    "/" in e.g. "image/png") is one of *content_types*. Files whose type
    cannot be guessed are dropped.

    Example:
        files = os.listdir(folder_paths.get_input_directory())
        filter_files_content_types(files, ["image", "audio", "video"])
    """
    global extension_mimetypes_cache
    result = []
    for file in files:
        # Fix: lowercase the cache key so "IMG.PNG" and "img.png" share one
        # entry and uppercase extensions hit pre-seeded entries like "webp".
        extension = file.split('.')[-1].lower()
        if extension not in extension_mimetypes_cache:
            mime_type, _ = mimetypes.guess_type(file, strict=False)
            if not mime_type:
                # Unknown type: skip the file and don't poison the cache.
                continue
            content_type = mime_type.split('/')[0]
            extension_mimetypes_cache[extension] = content_type
        else:
            content_type = extension_mimetypes_cache[extension]

        if content_type in content_types:
            result.append(file)
    return result
|
2023-03-17 21:57:57 +00:00
|
|
|
|
2023-04-23 19:58:55 +00:00
|
|
|
# determine base_dir rely on annotation if name is 'filename.ext [annotation]' format
|
|
|
|
# otherwise use default_path as base_dir
|
2024-08-07 01:59:34 +00:00
|
|
|
def annotated_filepath(name: str) -> tuple[str, str | None]:
    """Split a name of the form "filename.ext [annotation]" into
    (filename, base_dir) using the annotation; unannotated names
    return (name, None). The slice also removes the space before '['."""
    if name.endswith("[output]"):
        return name[:-9], get_output_directory()
    if name.endswith("[input]"):
        return name[:-8], get_input_directory()
    if name.endswith("[temp]"):
        return name[:-7], get_temp_directory()
    return name, None
|
|
|
|
|
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def get_annotated_filepath(name: str, default_dir: str | None=None) -> str:
    """Resolve an annotated name ("file [output]" etc.) to a full path.

    Unannotated names resolve against *default_dir* when given, otherwise
    against the input directory.
    """
    name, base_dir = annotated_filepath(name)
    if base_dir is None:
        base_dir = default_dir if default_dir is not None else get_input_directory()
    return os.path.join(base_dir, name)
|
|
|
|
|
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def exists_annotated_filepath(name) -> bool:
    """Return True when an annotated name points at an existing path.

    Unannotated names are checked against the input directory.
    """
    name, base_dir = annotated_filepath(name)
    if base_dir is None:
        base_dir = get_input_directory()  # fallback path
    return os.path.exists(os.path.join(base_dir, name))
|
|
|
|
|
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def add_model_folder_path(folder_name: str, full_folder_path: str) -> None:
    """Register an additional search path for *folder_name*.

    Unknown folder names get a fresh entry with no extension filter.
    """
    key = map_legacy(folder_name)
    entry = folder_names_and_paths.get(key)
    if entry is not None:
        entry[0].append(full_folder_path)
    else:
        folder_names_and_paths[key] = ([full_folder_path], set())
|
2023-03-17 21:57:57 +00:00
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def get_folder_paths(folder_name: str) -> list[str]:
    """Return a copy of the search paths registered for *folder_name*.

    Raises KeyError for unknown folder names.
    """
    paths, _extensions = folder_names_and_paths[map_legacy(folder_name)]
    return list(paths)
|
2023-03-17 21:57:57 +00:00
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def recursive_search(directory: str, excluded_dir_names: list[str] | None=None) -> tuple[list[str], dict[str, float]]:
    """Walk *directory* (following symlinks) and collect its contents.

    Returns (files, dirs): file paths relative to *directory*, and a mapping
    of absolute directory path -> mtime used for cache invalidation.
    Directories whose basename is in *excluded_dir_names* are pruned.
    A missing *directory* yields ([], {}).
    """
    if not os.path.isdir(directory):
        return [], {}

    excluded = set(excluded_dir_names) if excluded_dir_names else set()

    found_files: list[str] = []
    dir_mtimes: dict[str, float] = {}

    # Record the root directory's mtime; tolerate it vanishing underneath us.
    try:
        dir_mtimes[directory] = os.path.getmtime(directory)
    except FileNotFoundError:
        logging.warning(f"Warning: Unable to access {directory}. Skipping this path.")

    logging.debug("recursive file list on directory {}".format(directory))
    for dirpath, subdirs, filenames in os.walk(directory, followlinks=True, topdown=True):
        # topdown walk lets us prune excluded directories in place.
        subdirs[:] = [d for d in subdirs if d not in excluded]
        for file_name in filenames:
            found_files.append(os.path.relpath(os.path.join(dirpath, file_name), directory))
        for d in subdirs:
            subdir_path: str = os.path.join(dirpath, d)
            try:
                dir_mtimes[subdir_path] = os.path.getmtime(subdir_path)
            except FileNotFoundError:
                logging.warning(f"Warning: Unable to access {subdir_path}. Skipping this path.")
                continue
    logging.debug("found {} files".format(len(found_files)))
    return found_files, dir_mtimes
|
2023-03-17 21:57:57 +00:00
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def filter_files_extensions(files: Collection[str], extensions: Collection[str]) -> list[str]:
    """Return *files* (sorted) whose lowercase extension is in *extensions*.

    An empty *extensions* collection matches every file.
    """
    if not extensions:
        return sorted(files)
    return sorted(f for f in files if os.path.splitext(f)[-1].lower() in extensions)
|
2023-03-17 21:57:57 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def get_full_path(folder_name: str, filename: str) -> str | None:
    """Locate *filename* inside the search paths registered for *folder_name*.

    Returns the first existing file's full path, or None when the folder
    name is unknown or no path contains the file. Broken symlinks are
    logged and skipped.
    """
    folder_name = map_legacy(folder_name)
    if folder_name not in folder_names_and_paths:
        return None
    # Re-anchoring at "/" strips any leading "../" path-escape components.
    safe_name = os.path.relpath(os.path.join("/", filename), "/")
    for base in folder_names_and_paths[folder_name][0]:
        candidate = os.path.join(base, safe_name)
        if os.path.isfile(candidate):
            return candidate
        # islink without isfile means the link target is missing.
        if os.path.islink(candidate):
            logging.warning("WARNING path {} exists but doesn't link anywhere, skipping.".format(candidate))

    return None
|
2023-03-17 21:57:57 +00:00
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def get_filename_list_(folder_name: str) -> tuple[list[str], dict[str, float], float]:
    """Scan every registered path for *folder_name* from disk (uncached).

    Returns (sorted relative filenames, {dir: mtime}, perf_counter stamp);
    the latter two feed the cache-invalidation logic.
    """
    folder_name = map_legacy(folder_name)
    paths, extensions = folder_names_and_paths[folder_name]
    names: set[str] = set()
    all_dirs: dict[str, float] = {}
    for path in paths:
        files, dirs_found = recursive_search(path, excluded_dir_names=[".git"])
        names.update(filter_files_extensions(files, extensions))
        all_dirs.update(dirs_found)

    return sorted(names), all_dirs, time.perf_counter()
|
2023-05-29 15:26:57 +00:00
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def cached_filename_list_(folder_name: str) -> tuple[list[str], dict[str, float], float] | None:
    """Return the cached listing for *folder_name*, or None when it is
    missing or stale and must be rebuilt."""
    folder_name = map_legacy(folder_name)
    cached = filename_list_cache.get(folder_name)
    if cached is None:
        return None
    # Stale if any directory seen during the last scan changed on disk.
    for folder, time_modified in cached[1].items():
        if os.path.getmtime(folder) != time_modified:
            return None

    # Stale if a registered search path now exists that was never scanned.
    for path in folder_names_and_paths[folder_name][0]:
        if os.path.isdir(path) and path not in cached[1]:
            return None

    return cached
|
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def get_filename_list(folder_name: str) -> list[str]:
    """Return the filenames available for *folder_name*, using (and
    refreshing) the module-level cache."""
    global filename_list_cache
    folder_name = map_legacy(folder_name)
    listing = cached_filename_list_(folder_name)
    if listing is None:
        listing = get_filename_list_(folder_name)
        filename_list_cache[folder_name] = listing
    # Copy so callers can't mutate the cached list.
    return list(listing[0])
|
2023-03-17 21:57:57 +00:00
|
|
|
|
2024-08-07 01:59:34 +00:00
|
|
|
def get_save_image_path(filename_prefix: str, output_dir: str, image_width=0, image_height=0) -> tuple[str, str, int, str, str]:
    """Resolve where to save an image and the next free counter value.

    Args:
        filename_prefix: Desired name, optionally with a subfolder
            ("sub/name") and %-tokens (%width%, %height%, %year%, %month%,
            %day%, %hour%, %minute%, %second%).
        output_dir: Root folder that all output must stay inside.
        image_width: Substituted for %width%.
        image_height: Substituted for %height%.

    Returns:
        (full_output_folder, filename, counter, subfolder, filename_prefix),
        where counter is one greater than the highest existing
        "<filename>_<NNNNN>_" file in the target folder (1 if none).

    Raises:
        Exception: if the computed folder escapes output_dir.
    """
    def map_filename(filename: str) -> tuple[int, str]:
        # Split e.g. "name_00003_.png" into (3, "name_") for max() comparison.
        prefix_len = len(os.path.basename(filename_prefix))
        prefix = filename[:prefix_len + 1]
        try:
            digits = int(filename[prefix_len + 1:].split('_')[0])
        # Fix: was a bare `except:`; int() only raises ValueError here and a
        # bare except would also swallow SystemExit/KeyboardInterrupt.
        except ValueError:
            digits = 0
        return digits, prefix

    def compute_vars(input: str, image_width: int, image_height: int) -> str:
        # Substitute %-tokens using the image size and current local time.
        input = input.replace("%width%", str(image_width))
        input = input.replace("%height%", str(image_height))
        now = time.localtime()
        input = input.replace("%year%", str(now.tm_year))
        input = input.replace("%month%", str(now.tm_mon).zfill(2))
        input = input.replace("%day%", str(now.tm_mday).zfill(2))
        input = input.replace("%hour%", str(now.tm_hour).zfill(2))
        input = input.replace("%minute%", str(now.tm_min).zfill(2))
        input = input.replace("%second%", str(now.tm_sec).zfill(2))
        return input

    if "%" in filename_prefix:
        filename_prefix = compute_vars(filename_prefix, image_width, image_height)

    subfolder = os.path.dirname(os.path.normpath(filename_prefix))
    filename = os.path.basename(os.path.normpath(filename_prefix))

    full_output_folder = os.path.join(output_dir, subfolder)

    # Refuse to write outside output_dir (e.g. via "../" in the prefix).
    if os.path.commonpath((output_dir, os.path.abspath(full_output_folder))) != output_dir:
        err = "**** ERROR: Saving image outside the output folder is not allowed." + \
              "\n full_output_folder: " + os.path.abspath(full_output_folder) + \
              "\n output_dir: " + output_dir + \
              "\n commonpath: " + os.path.commonpath((output_dir, os.path.abspath(full_output_folder)))
        logging.error(err)
        raise Exception(err)

    try:
        counter = max(filter(lambda a: os.path.normcase(a[1][:-1]) == os.path.normcase(filename) and a[1][-1] == "_", map(map_filename, os.listdir(full_output_folder))))[0] + 1
    except ValueError:
        # max() over an empty sequence: no matching files yet.
        counter = 1
    except FileNotFoundError:
        os.makedirs(full_output_folder, exist_ok=True)
        counter = 1
    return full_output_folder, filename, counter, subfolder, filename_prefix
|