// ComfyUI/web/scripts/api.js

class ComfyApi extends EventTarget {
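// Event types that have had listeners registered; websocket messages with an
// unrecognised type are only dispatched if their type is in this set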
#registered = new Set();
constructor() {
super();
this.api_host = location.host;
this.api_base = location.pathname.split('/').slice(0, -1).join('/');
this.initialClientId = sessionStorage.getItem("clientId");
}
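/**
* Resolves an API route against the base path the frontend is served from
* @param {string} route The API route, e.g. "/prompt"
* @returns {string} The resolved URL path
*/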
apiURL(route) {
return this.api_base + route;
}
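/**
* Wrapper around fetch that resolves the route against the API base path and
* attaches the current user id (this.user, assigned elsewhere in the frontend)
* as the Comfy-User header
* @param {string} route The API route to fetch
* @param {RequestInit} [options] Fetch options
* @returns {Promise<Response>} The fetch response
*/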
fetchApi(route, options) {
if (!options) {
options = {};
}
if (!options.headers) {
options.headers = {};
}
options.headers["Comfy-User"] = this.user;
return fetch(this.apiURL(route), options);
}
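/**
* Registers an event listener and records the event type so that custom
* websocket message types with listeners are dispatched instead of rejected
*/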
addEventListener(type, callback, options) {
super.addEventListener(type, callback, options);
this.#registered.add(type);
}
/**
* Poll status for Colab and other environments that don't support websockets.
*/
#pollQueue() {
setInterval(async () => {
try {
const resp = await this.fetchApi("/prompt");
const status = await resp.json();
this.dispatchEvent(new CustomEvent("status", { detail: status }));
} catch (error) {
this.dispatchEvent(new CustomEvent("status", { detail: null }));
}
}, 1000);
}
/**
* Creates and connects a WebSocket for realtime updates
* @param {boolean} isReconnect If the socket connection is a reconnect attempt
*/
#createSocket(isReconnect) {
if (this.socket) {
return;
}
let opened = false;
let existingSession = window.name;
if (existingSession) {
existingSession = "?clientId=" + existingSession;
}
this.socket = new WebSocket(
`ws${window.location.protocol === "https:" ? "s" : ""}://${this.api_host}${this.api_base}/ws${existingSession}`
);
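// Receive binary frames (preview images) as ArrayBuffers rather than Blobs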
this.socket.binaryType = "arraybuffer";
this.socket.addEventListener("open", () => {
opened = true;
if (isReconnect) {
this.dispatchEvent(new CustomEvent("reconnected"));
}
});
this.socket.addEventListener("error", () => {
if (this.socket) this.socket.close();
if (!isReconnect && !opened) {
this.#pollQueue();
}
});
this.socket.addEventListener("close", () => {
setTimeout(() => {
this.socket = null;
this.#createSocket(true);
}, 300);
if (opened) {
this.dispatchEvent(new CustomEvent("status", { detail: null }));
this.dispatchEvent(new CustomEvent("reconnecting"));
}
});
this.socket.addEventListener("message", (event) => {
try {
if (event.data instanceof ArrayBuffer) {
const view = new DataView(event.data);
const eventType = view.getUint32(0);
const buffer = event.data.slice(4);
switch (eventType) {
case 1: {
// Preview image frame: the first 4 bytes of the payload encode the image format,
// the rest is the encoded image
const imageType = new DataView(buffer).getUint32(0);
let imageMime;
switch (imageType) {
case 1:
default:
imageMime = "image/jpeg";
break;
case 2:
imageMime = "image/png";
}
const imageBlob = new Blob([buffer.slice(4)], { type: imageMime });
this.dispatchEvent(new CustomEvent("b_preview", { detail: imageBlob }));
break;
}
default:
throw new Error(`Unknown binary websocket message of type ${eventType}`);
}
}
else {
const msg = JSON.parse(event.data);
switch (msg.type) {
case "status":
if (msg.data.sid) {
this.clientId = msg.data.sid;
window.name = this.clientId; // use window name so it isn't reused when duplicating tabs
sessionStorage.setItem("clientId", this.clientId); // store in session storage so duplicate tab can load correct workflow
}
this.dispatchEvent(new CustomEvent("status", { detail: msg.data.status }));
break;
case "progress":
this.dispatchEvent(new CustomEvent("progress", { detail: msg.data }));
break;
case "executing":
this.dispatchEvent(new CustomEvent("executing", { detail: msg.data.display_node }));
break;
case "executed":
this.dispatchEvent(new CustomEvent("executed", { detail: msg.data }));
break;
case "execution_start":
this.dispatchEvent(new CustomEvent("execution_start", { detail: msg.data }));
break;
case "execution_success":
this.dispatchEvent(new CustomEvent("execution_success", { detail: msg.data }));
break;
case "execution_error":
this.dispatchEvent(new CustomEvent("execution_error", { detail: msg.data }));
break;
case "execution_cached":
this.dispatchEvent(new CustomEvent("execution_cached", { detail: msg.data }));
break;
default:
if (this.#registered.has(msg.type)) {
this.dispatchEvent(new CustomEvent(msg.type, { detail: msg.data }));
} else {
throw new Error(`Unknown message type ${msg.type}`);
}
}
}
} catch (error) {
console.warn("Unhandled message:", event.data, error);
}
});
}
/**
* Initialises sockets and realtime updates
*/
init() {
this.#createSocket();
}
/**
* Gets a list of extension urls
* @returns An array of script urls to import
*/
async getExtensions() {
const resp = await this.fetchApi("/extensions", { cache: "no-store" });
return await resp.json();
}
/**
* Gets a list of embedding names
* @returns An array of embedding names
*/
async getEmbeddings() {
const resp = await this.fetchApi("/embeddings", { cache: "no-store" });
return await resp.json();
}
/**
* Loads node object definitions for the graph
* @returns The node definitions
*/
async getNodeDefs() {
const resp = await this.fetchApi("/object_info", { cache: "no-store" });
return await resp.json();
}
/**
* Queues a prompt for execution
* @param {number} number The index at which to queue the prompt, passing -1 will insert the prompt at the front of the queue
* @param {object} prompt The prompt data to queue
*/
async queuePrompt(number, { output, workflow }) {
const body = {
client_id: this.clientId,
prompt: output,
extra_data: { extra_pnginfo: { workflow } },
};
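// number === -1 queues the prompt at the front; any other non-zero value requests an explicit queue position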
if (number === -1) {
body.front = true;
} else if (number != 0) {
body.number = number;
}
const res = await this.fetchApi("/prompt", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(body),
});
if (res.status !== 200) {
throw {
response: await res.json(),
};
}
return await res.json();
}
/**
* Loads a list of items (queue or history)
* @param {string} type The type of items to load, queue or history
* @returns The items of the specified type grouped by their status
*/
async getItems(type) {
if (type === "queue") {
return this.getQueue();
}
return this.getHistory();
}
/**
* Gets the current state of the queue
* @returns The currently running and queued items
*/
async getQueue() {
try {
const res = await this.fetchApi("/queue");
const data = await res.json();
return {
// Running action uses a different endpoint for cancelling
Running: data.queue_running.map((prompt) => ({
prompt,
remove: { name: "Cancel", cb: () => api.interrupt() },
})),
Pending: data.queue_pending.map((prompt) => ({ prompt })),
};
} catch (error) {
console.error(error);
return { Running: [], Pending: [] };
}
}
/**
* Gets the prompt execution history
* @param { number } [max_items] Maximum number of history entries to return
* @returns Prompt history including node outputs
*/
async getHistory(max_items=200) {
try {
const res = await this.fetchApi(`/history?max_items=${max_items}`);
return { History: Object.values(await res.json()) };
} catch (error) {
console.error(error);
return { History: [] };
}
}
/**
* Gets system & device stats
* @returns System stats such as python version, OS, per device info
*/
async getSystemStats() {
const res = await this.fetchApi("/system_stats");
return await res.json();
}
/**
* Sends a POST request to the API
* @param { string } type The endpoint to post to
* @param { object } [body] Optional POST data
*/
async #postItem(type, body) {
try {
await this.fetchApi("/" + type, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: body ? JSON.stringify(body) : undefined,
});
} catch (error) {
console.error(error);
}
}
/**
* Deletes an item from the specified list
* @param {string} type The type of item to delete, queue or history
* @param {number} id The id of the item to delete
*/
async deleteItem(type, id) {
await this.#postItem(type, { delete: [id] });
}
/**
* Clears the specified list
* @param {string} type The type of list to clear, queue or history
*/
async clearItems(type) {
await this.#postItem(type, { clear: true });
}
/**
* Interrupts the execution of the running prompt
*/
async interrupt() {
await this.#postItem("interrupt", null);
}
/**
* Gets user configuration data and where data should be stored
* @returns { Promise<{ storage: "server" | "browser", users?: Record<string, unknown>, migrated?: boolean }> }
*/
async getUserConfig() {
return (await this.fetchApi("/users")).json();
}
/**
* Creates a new user
* @param { string } username
* @returns The fetch response
*/
createUser(username) {
return this.fetchApi("/users", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({ username }),
});
}
/**
* Gets all setting values for the current user
* @returns { Promise<Record<string, unknown>> } A dictionary of id -> value
*/
async getSettings() {
return (await this.fetchApi("/settings")).json();
}
/**
* Gets a setting for the current user
* @param { string } id The id of the setting to fetch
* @returns { Promise<unknown> } The setting value
*/
async getSetting(id) {
return (await this.fetchApi(`/settings/${encodeURIComponent(id)}`)).json();
}
/**
* Stores a dictionary of settings for the current user
* @param { Record<string, unknown> } settings Dictionary of setting id -> value to save
* @returns { Promise<void> }
*/
async storeSettings(settings) {
return this.fetchApi(`/settings`, {
method: "POST",
body: JSON.stringify(settings)
});
}
/**
* Stores a setting for the current user
* @param { string } id The id of the setting to update
* @param { unknown } value The value of the setting
* @returns { Promise<void> }
*/
async storeSetting(id, value) {
return this.fetchApi(`/settings/${encodeURIComponent(id)}`, {
method: "POST",
body: JSON.stringify(value)
});
}
/**
* Gets a user data file for the current user
* @param { string } file The name of the userdata file to load
* @param { RequestInit } [options]
* @returns { Promise<Response> } The fetch response object
*/
async getUserData(file, options) {
return this.fetchApi(`/userdata/${encodeURIComponent(file)}`, options);
}
/**
* Stores a user data file for the current user
* @param { string } file The name of the userdata file to save
* @param { unknown } data The data to save to the file
* @param { RequestInit & { overwrite?: boolean, stringify?: boolean, throwOnError?: boolean } } [options]
* @returns { Promise<Response> }
*/
async storeUserData(file, data, options = { overwrite: true, stringify: true, throwOnError: true }) {
const resp = await this.fetchApi(`/userdata/${encodeURIComponent(file)}?overwrite=${options?.overwrite}`, {
method: "POST",
body: options?.stringify ? JSON.stringify(data) : data,
...options,
});
if (resp.status !== 200 && options?.throwOnError !== false) {
throw new Error(`Error storing user data file '${file}': ${resp.status} ${resp.statusText}`);
}
return resp;
}
/**
* Deletes a user data file for the current user
* @param { string } file The name of the userdata file to delete
*/
async deleteUserData(file) {
const resp = await this.fetchApi(`/userdata/${encodeURIComponent(file)}`, {
method: "DELETE",
});
if (resp.status !== 204) {
throw new Error(`Error removing user data file '${file}': ${resp.status} ${resp.statusText}`);
}
}
/**
* Move a user data file for the current user
* @param { string } source The userdata file to move
* @param { string } dest The destination for the file
* @param { { overwrite?: boolean } } [options] Set overwrite to replace an existing destination file
*/
async moveUserData(source, dest, options = { overwrite: false }) {
const resp = await this.fetchApi(`/userdata/${encodeURIComponent(source)}/move/${encodeURIComponent(dest)}?overwrite=${options?.overwrite}`, {
method: "POST",
});
return resp;
}
/**
* @overload
* Lists user data files for the current user
* @param { string } dir The directory in which to list files
* @param { boolean } [recurse] If the listing should be recursive
* @param { true } [split] If the paths should be split based on the os path separator
* @returns { Promise<string[][]> } The list of split file paths in the format [fullPath, ...splitPath]
*/
/**
* @overload
* Lists user data files for the current user
* @param { string } dir The directory in which to list files
* @param { boolean } [recurse] If the listing should be recursive
* @param { false | undefined } [split] If the paths should be split based on the os path separator
* @returns { Promise<string[]> } The list of files
*/
async listUserData(dir, recurse, split) {
const resp = await this.fetchApi(
`/userdata?${new URLSearchParams({
recurse,
dir,
split,
})}`
);
if (resp.status === 404) return [];
if (resp.status !== 200) {
throw new Error(`Error getting user data list '${dir}': ${resp.status} ${resp.statusText}`);
}
return resp.json();
}
}
export const api = new ComfyApi();
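// Example usage (illustrative sketch): import the shared instance, open the
// websocket, listen for events dispatched above, and queue a prompt.
//
//   import { api } from "./api.js"; // path relative to the importing module
//   api.init();
//   api.addEventListener("status", ({ detail }) => console.log("queue status", detail));
//   api.addEventListener("progress", ({ detail }) => console.log("progress", detail));
//   // `output` is the API-format prompt and `workflow` is the serialized graph
//   const result = await api.queuePrompt(0, { output, workflow });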