Show message in the frontend if prompt execution raises an exception

space-nuko 2023-05-25 13:03:41 -05:00
parent ffec815257
commit 6b2a8a3845
3 changed files with 45 additions and 10 deletions

execution.py

@@ -258,27 +258,31 @@ class PromptExecutor:
         self.old_prompt = {}
         self.server = server
 
-    def handle_execution_error(self, prompt_id, current_outputs, executed, error, ex):
+    def handle_execution_error(self, prompt_id, prompt, current_outputs, executed, error, ex):
+        node_id = error["node_id"]
+        class_type = prompt[node_id]["class_type"]
+
         # First, send back the status to the frontend depending
         # on the exception type
         if isinstance(ex, comfy.model_management.InterruptProcessingException):
             mes = {
                 "prompt_id": prompt_id,
+                "node_id": node_id,
+                "node_type": class_type,
                 "executed": list(executed),
-                "node_id": error["node_id"],
             }
             self.server.send_sync("execution_interrupted", mes, self.server.client_id)
         else:
             if self.server.client_id is not None:
                 mes = {
                     "prompt_id": prompt_id,
+                    "node_id": node_id,
+                    "node_type": class_type,
+                    "executed": list(executed),
                     "message": error["message"],
                     "exception_type": error["exception_type"],
                     "traceback": error["traceback"],
-                    "node_id": error["node_id"],
                     "current_inputs": error["current_inputs"],
                     "current_outputs": error["current_outputs"],
                 }
@@ -346,7 +350,7 @@ class PromptExecutor:
                 # error was raised
                 success, error, ex = recursive_execute(self.server, prompt, self.outputs, output_node_id, extra_data, executed, prompt_id, self.outputs_ui)
                 if success is not True:
-                    self.handle_execution_error(prompt_id, current_outputs, executed, error, ex)
+                    self.handle_execution_error(prompt_id, prompt, current_outputs, executed, error, ex)
 
             for x in executed:
                 self.old_prompt[x] = copy.deepcopy(prompt[x])
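
Note: the `execution_error` message assembled above reaches the browser through the WebSocket as a `{ type, data }` envelope (see the `msg.type` switch in api.js below). A minimal sketch of what a client might receive; the field names come from the `mes` dict above, all values are illustrative:

// Hypothetical "execution_error" payload as decoded by the frontend.
// Field names match the mes dict in handle_execution_error(); values are made up.
const msg = {
	type: "execution_error",
	data: {
		prompt_id: "a1b2c3d4",          // id of the queued prompt
		node_id: "5",                   // node whose execution raised
		node_type: "KSampler",          // class_type looked up from the prompt
		executed: ["1", "2", "4"],      // nodes that finished before the error
		message: "shape mismatch",      // error["message"]
		exception_type: "RuntimeError", // error["exception_type"]
		traceback: ["Traceback (most recent call last):\n", "  ...\n"],
		current_inputs: {},             // error["current_inputs"]
		current_outputs: {},            // error["current_outputs"]
	},
};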

web/scripts/api.js

@@ -88,6 +88,12 @@ class ComfyApi extends EventTarget {
 					case "executed":
 						this.dispatchEvent(new CustomEvent("executed", { detail: msg.data }));
 						break;
+					case "execution_start":
+						this.dispatchEvent(new CustomEvent("execution_start", { detail: msg.data }));
+						break;
+					case "execution_error":
+						this.dispatchEvent(new CustomEvent("execution_error", { detail: msg.data }));
+						break;
 					default:
 						if (this.#registered.has(msg.type)) {
 							this.dispatchEvent(new CustomEvent(msg.type, { detail: msg.data }));
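
Note: because the handler dispatches these messages as CustomEvents on the shared api object, other frontend code can react to them without extending this switch. A minimal usage sketch, assuming the usual `api` import; the handler bodies are illustrative and not part of this commit:

// Hypothetical subscribers registered from an extension or script.
import { api } from "./api.js";

api.addEventListener("execution_start", () => {
	console.log("prompt execution started");
});

api.addEventListener("execution_error", ({ detail }) => {
	// detail carries the fields built by handle_execution_error() on the backend
	console.error(`node ${detail.node_id} (${detail.node_type}) failed: ${detail.message}`);
});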

web/scripts/app.js

@@ -784,8 +784,10 @@ export class ComfyApp {
 				color = "red";
 				lineWidth = 2;
 			}
+			else if (self.lastExecutionError && +self.lastExecutionError.node_id === node.id) {
+				color = "#f0f";
+				lineWidth = 2;
+			}
 
 			if (color) {
 				const shape = node._shape || node.constructor.shape || LiteGraph.ROUND_SHAPE;
@@ -895,6 +897,17 @@ export class ComfyApp {
 			}
 		});
 
+		api.addEventListener("execution_start", ({ detail }) => {
+			this.lastExecutionError = null
+		});
+
+		api.addEventListener("execution_error", ({ detail }) => {
+			this.lastExecutionError = detail;
+			const formattedError = this.#formatExecutionError(detail);
+			this.ui.dialog.show(formattedError);
+			this.canvas.draw(true, true);
+		});
+
 		api.init();
 	}
@@ -1269,7 +1282,7 @@ export class ComfyApp {
 		return { workflow, output };
 	}
 
-	#formatError(error) {
+	#formatPromptError(error) {
 		if (error == null) {
 			return "(unknown error)"
 		}
@@ -1294,6 +1307,18 @@ export class ComfyApp {
 		return "(unknown error)"
 	}
 
+	#formatExecutionError(error) {
+		if (error == null) {
+			return "(unknown error)"
+		}
+
+		const traceback = error.traceback.join("")
+		const nodeId = error.node_id
+		const nodeType = error.node_type
+
+		return `Error occurred when executing ${nodeType}:\n\n${error.message}\n\n${traceback}`
+	}
+
 	async queuePrompt(number, batchCount = 1) {
 		this.#queueItems.push({ number, batchCount });
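
Note: to illustrate the template literal above, for a hypothetical error payload (all values made up) #formatExecutionError yields a dialog string like this:

// Illustrative input and output for #formatExecutionError.
const detail = {
	node_id: "5",
	node_type: "KSampler",
	message: "mat1 and mat2 shapes cannot be multiplied",
	traceback: ["Traceback (most recent call last):\n", "  ...\n"],
};
// Resulting dialog text:
//
// Error occurred when executing KSampler:
//
// mat1 and mat2 shapes cannot be multiplied
//
// Traceback (most recent call last):
//   ...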
@@ -1315,7 +1340,7 @@ export class ComfyApp {
 			try {
 				await api.queuePrompt(number, p);
 			} catch (error) {
-				const formattedError = this.#formatError(error)
+				const formattedError = this.#formatPromptError(error)
 				this.ui.dialog.show(formattedError);
 				if (error.response) {
 					this.lastPromptError = error.response;
@@ -1419,7 +1444,7 @@ export class ComfyApp {
 	clean() {
 		this.nodeOutputs = {};
 		this.lastPromptError = null;
+		this.lastExecutionError = null;
 	}
 }