Show message in the frontend if prompt execution raises an exception
parent ffec815257
commit 6b2a8a3845

execution.py (14 lines changed)
@@ -258,27 +258,31 @@ class PromptExecutor:
         self.old_prompt = {}
         self.server = server

-    def handle_execution_error(self, prompt_id, current_outputs, executed, error, ex):
+    def handle_execution_error(self, prompt_id, prompt, current_outputs, executed, error, ex):
+        node_id = error["node_id"]
+        class_type = prompt[node_id]["class_type"]
+
         # First, send back the status to the frontend depending
         # on the exception type
         if isinstance(ex, comfy.model_management.InterruptProcessingException):
             mes = {
                 "prompt_id": prompt_id,
+                "node_id": node_id,
+                "node_type": class_type,
                 "executed": list(executed),
-
-                "node_id": error["node_id"],
             }
             self.server.send_sync("execution_interrupted", mes, self.server.client_id)
         else:
             if self.server.client_id is not None:
                 mes = {
                     "prompt_id": prompt_id,
+                    "node_id": node_id,
+                    "node_type": class_type,
                     "executed": list(executed),

                     "message": error["message"],
                     "exception_type": error["exception_type"],
                     "traceback": error["traceback"],
-                    "node_id": error["node_id"],
                     "current_inputs": error["current_inputs"],
                     "current_outputs": error["current_outputs"],
                 }
@@ -346,7 +350,7 @@ class PromptExecutor:
                 # error was raised
                 success, error, ex = recursive_execute(self.server, prompt, self.outputs, output_node_id, extra_data, executed, prompt_id, self.outputs_ui)
                 if success is not True:
-                    self.handle_execution_error(prompt_id, current_outputs, executed, error, ex)
+                    self.handle_execution_error(prompt_id, prompt, current_outputs, executed, error, ex)

             for x in executed:
                 self.old_prompt[x] = copy.deepcopy(prompt[x])
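With the prompt now passed into handle_execution_error, the backend can report both the failing node's id and its class_type. The remaining hunks are on the frontend side (the ComfyApi event source and the ComfyApp UI class). As a rough sketch, the "execution_error" message data the frontend receives for a non-interrupted failure looks like the object below; the keys come from the mes dict built above, the values are invented for illustration.

// Illustrative only: keys match the mes dict in handle_execution_error,
// values are made up and not taken from a real run.
const exampleExecutionError = {
	prompt_id: "d1f3c6c0",          // hypothetical id of the queued prompt
	node_id: "12",                  // node that raised the exception
	node_type: "KSampler",          // class_type looked up from the prompt
	executed: ["3", "4", "7"],      // nodes that finished before the error
	message: "shape mismatch",      // error["message"]
	exception_type: "RuntimeError", // error["exception_type"]
	traceback: ["Traceback (most recent call last):\n", "  ...\n"],
	current_inputs: {},             // error["current_inputs"]
	current_outputs: [],            // error["current_outputs"]
};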
@@ -88,6 +88,12 @@ class ComfyApi extends EventTarget {
					case "executed":
						this.dispatchEvent(new CustomEvent("executed", { detail: msg.data }));
						break;
+					case "execution_start":
+						this.dispatchEvent(new CustomEvent("execution_start", { detail: msg.data }));
+						break;
+					case "execution_error":
+						this.dispatchEvent(new CustomEvent("execution_error", { detail: msg.data }));
+						break;
					default:
						if (this.#registered.has(msg.type)) {
							this.dispatchEvent(new CustomEvent(msg.type, { detail: msg.data }));
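With these two cases in place, any script that imports the api singleton can subscribe to the new events. A minimal sketch, where the import path and the listener bodies are assumptions; only the event names come from the switch above:

// Assumed import path for the api singleton used elsewhere in the web UI.
import { api } from "./api.js";

api.addEventListener("execution_start", () => {
	// e.g. clear any stale error indicator when a new prompt starts
	console.log("prompt execution started");
});

api.addEventListener("execution_error", ({ detail }) => {
	// detail carries the payload sent by handle_execution_error
	console.error(`node ${detail.node_id} (${detail.node_type}) failed: ${detail.message}`);
});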
@@ -784,8 +784,10 @@ export class ComfyApp {
				color = "red";
				lineWidth = 2;
			}
-			self.graphTime = Date.now()
+			else if (self.lastExecutionError && +self.lastExecutionError.node_id === node.id) {
+				color = "#f0f";
+				lineWidth = 2;
+			}

			if (color) {
				const shape = node._shape || node.constructor.shape || LiteGraph.ROUND_SHAPE;
@@ -895,6 +897,17 @@ export class ComfyApp {
			}
		});

+		api.addEventListener("execution_start", ({ detail }) => {
+			this.lastExecutionError = null
+		});
+
+		api.addEventListener("execution_error", ({ detail }) => {
+			this.lastExecutionError = detail;
+			const formattedError = this.#formatExecutionError(detail);
+			this.ui.dialog.show(formattedError);
+			this.canvas.draw(true, true);
+		});
+
		api.init();
	}

@@ -1269,7 +1282,7 @@ export class ComfyApp {
		return { workflow, output };
	}

-	#formatError(error) {
+	#formatPromptError(error) {
		if (error == null) {
			return "(unknown error)"
		}
@@ -1294,6 +1307,18 @@ export class ComfyApp {
		return "(unknown error)"
	}

+	#formatExecutionError(error) {
+		if (error == null) {
+			return "(unknown error)"
+		}
+
+		const traceback = error.traceback.join("")
+		const nodeId = error.node_id
+		const nodeType = error.node_type
+
+		return `Error occurred when executing ${nodeType}:\n\n${error.message}\n\n${traceback}`
+	}
+
	async queuePrompt(number, batchCount = 1) {
		this.#queueItems.push({ number, batchCount });

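#formatExecutionError is a private method of ComfyApp, so outside code cannot call it directly; the standalone sketch below repeats the same template on an invented detail object to show roughly what the error dialog ends up displaying.

// Standalone re-implementation of the template, for illustration only.
function formatExecutionError(error) {
	if (error == null) {
		return "(unknown error)";
	}
	const traceback = error.traceback.join("");
	return `Error occurred when executing ${error.node_type}:\n\n${error.message}\n\n${traceback}`;
}

// Invented sample input; a real detail object comes from the "execution_error" event.
console.log(formatExecutionError({
	node_type: "KSampler",
	message: "shape mismatch",
	traceback: ["Traceback (most recent call last):\n", "  ...\n"],
}));
// Prints the node type, then the message, then the joined traceback.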
@@ -1315,7 +1340,7 @@ export class ComfyApp {
				try {
					await api.queuePrompt(number, p);
				} catch (error) {
-					const formattedError = this.#formatError(error)
+					const formattedError = this.#formatPromptError(error)
					this.ui.dialog.show(formattedError);
					if (error.response) {
						this.lastPromptError = error.response;
@@ -1419,7 +1444,7 @@ export class ComfyApp {
	clean() {
		this.nodeOutputs = {};
		this.lastPromptError = null;
-		this.graphTime = null
+		this.lastExecutionError = null;
	}
}