From 56d9496b18baa5946834d1982908df0091e1c925 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Fri, 12 Jan 2024 18:17:06 -0500
Subject: [PATCH] Rename status notes to status messages.

I think message describes them better.
---
 execution.py | 18 +++++++++---------
 main.py      |  2 +-
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/execution.py b/execution.py
index 752e1d5a..e91e9a41 100644
--- a/execution.py
+++ b/execution.py
@@ -276,12 +276,12 @@ class PromptExecutor:
         self.outputs = {}
         self.object_storage = {}
         self.outputs_ui = {}
-        self.status_notes = []
+        self.status_messages = []
         self.success = True
         self.old_prompt = {}
 
-    def add_note(self, event, data, broadcast: bool):
-        self.status_notes.append((event, data))
+    def add_message(self, event, data, broadcast: bool):
+        self.status_messages.append((event, data))
         if self.server.client_id is not None or broadcast:
             self.server.send_sync(event, data, self.server.client_id)
 
@@ -298,7 +298,7 @@
                 "node_type": class_type,
                 "executed": list(executed),
             }
-            self.add_note("execution_interrupted", mes, broadcast=True)
+            self.add_message("execution_interrupted", mes, broadcast=True)
         else:
             mes = {
                 "prompt_id": prompt_id,
@@ -312,7 +312,7 @@
                 "current_inputs": error["current_inputs"],
                 "current_outputs": error["current_outputs"],
             }
-            self.add_note("execution_error", mes, broadcast=False)
+            self.add_message("execution_error", mes, broadcast=False)
 
         # Next, remove the subsequent outputs since they will not be executed
         to_delete = []
@@ -334,8 +334,8 @@
         else:
             self.server.client_id = None
 
-        self.status_notes = []
-        self.add_note("execution_start", { "prompt_id": prompt_id}, broadcast=False)
+        self.status_messages = []
+        self.add_message("execution_start", { "prompt_id": prompt_id}, broadcast=False)
 
         with torch.inference_mode():
             #delete cached outputs if nodes don't exist for them
@@ -368,7 +368,7 @@
                 del d
 
             comfy.model_management.cleanup_models()
-            self.add_note("execution_cached",
+            self.add_message("execution_cached",
                           { "nodes": list(current_outputs) , "prompt_id": prompt_id}, broadcast=False)
             executed = set()
             output_node_id = None
@@ -742,7 +742,7 @@ class PromptQueue:
     class ExecutionStatus(NamedTuple):
         status_str: Literal['success', 'error']
         completed: bool
-        notes: List[str]
+        messages: List[str]
 
     def task_done(self, item_id, outputs,
                   status: Optional['PromptQueue.ExecutionStatus']):
diff --git a/main.py b/main.py
index a40ad2a4..69d9bce6 100644
--- a/main.py
+++ b/main.py
@@ -115,7 +115,7 @@ def prompt_worker(q, server):
                         status=execution.PromptQueue.ExecutionStatus(
                             status_str='success' if e.success else 'error',
                             completed=e.success,
-                            notes=e.status_notes))
+                            messages=e.status_messages))
             if server.client_id is not None:
                 server.send_sync("executing", { "node": None, "prompt_id": prompt_id }, server.client_id)
 
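
A note for readers skimming the diff: the rename is mechanical, but the accumulate-and-forward pattern it touches is easier to see in one place. Below is a minimal, hypothetical sketch of that pattern, not ComfyUI's actual classes; StubServer and the trimmed Executor are stand-ins I made up, and only the names add_message, status_messages, send_sync, client_id, and broadcast come from the diff above.

    class StubServer:
        # Hypothetical stand-in for the real server object; the real
        # send_sync pushes the event to a client over a websocket.
        def __init__(self, client_id=None):
            self.client_id = client_id

        def send_sync(self, event, data, sid=None):
            print(f"-> {event} to {sid if sid else 'all clients'}: {data}")

    class Executor:
        # Hypothetical stand-in for PromptExecutor, reduced to add_message.
        def __init__(self, server):
            self.server = server
            self.status_messages = []

        def add_message(self, event, data, broadcast: bool):
            # Every message is recorded so it can later be attached to the
            # finished task via PromptQueue.ExecutionStatus(messages=...).
            self.status_messages.append((event, data))
            # It is forwarded immediately only when a client is attached or
            # the event must reach everyone (broadcast=True).
            if self.server.client_id is not None or broadcast:
                self.server.send_sync(event, data, self.server.client_id)

    e = Executor(StubServer(client_id="abc123"))
    e.add_message("execution_start", {"prompt_id": "p1"}, broadcast=False)
    e.add_message("execution_interrupted", {"prompt_id": "p1"}, broadcast=True)
    print(e.status_messages)  # both events were recorded, regardless of delivery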