Hook up more of the API

space-nuko
2023-05-11 23:11:07 -05:00
parent 4d74720ae9
commit 34c18dea90
11 changed files with 388 additions and 179 deletions
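For context, a minimal sketch of how the reworked `$lib/api` surface in this commit is meant to be consumed: typed event listeners instead of unwrapping `CustomEvent.detail`, and Promise-shaped responses from `queuePrompt`. Names follow the definitions in the diff below; the real call sites live in `ComfyApp`, and the prompt/extra_data values here are placeholders.

```ts
import ComfyAPI, { type NodeID, type PromptID } from "$lib/api";

const api = new ComfyAPI();

// Listeners are typed per event name; no more unwrapping CustomEvent.detail.
api.addEventListener("status", (status) => {
    console.log("queue remaining:", status?.execInfo?.queueRemaining);
});
api.addEventListener("executed", (promptID: PromptID, nodeID: NodeID, output) => {
    console.log(`prompt ${promptID}: node ${nodeID} finished`, output);
});

api.init();

// queuePrompt now resolves to { promptID } or { error } instead of throwing on a non-200 response.
// Placeholder prompt data; ComfyApp passes the serialized graph here.
const res = await api.queuePrompt(-1, { output: {}, workflow: {} }, { extra_pnginfo: {} });
if (res.error != null) console.error("queue failed:", res.error);
else console.log("queued prompt", res.promptID);
```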

View File

@@ -24,10 +24,6 @@ type ComfyGraphEvents = {
 export default class ComfyGraph extends LGraph {
     eventBus: TypedEmitter<ComfyGraphEvents> = new EventEmitter() as TypedEmitter<ComfyGraphEvents>;

-    constructor() {
-        super();
-    }
-
     override onConfigure() {
         console.debug("Configured");
         this.eventBus.emit("configured", this);

View File

@@ -60,7 +60,7 @@ export default class ComfyGraphCanvas extends LGraphCanvas {
         let state = get(queueState);
         let color = null;
-        if (node.id === +state.runningNodeId) {
+        if (node.id === +state.runningNodeID) {
             color = "#0f0";
         // this.app can be null inside the constructor if rendering is taking place already
         } else if (this.app && this.app.dragOverNode && node.id === this.app.dragOverNode.id) {

View File

@@ -1,3 +1,8 @@
+import type { Progress, SerializedPrompt, SerializedPromptOutput, SerializedPromptOutputs } from "./components/ComfyApp";
+import type TypedEmitter from "typed-emitter";
+import EventEmitter from "events";
+import type { GalleryOutput } from "./nodes/ComfyWidgetNodes";
+
 type PromptRequestBody = {
     client_id: string,
     prompt: any,
@@ -8,27 +13,68 @@ type PromptRequestBody = {
 export type QueueItemType = "queue" | "history";

-export type ComfyAPIQueueStatus = {
-    exec_info: {
-        queue_remaining: number | "X";
-    }
+export type ComfyAPIStatusExecInfo = {
+    queueRemaining: number | "X";
 }

-export default class ComfyAPI extends EventTarget {
-    private registered: Set<string> = new Set<string>();
+export type ComfyAPIStatusResponse = {
+    execInfo?: ComfyAPIStatusExecInfo,
+    error?: string
+}
+
+export type ComfyAPIQueueResponse = {
+    running: ComfyAPIHistoryItem[],
+    pending: ComfyAPIHistoryItem[],
+    error?: string
+}
+
+export type NodeID = string;
+export type PromptID = string; // UUID
+
+export type ComfyAPIHistoryItem = [
+    number, // prompt number
+    PromptID,
+    SerializedPrompt,
+    any, // extra data
+    NodeID[] // good outputs
+]
+
+export type ComfyAPIPromptResponse = {
+    promptID?: PromptID,
+    error?: string
+}
+
+export type ComfyAPIHistoryEntry = {
+    prompt: ComfyAPIHistoryItem,
+    outputs: SerializedPromptOutputs
+}
+
+export type ComfyAPIHistoryResponse = {
+    history: Record<PromptID, ComfyAPIHistoryEntry>,
+    error?: string
+}
+
+type ComfyAPIEvents = {
+    status: (status: ComfyAPIStatusResponse | null, error?: Error | null) => void,
+    progress: (progress: Progress) => void,
+    reconnecting: () => void,
+    reconnected: () => void,
+    executing: (promptID: PromptID | null, runningNodeID: NodeID | null) => void,
+    executed: (promptID: PromptID, nodeID: NodeID, output: SerializedPromptOutput) => void,
+    execution_cached: (promptID: PromptID, nodes: NodeID[]) => void,
+    execution_error: (promptID: PromptID, message: string) => void,
+}
+
+export default class ComfyAPI {
+    private eventBus: TypedEmitter<ComfyAPIEvents> = new EventEmitter() as TypedEmitter<ComfyAPIEvents>;
+
     socket: WebSocket | null = null;
     clientId: string | null = null;
     hostname: string | null = null;
     port: number | null = 8188;

-    constructor() {
-        super();
-    }
-
-    override addEventListener(type: string, callback: EventListenerOrEventListenerObject | null, options?: AddEventListenerOptions | boolean) {
-        super.addEventListener(type, callback, options);
-        this.registered.add(type);
+    addEventListener<E extends keyof ComfyAPIEvents>(type: E, callback: ComfyAPIEvents[E]) {
+        this.eventBus.addListener(type, callback);
     }

     /**
@@ -39,9 +85,9 @@ export default class ComfyAPI extends EventTarget {
             try {
                 const resp = await fetch(this.getBackendUrl() + "/prompt");
                 const status = await resp.json();
-                this.dispatchEvent(new CustomEvent("status", { detail: status }));
+                this.eventBus.emit("status", { execInfo: { queueRemaining: status.exec_info.queue_remaining } });
             } catch (error) {
-                this.dispatchEvent(new CustomEvent("status", { detail: null }));
+                this.eventBus.emit("status", { error: error.toString() });
             }
         }, 1000);
     }
@@ -77,7 +123,7 @@ export default class ComfyAPI extends EventTarget {
         this.socket.addEventListener("open", () => {
             opened = true;
             if (isReconnect) {
-                this.dispatchEvent(new CustomEvent("reconnected"));
+                this.eventBus.emit("reconnected");
             }
         });
@@ -94,8 +140,8 @@ export default class ComfyAPI extends EventTarget {
                 this.createSocket(true);
             }, 300);
             if (opened) {
-                this.dispatchEvent(new CustomEvent("status", { detail: null }));
-                this.dispatchEvent(new CustomEvent("reconnecting"));
+                this.eventBus.emit("status", null);
+                this.eventBus.emit("reconnecting");
             }
         });
@@ -108,29 +154,25 @@ export default class ComfyAPI extends EventTarget {
                         this.clientId = msg.data.sid;
                         sessionStorage["Comfy.SessionId"] = this.clientId;
                     }
-                    this.dispatchEvent(new CustomEvent("status", { detail: msg.data.status }));
+                    this.eventBus.emit("status", msg.data.status);
                     break;
                 case "progress":
-                    this.dispatchEvent(new CustomEvent("progress", { detail: msg.data }));
+                    this.eventBus.emit("progress", msg.data as Progress);
                     break;
                 case "executing":
-                    this.dispatchEvent(new CustomEvent("executing", { detail: msg.data }));
+                    this.eventBus.emit("executing", msg.data.prompt_id, msg.data.node);
                     break;
                 case "executed":
-                    this.dispatchEvent(new CustomEvent("executed", { detail: msg.data }));
+                    this.eventBus.emit("executed", msg.data.prompt_id, msg.data.node, msg.data.output);
                     break;
                 case "execution_cached":
-                    this.dispatchEvent(new CustomEvent("execution_cached", { detail: msg.data }));
+                    this.eventBus.emit("execution_cached", msg.data.prompt_id, msg.data.nodes);
                     break;
                 case "execution_error":
-                    this.dispatchEvent(new CustomEvent("execution_error", { detail: msg.data }));
+                    this.eventBus.emit("execution_error", msg.data.prompt_id, msg.data.message);
                     break;
                 default:
-                    if (this.registered.has(msg.type)) {
-                        this.dispatchEvent(new CustomEvent(msg.type, { detail: msg.data }));
-                    } else {
-                        throw new Error("Unknown message type");
-                    }
+                    throw new Error(`Unknown message type: ${msg.type} ${msg}`);
             }
         } catch (error) {
             console.warn("Unhandled message:", event.data);
@@ -149,27 +191,27 @@ export default class ComfyAPI extends EventTarget {
     * Gets a list of extension urls
     * @returns An array of script urls to import
     */
-    async getExtensions() {
-        const resp = await fetch(this.getBackendUrl() + `/extensions`, { cache: "no-store" });
-        return await resp.json();
+    async getExtensions(): Promise<any> {
+        return fetch(this.getBackendUrl() + `/extensions`, { cache: "no-store" })
+            .then(resp => resp.json())
     }

     /**
     * Gets a list of embedding names
     * @returns An array of script urls to import
     */
-    async getEmbeddings() {
-        const resp = await fetch(this.getBackendUrl() + "/embeddings", { cache: "no-store" });
-        return await resp.json();
+    async getEmbeddings(): Promise<any> {
+        return fetch(this.getBackendUrl() + "/embeddings", { cache: "no-store" })
+            .then(resp => resp.json())
     }

     /**
     * Loads node object definitions for the graph
     * @returns The node definitions
     */
-    async getNodeDefs() {
-        const resp = await fetch(this.getBackendUrl() + "/object_info", { cache: "no-store" });
-        return await resp.json();
+    async getNodeDefs(): Promise<any> {
+        return fetch(this.getBackendUrl() + "/object_info", { cache: "no-store" })
+            .then(resp => resp.json())
     }
@@ -177,11 +219,11 @@ export default class ComfyAPI extends EventTarget {
     /**
     * @param {number} number The index at which to queue the prompt, passing -1 will insert the prompt at the front of the queue
    * @param {object} prompt The prompt data to queue
    */
-    async queuePrompt(number: number, { output, workflow }) {
+    async queuePrompt(number: number, { output, workflow }, extra_data: any): Promise<ComfyAPIPromptResponse> {
        const body: PromptRequestBody = {
            client_id: this.clientId,
            prompt: output,
-            extra_data: { extra_pnginfo: { workflow } },
+            extra_data,
            front: false,
            number: number
        };
@@ -192,67 +234,52 @@ export default class ComfyAPI extends EventTarget {
            body.number = number;
        }

-        const res = await fetch(this.getBackendUrl() + "/prompt", {
+        let postBody = null;
+        try {
+            postBody = JSON.stringify(body)
+        }
+        catch (error) {
+            return Promise.reject({ error })
+        }
+
+        return fetch(this.getBackendUrl() + "/prompt", {
            method: "POST",
            headers: {
                "Content-Type": "application/json",
            },
-            body: JSON.stringify(body),
-        });
-
-        if (res.status !== 200) {
-            throw {
-                response: await res.text(),
-            };
-        }
-    }
-
-    /**
-     * Loads a list of items (queue or history)
-     * @param {string} type The type of items to load, queue or history
-     * @returns The items of the specified type grouped by their status
-     */
-    async getItems(type: QueueItemType) {
-        if (type === "queue") {
-            return this.getQueue();
-        }
-
-        return this.getHistory();
+            body: postBody
+        })
+            .then(res => res.json())
+            .then(raw => { return { promptID: raw.prompt_id } })
+            .catch(res => { throw res.text() })
+            .catch(error => { return { error } })
    }
    /**
     * Gets the current state of the queue
     * @returns The currently running and queued items
     */
-    async getQueue() {
-        try {
-            const res = await fetch(this.getBackendUrl() + "/queue");
-            const data = await res.json();
-            return {
-                // Running action uses a different endpoint for cancelling
-                Running: data.queue_running.map((prompt) => ({
-                    prompt,
-                    remove: { name: "Cancel", cb: () => this.interrupt() },
-                })),
-                Pending: data.queue_pending.map((prompt) => ({ prompt })),
-            };
-        } catch (error) {
-            console.error(error);
-            return { Running: [], Pending: [], error };
-        }
+    async getQueue(): Promise<ComfyAPIQueueResponse> {
+        return fetch(this.getBackendUrl() + "/queue")
+            .then(res => res.json())
+            .then(data => {
+                return {
+                    running: data.queue_running,
+                    pending: data.queue_pending,
+                }
+            })
+            .catch(error => { return { running: [], pending: [], error } })
    }

    /**
     * Gets the prompt execution history
     * @returns Prompt history including node outputs
     */
-    async getHistory() {
-        try {
-            const res = await fetch(this.getBackendUrl() + "/history");
-            return { History: Object.values(await res.json()) };
-        } catch (error) {
-            console.error(error);
-            return { History: [], error };
-        }
+    async getHistory(): Promise<ComfyAPIHistoryResponse> {
+        return fetch(this.getBackendUrl() + "/history")
+            .then(res => res.json())
+            .then(history => { return { history } })
+            .catch(error => { return { history: {}, error } })
    }
@@ -260,18 +287,21 @@ export default class ComfyAPI extends EventTarget {
     /**
     * @param {*} type The endpoint to post to
    * @param {*} body Optional POST data
    */
-    private async postItem(type: string, body: any) {
+    private async postItem(type: QueueItemType, body: any): Promise<Response> {
        try {
-            await fetch("/" + type, {
+            body = body ? JSON.stringify(body) : body
+        }
+        catch (error) {
+            return Promise.reject(error)
+        }
+
+        return fetch("/" + type, {
            method: "POST",
            headers: {
                "Content-Type": "application/json",
            },
-            body: body ? JSON.stringify(body) : undefined,
+            body: body
        });
-        } catch (error) {
-            console.error(error);
-        }
    }
@@ -279,22 +309,22 @@ export default class ComfyAPI extends EventTarget {
     /**
     * @param {string} type The type of item to delete, queue or history
    * @param {number} id The id of the item to delete
    */
-    async deleteItem(type: string, id: number) {
-        await this.postItem(type, { delete: [id] });
+    async deleteItem(type: QueueItemType, id: number): Promise<Response> {
+        return this.postItem(type, { delete: [id] });
    }

    /**
     * Clears the specified list
     * @param {string} type The type of list to clear, queue or history
    */
-    async clearItems(type: string) {
-        await this.postItem(type, { clear: true });
+    async clearItems(type: QueueItemType): Promise<Response> {
+        return this.postItem(type, { clear: true });
    }

    /**
     * Interrupts the execution of the running prompt
    */
-    async interrupt() {
-        await this.postItem("interrupt", null);
+    async interrupt(): Promise<Response> {
+        return fetch("/interrupt", { method: "POST" });
    }
 }

View File

@@ -1,6 +1,6 @@
 import { LiteGraph, LGraph, LGraphCanvas, LGraphNode, type LGraphNodeConstructor, type LGraphNodeExecutable, type SerializedLGraph, type SerializedLGraphGroup, type SerializedLGraphNode, type SerializedLLink, NodeMode, type Vector2, BuiltInSlotType, type INodeInputSlot } from "@litegraph-ts/core";
 import type { LConnectionKind, INodeSlot } from "@litegraph-ts/core";
-import ComfyAPI, { type ComfyAPIQueueStatus } from "$lib/api"
+import ComfyAPI, { type ComfyAPIStatusResponse, type NodeID, type PromptID } from "$lib/api"
 import { getPngMetadata, importA1111 } from "$lib/pnginfo";
 import EventEmitter from "events";
 import type TypedEmitter from "typed-emitter";
@@ -32,6 +32,7 @@ import { download, jsonToJsObject, promptToGraphVis, range, workflowToGraphVis }
 import notify from "$lib/notify";
 import configState from "$lib/stores/configState";
 import { blankGraph } from "$lib/defaultGraph";
+import type { GalleryOutput } from "$lib/nodes/ComfyWidgetNodes";

 export const COMFYBOX_SERIAL_VERSION = 1;
@@ -55,20 +56,22 @@ export type SerializedAppState = {
 }

 /** [link origin, link index] | value */
-export type SerializedPromptInput = [string, number] | any
+export type SerializedPromptInput = [NodeID, number] | any

 export type SerializedPromptInputs = {
-    inputs: Record<string, SerializedPromptInput>,
+    inputs: Record<NodeID, SerializedPromptInput>,
     class_type: string
 }

-export type SerializedPromptOutput = Record<string, SerializedPromptInputs>
+export type SerializedPromptInputsAll = Record<NodeID, SerializedPromptInputs>

 export type SerializedPrompt = {
     workflow: SerializedLGraph,
-    output: SerializedPromptOutput
+    output: SerializedPromptInputsAll
 }

+export type SerializedPromptOutputs = Record<NodeID, GalleryOutput>
+
 export type Progress = {
     value: number,
     max: number
@@ -176,6 +179,8 @@ export default class ComfyApp {
         this.addPasteHandler();
         this.addKeyboardHandler();

+        await this.updateHistoryAndQueue();
+
         // await this.#invokeExtensionsAsync("setup");

         // Ensure the canvas fills the window
@@ -319,47 +324,48 @@
     * Handles updates from the API socket
     */
    private addApiUpdateHandlers() {
-        this.api.addEventListener("status", ({ detail: ComfyAPIStatus }: CustomEvent) => {
-            // this.ui.setStatus(detail);
+        this.api.addEventListener("status", (status: ComfyAPIStatusResponse) => {
+            queueState.statusUpdated(status);
        });

        this.api.addEventListener("reconnecting", () => {
-            // this.ui.dialog.show("Reconnecting...");
+            uiState.reconnecting()
        });

        this.api.addEventListener("reconnected", () => {
-            // this.ui.dialog.close();
+            uiState.reconnected()
        });

-        this.api.addEventListener("progress", ({ detail }: CustomEvent) => {
-            queueState.progressUpdated(detail);
+        this.api.addEventListener("progress", (progress: Progress) => {
+            queueState.progressUpdated(progress);
            this.lGraph.setDirtyCanvas(true, false);
        });

-        this.api.addEventListener("executing", ({ detail }: CustomEvent) => {
-            queueState.executingUpdated(detail.node);
+        this.api.addEventListener("executing", (promptID: PromptID | null, nodeID: NodeID | null) => {
+            queueState.executingUpdated(promptID, nodeID);
            this.lGraph.setDirtyCanvas(true, false);
        });

-        this.api.addEventListener("status", (ev: CustomEvent) => {
-            queueState.statusUpdated(ev.detail as ComfyAPIQueueStatus);
+        this.api.addEventListener("status", (status: ComfyAPIStatusResponse | null) => {
+            queueState.statusUpdated(status);
        });

-        this.api.addEventListener("executed", ({ detail }: CustomEvent) => {
-            this.nodeOutputs[detail.node] = detail.output;
-            const node = this.lGraph.getNodeById(detail.node) as ComfyGraphNode;
+        this.api.addEventListener("executed", (promptID: PromptID, nodeID: NodeID, output: GalleryOutput) => {
+            this.nodeOutputs[nodeID] = output;
+            const node = this.lGraph.getNodeById(parseInt(nodeID)) as ComfyGraphNode;
            if (node?.onExecuted) {
-                node.onExecuted(detail.output);
+                node.onExecuted(output);
            }
+            queueState.onExecuted(promptID, nodeID, output)
        });

-        this.api.addEventListener("execution_cached", ({ detail }: CustomEvent) => {
-            // TODO detail.nodes
+        this.api.addEventListener("execution_cached", (promptID: PromptID, nodes: NodeID[]) => {
+            queueState.executionCached(promptID, nodes)
        });

-        this.api.addEventListener("execution_error", ({ detail }: CustomEvent) => {
-            queueState.update(s => { s.progress = null; s.runningNodeId = null; return s; })
-            notify(`Execution error: ${detail.message}`, { type: "error", timeout: 10000 })
+        this.api.addEventListener("execution_error", (promptID: PromptID, message: string) => {
+            queueState.executionError(promptID, message)
+            notify(`Execution error: ${message}`, { type: "error", timeout: 10000 })
        });

        this.api.init();
@@ -379,6 +385,13 @@ export default class ComfyApp {
         });
     }

+    private async updateHistoryAndQueue() {
+        const queue = await this.api.getQueue();
+        const history = await this.api.getHistory();
+        console.warn("QUEUE", queue)
+        console.warn("HISTORY", history)
+    }
+
     private requestPermissions() {
         if (Notification.permission === "default") {
             Notification.requestPermission()
@@ -443,6 +456,8 @@ export default class ComfyApp {
         this.lGraph.start();
         this.lGraph.eventBus.on("afterExecute", () => this.lCanvas.draw(true))
+
+        uiState.update(s => { s.uiUnlocked = this.lGraph._nodes.length === 0; return s; })
     }

     async initDefaultGraph() {
@@ -729,10 +744,20 @@ export default class ComfyApp {
                 const p = await this.graphToPrompt(tag);
                 console.debug(promptToGraphVis(p))

+                const extra_data = { extra_pnginfo: { workflow: p.workflow } }
+
+                let error = null;
+                let promptID = null;
                 try {
-                    await this.api.queuePrompt(num, p);
+                    const response = await this.api.queuePrompt(num, p, extra_data);
+                    promptID = response.promptID;
+                    error = response.error;
                 } catch (error) {
-                    // this.ui.dialog.show(error.response || error.toString());
+                    error = error.toString();
+                }
+
+                if (error != null) {
                     const mes = error.response || error.toString()
                     notify(`Error queuing prompt:\n${mes}`, { type: "error" })
                     console.error(promptToGraphVis(p))
@@ -748,7 +773,7 @@ export default class ComfyApp {
                 }

                 this.lCanvas.draw(true, true);
-                // await this.ui.queue.update();
+                queueState.afterQueued(promptID, num, p, extra_data)
             }
         }
     } finally {
@@ -767,7 +792,7 @@ export default class ComfyApp {
         if (pngInfo.comfyBoxConfig) {
             this.deserialize(JSON.parse(pngInfo.comfyBoxConfig));
         } else if (pngInfo.parameters) {
-            throw "TODO import A111 import!"
+            throw "TODO A111 import!"
             // importA1111(this.lGraph, pngInfo.parameters, this.api);
         }
         else {

View File

@@ -49,18 +49,18 @@
     $: if (entries) {
         _entries = []

-        // for (const entry of entries) {
-        //     for (const outputs of Object.values(entry.outputs)) {
-        //         const allImages = outputs.images.map(r => {
-        //             // TODO configure backend URL
-        //             const url = "http://localhost:8188/view?"
-        //             const params = new URLSearchParams(r)
-        //             return url + params
-        //         });
-        //
-        //         _entries.push({ allImages, name: "Output" })
-        //     }
-        // }
+        for (const entry of entries) {
+            for (const outputs of Object.values(entry.outputs)) {
+                const allImages = outputs.images.map(r => {
+                    // TODO configure backend URL
+                    const url = "http://localhost:8188/view?"
+                    const params = new URLSearchParams(r)
+                    return url + params
+                });
+
+                _entries.push({ allImages, name: "Output" })
+            }
+        }
     }
 </script>
@@ -76,9 +76,9 @@
         {/each}
     </div>
     <div class="bottom">
-        {#if $queueState.runningNodeId || $queueState.progress}
+        {#if $queueState.runningNodeID || $queueState.progress}
             <div class="node-name">
-                <span>Node: {getNodeInfo($queueState.runningNodeId)}</span>
+                <span>Node: {getNodeInfo($queueState.runningNodeID)}</span>
             </div>
             <div>
                 <ProgressBar value={$queueState.progress?.value} max={$queueState.progress?.max} styles="height: 30px;" />

View File

@@ -50,7 +50,7 @@
     $: if ($queueState && widget && widget.node) {
-        dragItem.isNodeExecuting = $queueState.runningNodeId === widget.node.id;
+        dragItem.isNodeExecuting = $queueState.runningNodeID === widget.node.id;
     }

     function getWidgetClass() {
@@ -72,7 +72,7 @@
 <div class="widget {widget.attrs.classes} {getWidgetClass()}"
     class:edit={edit}
     class:selected={$uiState.uiUnlocked && $layoutState.currentSelection.includes(widget.id)}
-    class:is-executing={$queueState.runningNodeId && $queueState.runningNodeId == widget.node.id}
+    class:is-executing={$queueState.runningNodeID && $queueState.runningNodeId == widget.node.id}
     class:hidden={hidden}
 >
     <svelte:component this={widget.node.svelteComponentType} {widget} {isMobile} />

View File

@@ -1,7 +1,7 @@
 import LGraphCanvas from "@litegraph-ts/core/src/LGraphCanvas";
 import ComfyGraphNode from "./ComfyGraphNode";
 import ComfyWidgets from "$lib/widgets"
-import type { ComfyWidgetNode } from "./ComfyWidgetNodes";
+import type { ComfyWidgetNode, GalleryOutput } from "./ComfyWidgetNodes";
 import { BuiltInSlotType, type SerializedLGraphNode } from "@litegraph-ts/core";
 import type IComfyInputSlot from "$lib/IComfyInputSlot";
 import type { ComfyInputConfig } from "$lib/IComfyInputSlot";
@@ -110,7 +110,7 @@ export class ComfyBackendNode extends ComfyGraphNode {
         }
     }

-    override onExecuted(outputData: any) {
+    override onExecuted(outputData: GalleryOutput) {
         console.warn("onExecuted outputs", outputData)
         this.triggerSlot(0, outputData)
     }

View File

@@ -3,7 +3,7 @@ import type { SerializedPrompt } from "$lib/components/ComfyApp";
 import type ComfyWidget from "$lib/components/widgets/ComfyWidget";
 import { LGraph, LGraphNode, LLink, LiteGraph, NodeMode, type INodeInputSlot, type SerializedLGraphNode, type Vector2, type INodeOutputSlot, LConnectionKind, type SlotType, LGraphCanvas, getStaticPropertyOnInstance, type PropertyLayout, type SlotLayout } from "@litegraph-ts/core";
 import type { SvelteComponentDev } from "svelte/internal";
-import type { ComfyWidgetNode } from "./ComfyWidgetNodes";
+import type { ComfyWidgetNode, GalleryOutput } from "./ComfyWidgetNodes";
 import type IComfyInputSlot from "$lib/IComfyInputSlot";
 import uiState from "$lib/stores/uiState";
 import { get } from "svelte/store";
@@ -48,7 +48,7 @@ export default class ComfyGraphNode extends LGraphNode {
     * Triggered when the backend sends a finished output back with this node's ID.
     * Valid for output nodes like SaveImage and PreviewImage.
     */
-    onExecuted?(output: any): void;
+    onExecuted?(output: GalleryOutput): void;

    /*
     * Allows you to manually specify an auto-config for certain input slot

View File

@@ -1,5 +1,6 @@
-import type { ComfyAPIQueueStatus } from "$lib/api";
-import type { Progress } from "$lib/components/ComfyApp";
+import type { ComfyAPIHistoryItem, ComfyAPIQueueResponse, ComfyAPIStatusResponse, NodeID, PromptID } from "$lib/api";
+import type { Progress, SerializedPrompt, SerializedPromptOutputs } from "$lib/components/ComfyApp";
+import type { GalleryOutput } from "$lib/nodes/ComfyWidgetNodes";
 import { writable, type Writable } from "svelte/store";

 export type QueueItem = {
@@ -7,48 +8,188 @@ export type QueueItem = {
 }

 type QueueStateOps = {
-    statusUpdated: (status: ComfyAPIQueueStatus | null) => void,
-    executingUpdated: (runningNodeId: string | null) => void,
-    progressUpdated: (progress: Progress | null) => void
+    queueUpdated: (queue: ComfyAPIQueueResponse) => void,
+    statusUpdated: (status: ComfyAPIStatusResponse | null) => void,
+    executingUpdated: (promptID: PromptID | null, runningNodeID: NodeID | null) => void,
+    executionCached: (promptID: PromptID, nodes: NodeID[]) => void,
+    executionError: (promptID: PromptID, message: string) => void,
+    progressUpdated: (progress: Progress) => void
+    afterQueued: (promptID: PromptID, number: number, prompt: SerializedPrompt, extraData: any) => void
+    onExecuted: (promptID: PromptID, nodeID: NodeID, output: GalleryOutput) => void
+}
+
+export type QueueEntry = {
+    number: number,
+    promptID: PromptID,
+    prompt: SerializedPrompt,
+    extraData: any,
+    goodOutputs: NodeID[],
+    // Collected while the prompt is still executing
+    outputs: SerializedPromptOutputs,
+}
+
+export type CompletedQueueEntry = {
+    entry: QueueEntry,
+    type: "success" | "error" | "all_cached",
+    error?: string,
 }

 export type QueueState = {
+    queueRunning: QueueEntry[],
+    queuePending: QueueEntry[],
+    queueCompleted: CompletedQueueEntry[],
     queueRemaining: number | "X" | null;
-    runningNodeId: number | null;
+    runningNodeID: number | null;
     progress: Progress | null
 }

 type WritableQueueStateStore = Writable<QueueState> & QueueStateOps;

-const store: Writable<QueueState> = writable({ queueRemaining: null, runningNodeId: null, progress: null })
+const store: Writable<QueueState> = writable({
+    queueRunning: [],
+    queuePending: [],
+    queueCompleted: [],
+    queueRemaining: null,
+    runningNodeID: null,
+    progress: null
+})

-function statusUpdated(status: ComfyAPIQueueStatus | null) {
+function toQueueEntry(resp: ComfyAPIHistoryItem): QueueEntry {
+    const [num, promptID, prompt, extraData, goodOutputs] = resp
+    return {
+        number: num,
+        promptID,
+        prompt,
+        extraData,
+        goodOutputs,
+        outputs: {}
+    }
+}
+
+function queueUpdated(queue: ComfyAPIQueueResponse) {
     store.update((s) => {
-        if (status !== null)
-            s.queueRemaining = status.exec_info.queue_remaining;
+        s.queueRunning = queue.running.map(toQueueEntry);
+        s.queuePending = queue.pending.map(toQueueEntry);
+        s.queueRemaining = s.queuePending.length;
         return s
     })
 }

-function executingUpdated(runningNodeId: string | null) {
-    store.update((s) => {
-        s.progress = null;
-        s.runningNodeId = parseInt(runningNodeId);
-        return s
-    })
-}
-
-function progressUpdated(progress: Progress | null) {
+function progressUpdated(progress: Progress) {
     store.update((s) => {
         s.progress = progress;
         return s
     })
 }

+function statusUpdated(status: ComfyAPIStatusResponse | null) {
+    store.update((s) => {
+        if (status !== null)
+            s.queueRemaining = status.execInfo.queueRemaining;
+        return s
+    })
+}
+
+function executingUpdated(promptID: PromptID | null, runningNodeID: NodeID | null) {
+    console.debug("[queueState] executingUpdated", promptID, runningNodeID)
+    store.update((s) => {
+        s.progress = null;
+        if (runningNodeID != null) {
+            s.runningNodeID = parseInt(runningNodeID);
+        }
+        else if (promptID != null) {
+            // Prompt finished executing.
+            const index = s.queuePending.findIndex(e => e.promptID === promptID)
+            if (index) {
+                s.queuePending = s.queuePending.splice(index, 1);
+            }
+            s.progress = null;
+            s.runningNodeID = null;
+        }
+        return s
+    })
+}
+
+function executionCached(promptID: PromptID, nodes: NodeID[]) {
+    console.debug("[queueState] executionCached", promptID, nodes)
+    store.update(s => {
+        const index = s.queuePending.findIndex(e => e.promptID === promptID)
+        if (index) {
+            const entry = s.queuePending[index]
+            if (nodes.length >= Object.keys(entry.prompt.output).length) {
+                s.queuePending = s.queuePending.splice(index, 1);
+                const completed: CompletedQueueEntry = {
+                    entry,
+                    type: "all_cached"
+                }
+                s.queueCompleted.push(completed)
+            }
+        }
+        s.progress = null;
+        s.runningNodeID = null;
+        return s
+    })
+}
+
+function executionError(promptID: PromptID, message: string) {
+    console.debug("[queueState] executionError", promptID, message)
+    store.update(s => {
+        const index = s.queuePending.findIndex(e => e.promptID === promptID)
+        if (index) {
+            const entry = s.queuePending[index]
+            s.queuePending = s.queuePending.splice(index, 1);
+            const completed: CompletedQueueEntry = {
+                entry,
+                type: "error",
+                error: message
+            }
+            s.queueCompleted.push(completed)
+        }
+        s.progress = null;
+        s.runningNodeID = null;
+        return s
+    })
+}
+
+function afterQueued(promptID: PromptID, number: number, prompt: SerializedPrompt, extraData: any) {
+    console.debug("[queueState] afterQueued", promptID, Object.keys(prompt.workflow.nodes))
+    store.update(s => {
+        const entry: QueueEntry = {
+            number,
+            promptID,
+            prompt,
+            extraData,
+            goodOutputs: [],
+            outputs: {}
+        }
+        s.queuePending.push(entry)
+        return s
+    })
+}
+
+function onExecuted(promptID: PromptID, nodeID: NodeID, output: GalleryOutput) {
+    console.debug("[queueState] onExecuted", promptID, nodeID, output)
+    store.update(s => {
+        const entry = s.queuePending.find(e => e.promptID === promptID)
+        if (entry) {
+            entry.outputs[nodeID] = output;
+            s.queuePending.push(entry)
+        }
+        return s
+    })
+}
+
 const queueStateStore: WritableQueueStateStore =
 {
     ...store,
+    queueUpdated,
     statusUpdated,
+    progressUpdated,
     executingUpdated,
-    progressUpdated
+    executionCached,
+    executionError,
+    afterQueued,
+    onExecuted
 }
 export default queueStateStore;

View File

@@ -10,11 +10,17 @@ export type UIState = {
     uiUnlocked: boolean,
     uiEditMode: UIEditMode,
+    reconnecting: boolean,
     isSavingToLocalStorage: boolean
 }

-export type WritableUIStateStore = Writable<UIState>;
-const store: WritableUIStateStore = writable(
+type UIStateOps = {
+    reconnecting: () => void,
+    reconnected: () => void,
+}
+
+export type WritableUIStateStore = Writable<UIState> & UIStateOps;
+const store: Writable<UIState> = writable(
     {
         graphLocked: false,
         nodesLocked: false,
@@ -22,11 +28,22 @@ const store: WritableUIStateStore = writable(
         uiUnlocked: false,
         uiEditMode: "widgets",
+        reconnecting: false,
         isSavingToLocalStorage: false
     })

+function reconnecting() {
+    store.update(s => { s.reconnecting = true; return s; })
+}
+
+function reconnected() {
+    store.update(s => { s.reconnecting = false; return s; })
+}
+
 const uiStateStore: WritableUIStateStore =
 {
-    ...store
+    ...store,
+    reconnecting,
+    reconnected
 }
 export default uiStateStore;

View File

@@ -57,9 +57,9 @@
 </script>

 <div class="bottom">
-    {#if $queueState.runningNodeId || $queueState.progress}
+    {#if $queueState.runningNodeID || $queueState.progress}
         <div class="node-name">
-            <span>Node: {getNodeInfo($queueState.runningNodeId)}</span>
+            <span>Node: {getNodeInfo($queueState.runningNodeID)}</span>
         </div>
         <div class="progress-bar">
             <ProgressBar value={$queueState.progress?.value} max={$queueState.progress?.max} />