Converter for A1111 infotexts to standardized format
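A minimal usage sketch of how the new pieces fit together (not code from this commit; it assumes the `$lib` path alias used by the tests, and `infotextString` is a placeholder for raw infotext copied from webui):

// Hypothetical wiring of the new modules: parse raw A1111 infotext, convert it to
// the standardized format, then validate the result against the zod schema.
import parseA1111 from "$lib/parseA1111";
import convertA1111ToStdPrompt from "$lib/convertA1111ToStdPrompt";
import ComfyBoxStdPrompt from "$lib/ComfyBoxStdPrompt";

declare const infotextString: string; // raw infotext, e.g. copied from webui's PNG info tab

const parsed = parseA1111(infotextString);
if ("error" in parsed) {
    console.error("Could not parse infotext:", parsed.error);
} else {
    const stdPrompt = convertA1111ToStdPrompt(parsed);
    ComfyBoxStdPrompt.parse(stdPrompt); // throws if the converted prompt violates the schema
}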
src/lib/ComfyBoxStdPrompt.ts (new file, 155 lines)
@@ -0,0 +1,155 @@
import { z, type ZodTypeAny } from "zod"

const ModelHashes = z.object({
    a1111_shorthash: z.string().optional(),
    sha256: z.string().optional(),
}).refine(({ a1111_shorthash, sha256 }) =>
    a1111_shorthash !== undefined || sha256 !== undefined,
    { message: "At least one model hash must be specified" })

const GroupPrompt = z.object({
    positive: z.string(),
    negative: z.string()
})
export type ComfyBoxStdGroupPrompt = z.infer<typeof GroupPrompt>

const GroupCheckpoint = z.object({
    model_name: z.string().optional(),
    model_hashes: ModelHashes.optional(),
}).refine(({ model_name, model_hashes }) =>
    model_name !== undefined || model_hashes !== undefined,
    { message: "Must include either model name or model hash" }
)
export type ComfyBoxStdGroupCheckpoint = z.infer<typeof GroupCheckpoint>

const GroupVAE = z.object({
    model_name: z.string().optional(),
    model_hashes: ModelHashes.optional(),
    type: z.enum(["internal", "external"])
}).refine(({ model_name, model_hashes }) =>
    model_name !== undefined || model_hashes !== undefined,
    { message: "Must include either model name or model hashes" }
)
export type ComfyBoxStdGroupVAE = z.infer<typeof GroupVAE>

const GroupKSampler = z.object({
    cfg_scale: z.number(),
    seed: z.number(),
    steps: z.number(),
    sampler_name: z.string(),
    scheduler: z.string(),
    denoise: z.number().default(1.0)
})
export type ComfyBoxStdGroupKSampler = z.infer<typeof GroupKSampler>

const GroupLatentImage = z.object({
    width: z.number(),
    height: z.number(),
    type: z.enum(["empty", "image", "image_upscale"]).optional(),
    upscale_method: z.string().optional(),
    upscale_by: z.number().optional(),
    upscale_width: z.number().optional(),
    upscale_height: z.number().optional(),
    crop: z.string().optional(),
    mask_blur: z.number().optional(),
    batch_count: z.number().default(1).optional(),
    batch_pos: z.number().default(0).optional()
})
export type ComfyBoxStdGroupLatentImage = z.infer<typeof GroupLatentImage>

const GroupSDUpscale = z.object({
    upscaler: z.string(),
    overlap: z.number(),
})
export type ComfyBoxStdGroupSDUpscale = z.infer<typeof GroupSDUpscale>

const GroupHypernetwork = z.object({
    model_name: z.string(),
    model_hashes: ModelHashes.optional(),
    strength: z.number()
})
export type ComfyBoxStdGroupHypernetwork = z.infer<typeof GroupHypernetwork>

const LoRAModelHashes = z.object({
    addnet_shorthash: z.string().optional(),
    addnet_shorthash_legacy: z.string().optional(),
    sha256: z.string().optional(),
}).refine(({ addnet_shorthash, addnet_shorthash_legacy, sha256 }) =>
    addnet_shorthash !== undefined || addnet_shorthash_legacy !== undefined || sha256 !== undefined,
    { message: "At least one model hash must be specified" })

const GroupLoRA = z.object({
    model_name: z.string(),
    module_name: z.string().optional(),
    model_hashes: LoRAModelHashes.optional(),
    strength_unet: z.number(),
    strength_tenc: z.number()
})
export type ComfyBoxStdGroupLoRA = z.infer<typeof GroupLoRA>

const GroupControlNet = z.object({
    model: z.string(),
    model_hashes: ModelHashes.optional(),
    strength: z.number(),
})
export type ComfyBoxStdGroupControlNet = z.infer<typeof GroupControlNet>

const GroupCLIP = z.object({
    clip_skip: z.number().optional()
})
export type ComfyBoxStdGroupCLIP = z.infer<typeof GroupCLIP>

const GroupDynamicThresholding = z.object({
    mimic_scale: z.number(),
    threshold_percentile: z.number(),
    mimic_mode: z.string(),
    mimic_scale_min: z.number(),
    cfg_mode: z.string(),
    cfg_scale_minimum: z.number()
})
export type ComfyBoxStdGroupDynamicThresholding = z.infer<typeof GroupDynamicThresholding>

const group = (s: ZodTypeAny) => z.optional(z.array(s).nonempty());

const Parameters = z.object({
    prompt: group(GroupPrompt),
    checkpoint: group(GroupCheckpoint),
    vae: group(GroupVAE),
    k_sampler: group(GroupKSampler),
    clip: group(GroupCLIP),
    latent_image: group(GroupLatentImage),
    sd_upscale: group(GroupSDUpscale),
    hypernetwork: group(GroupHypernetwork),
    lora: group(GroupLoRA),
    control_net: group(GroupControlNet),
    dynamic_thresholding: group(GroupDynamicThresholding)
}).partial()
export type ComfyBoxStdParameters = z.infer<typeof Parameters>

const ComfyBoxExtraData = z.object({
    workflows: z.array(z.string())
})

const ExtraData = z.object({
    comfybox: ComfyBoxExtraData.optional()
})

const Metadata = z.object({
    version: z.number(),
    created_with: z.string(),
    author: z.string().optional(),
    commit_hash: z.string().optional(),
    extra_data: ExtraData
})

const Prompt = z.object({
    metadata: Metadata,
    parameters: Parameters
})

const ComfyBoxStdPrompt = z.object({
    prompt: Prompt,
})

export default ComfyBoxStdPrompt
export type ComfyBoxStdPrompt = z.infer<typeof ComfyBoxStdPrompt>
src/lib/convertA1111ToStdPrompt.ts (new file, 213 lines)
@@ -0,0 +1,213 @@
import type { ComfyBoxStdGroupCheckpoint, ComfyBoxStdGroupHypernetwork, ComfyBoxStdGroupKSampler, ComfyBoxStdGroupLatentImage, ComfyBoxStdGroupLoRA, ComfyBoxStdParameters, ComfyBoxStdPrompt } from "./ComfyBoxStdPrompt";
import type { A1111ParsedInfotext } from "./parseA1111";
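
// Splits an A1111 sampler name into a [sampler_name, scheduler] pair,
// e.g. "DPM++ 2M Karras" -> ["dpmpp_2m", "karras"] and "Euler" -> ["euler", "normal"].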
function getSamplerAndScheduler(a1111Sampler: string): [string, string] {
    let name = a1111Sampler.toLowerCase().replace("++", "pp").replaceAll(" ", "_");
    let scheduler = "normal";
    if (name.includes("karras")) {
        name = name.replace("karras", "").replace(/_+$/, "");
        scheduler = "karras";
    } else {
        scheduler = "normal"
    }
    return [name, scheduler]
}

const reAddNetModelName = /^([^(]+)\(([^)]+)\)$/;
const reParens = /\(([^)]+)\)/;

function parseAddNetModelNameAndHash(name: string): [string | null, string | null] {
    const match = name.match(reAddNetModelName);
    if (match) {
        return [match[1], match[2]]
    }
    return [null, null]
}

export default function convertA1111ToStdPrompt(infotext: A1111ParsedInfotext): ComfyBoxStdPrompt {
    const popOpt = (name: string): string | undefined => {
        const v = infotext.extraParams[name];
        delete infotext.extraParams[name];
        return v;
    }

    const parameters: ComfyBoxStdParameters = {}

    const hrUp = popOpt("hires upscale");
    const hrSz = popOpt("hires resize");
    let hrMethod = popOpt("hires upscaler");
    let hrWidth = undefined
    let hrHeight = undefined
    if (hrSz) {
        // "Hires resize" values are of the form "WIDTHxHEIGHT"
        [hrWidth, hrHeight] = hrSz.split("x").map(s => parseInt(s, 10));
    }

    if (hrMethod != null && hrMethod.startsWith("Latent (")) {
        const result = reParens.exec(hrMethod)
        if (result)
            hrMethod = String(result[1])
    }

    const latent_image: ComfyBoxStdGroupLatentImage = {
        width: infotext.width,
        height: infotext.height,
        upscale_method: hrMethod,
        upscale_by: hrUp ? parseFloat(hrUp) : undefined,
        upscale_width: hrWidth,
        upscale_height: hrHeight,
        batch_count: infotext.batchSize,
        batch_pos: infotext.batchPos,
    }

    const maskBlur = popOpt("mask blur")
    if (maskBlur != null)
        latent_image.mask_blur = parseFloat(maskBlur)

    parameters.latent_image = [latent_image];

    const [sampler_name, scheduler] = getSamplerAndScheduler(infotext.sampler)

    const k_sampler: ComfyBoxStdGroupKSampler = {
        steps: infotext.steps,
        seed: infotext.seed,
        cfg_scale: infotext.cfgScale,
        denoise: infotext.denoise || 1.0,
        sampler_name,
        scheduler,
    }
    parameters.k_sampler = [k_sampler];

    if (infotext.modelHash || infotext.modelName) {
        const checkpoint: ComfyBoxStdGroupCheckpoint = {
            model_name: infotext.modelName,
            model_hashes: {
                a1111_shorthash: infotext.modelHash
            }
        }
        parameters.checkpoint = [checkpoint]
    }

    const clipSkip = popOpt("clip skip")
    if (clipSkip != null) {
        parameters.clip = [{
            clip_skip: parseInt(clipSkip)
        }]
    }

    const sdUpscaleUpscaler = popOpt("sd upscale upscaler")
    if (sdUpscaleUpscaler != null) {
        const sdUpscaleOverlap = popOpt("sd upscale overlap") || "64"
        parameters.sd_upscale = [{
            upscaler: sdUpscaleUpscaler,
            overlap: parseInt(sdUpscaleOverlap)
        }]
    }

    for (const [extraNetworkType, extraNetworks] of Object.entries(infotext.extraNetworks)) {
        for (const extraNetworkParams of extraNetworks) {
            let strength;
            switch (extraNetworkType.toLowerCase()) {
                case "lora":
                    strength = parseFloat(extraNetworkParams.items[1]);
                    const lora: ComfyBoxStdGroupLoRA = {
                        model_name: extraNetworkParams.items[0],
                        strength_unet: strength,
                        strength_tenc: strength,
                    }
                    if (parameters.lora)
                        parameters.lora.push(lora)
                    else
                        parameters.lora = [lora]
                    break;
                case "hypernet":
                    strength = parseFloat(extraNetworkParams.items[1]);
                    const hypernetwork: ComfyBoxStdGroupHypernetwork = {
                        model_name: extraNetworkParams.items[0],
                        strength
                    }
                    if (parameters.hypernetwork)
                        parameters.hypernetwork.push(hypernetwork)
                    else
                        parameters.hypernetwork = [hypernetwork]
                    break;
                default:
                    break;
            }
        }
        delete infotext.extraNetworks[extraNetworkType]
    }
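
    // AddNet (Additional Networks extension) parameters look like:
    //   "AddNet Module 1: LoRA, AddNet Model 1: ElysiaV3-000002(6d3eb064dcc1), AddNet Weight A 1: 0.9, AddNet Weight B 1: 0.7"
    // (keys arrive lowercased from parseA1111); walk them in index order.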
    let index = 1;
    let found = infotext.extraParams[`addnet module ${index}`]
    while (`addnet module ${index}` in infotext.extraParams) {
        popOpt("addnet enabled")
        const moduleName = popOpt(`addnet module ${index}`)
        const modelName = popOpt(`addnet model ${index}`);
        const weightA = popOpt(`addnet weight a ${index}`);
        const weightB = popOpt(`addnet weight b ${index}`);

        if (moduleName == null || modelName == null || weightA == null || weightB == null) {
            throw new Error(`Error parsing addnet model params: ${moduleName} ${modelName} ${weightA} ${weightB}`)
        }

        if (moduleName !== "LoRA") {
            throw new Error("Unknown AddNet model type " + moduleName)
        }

        const [name, hash] = parseAddNetModelNameAndHash(modelName);
        if (name == null || hash == null) {
            throw new Error("Error parsing addnet model name: " + modelName);
        }

        let shorthash = undefined
        let shorthash_legacy = undefined
        if (hash.length > 8) {
            // new method using safetensors hash
            shorthash = hash
        }
        else {
            // old hash using webui's 0x10000 hashing method
            shorthash_legacy = hash
        }

        const lora: ComfyBoxStdGroupLoRA = {
            model_name: name,
            module_name: moduleName,
            model_hashes: {
                addnet_shorthash: shorthash,
                addnet_shorthash_legacy: shorthash_legacy
            },
            strength_unet: parseFloat(weightA),
            strength_tenc: parseFloat(weightB),
        }
        if (parameters.lora)
            parameters.lora.push(lora)
        else
            parameters.lora = [lora]

        index += 1;
        found = infotext.extraParams[`addnet model ${index}`]
    }

    for (const [key, value] of Object.entries(infotext.extraParams)) {
        if (key.startsWith("addnet model ")) {
            const index = key.replace("addnet module ", "")
            // delete infotext.extraParams[key];
        }
    }

    const prompt: ComfyBoxStdPrompt = {
        prompt: {
            metadata: {
                version: 1,
                created_with: "stable-diffusion-webui",
                extra_data: {}
            },
            parameters
        }
    }

    console.warn("Unhandled A1111 parameters:", infotext.extraParams, infotext.extraNetworks)

    return prompt
}
src/lib/parseA1111.ts (new file, 166 lines)
@@ -0,0 +1,166 @@
interface ExtraNetworkParams {
    items: string[];
}

export type A1111ParsedInfotext = {
    positive: string,
    negative: string,

    steps: number,
    cfgScale: number,
    width: number,
    height: number,
    modelHash?: string,
    modelName?: string,
    batchSize?: number,
    batchPos?: number,
    sampler: string,
    seed: number,
    denoise?: number,

    extraNetworks: Record<string, ExtraNetworkParams[]>
    extraParams: Record<string, any>
}

export type A1111ParsingError = {
    error: string
}
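
// Extra network tags embedded in the prompt text, e.g. "<lora:asdfg:1:foo:bar>".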
const reExtraNetworks = /<(\w+):([^>]+)>/g;
const reParam = /\s*([\w ]+):\s*("(?:\\"[^,]|\\"|\\|[^\"])+"|[^,]*)(?:,|$)/g;

function parseExtraNetworks(prompt: string): [string, Record<string, ExtraNetworkParams[]>] {
    const res: Record<string, ExtraNetworkParams[]> = {};

    function found(_match: string, modelType: string, args: string): string {
        if (!res[modelType]) {
            res[modelType] = [];
        }

        res[modelType].push({ items: args.split(":") });

        return "";
    }

    prompt = prompt.replace(reExtraNetworks, found);

    return [prompt, res];
}

type A1111ParamHandler = string | ((prompt: A1111ParsedInfotext, value: string) => void);
const wrapFloat = (name: string): A1111ParamHandler => {
    return (p, v) => {
        (p as any)[name] = parseFloat(v);
    }
}

const wrapInt = (name: string): A1111ParamHandler => {
    return (p, v) => {
        (p as any)[name] = parseInt(v);
    }
}
const handlers: Record<string, A1111ParamHandler> = {
    steps: wrapInt("steps"),
    "cfg scale": wrapFloat("cfgScale"),
    "size": (p, v) => {
        const [widthStr, heightStr] = v.split("x")
        p.width = parseInt(widthStr);
        p.height = parseInt(heightStr);
    },
    "model hash": "modelHash",
    model: "modelName",
    "batch size": wrapInt("batchSize"),
    "batch pos": wrapInt("batchPos"),
    sampler: "sampler",
    seed: wrapInt("seed"),
    "denoising strength": wrapFloat("denoise")
}
/*
 * Parses AUTOMATIC1111/stable-diffusion-webui format infotext into its raw parameters.
 *
 * The format is as follows:
 * - Prompt text starts immediately at the beginning of the file, ending
 *   on the first line starting with "Negative prompt:" or "Steps:"
 * - "Negative prompt:" is optional and might be omitted
 * - Following "Steps:" are various sort-of-comma-separated values.
 *   Random characters can completely break parsing. Here be dragons.
 */
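// Example (abridged, taken from the test suite):
//
//   1girl, pink hair
//   Negative prompt: (worst quality, low quality:1.4)
//   Steps: 20, Sampler: DPM++ SDE Karras, CFG scale: 6, Seed: 780207036, Size: 512x768, Model hash: 0873291ac5, Model: AbyssOrangeMix2_nsfw, Denoising strength: 0.2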
export default function parseA1111(infotext: string): A1111ParsedInfotext | A1111ParsingError {
    let doneWithPrompt = false;

    let positive_ = ""
    let negative = ""

    const lines = infotext.trim().split("\n")
    let lastLineIdx = lines.findIndex(l => l.trim().indexOf("Steps: ") !== -1)
    if (lastLineIdx === -1) {
        return { error: "Steps: line not found" }
    }

    for (let index = 0; index < lastLineIdx; index++) {
        let line = lines[index].trim()
        if (line.startsWith("Negative prompt:")) {
            doneWithPrompt = true;
            line = line.substring(16).trim();
        }

        if (doneWithPrompt) {
            const addNewLine = negative != ""
            negative += (addNewLine ? "\n" : "") + line
        }
        else {
            const addNewLine = positive_ != ""
            positive_ += (addNewLine ? "\n" : "") + line
        }
    }

    // webui doesn't apply extra networks in the negative prompt
    let [positive, extraNetworks] = parseExtraNetworks(positive_)

    const extraParams: Record<string, string> = {}

    let result: A1111ParsedInfotext = {
        positive,
        negative,

        // defaults taken from webui
        width: 512,
        height: 512,
        steps: 20,
        cfgScale: 7.0,
        seed: -1,
        sampler: "Euler",

        extraNetworks,
        extraParams
    }

    for (let index = lastLineIdx; index < lines.length; index++) {
        const line = lines[index];
        for (let [_, key, value] of line.matchAll(reParam)) {
            key = key.toLowerCase()
            if (value[0] === '"' && value[value.length - 1] === '""')
                value = value.substring(1, value.length - 1)

            const handler = handlers[key]
            if (handler != null) {
                if (value != null) {
                    if (typeof handler === "function") {
                        handler(result, value)
                    }
                    else {
                        (result as any)[handler] = value
                    }
                }
            }
            else {
                extraParams[key] = value
            }
        }
    }

    return result;
}
src/tests/convertA1111ToStdPromptTests.ts (new file, 234 lines)
@@ -0,0 +1,234 @@
import convertA1111ToStdPrompt from "$lib/convertA1111ToStdPrompt";
import { expect } from 'vitest';
import UnitTest from "./UnitTest";
import type { A1111ParsedInfotext } from "$lib/parseA1111";

export default class convertA1111ToStdPromptTests extends UnitTest {
    test__convertsBasic() {
        const infotext: A1111ParsedInfotext = {
            positive: "highest quality, masterpiece, best quality, masterpiece, asuka langley sitting cross legged on a chair",
            negative: "lowres, bad anatomy, bad hands, text, error, missing fingers, extra digit, fewer digits, cropped, worst quality, low quality, normal quality, jpeg artifacts,signature, watermark, username, blurry, artist name",
            height: 512,
            width: 512,
            modelHash: "925997e9",
            cfgScale: 12,
            sampler: "Euler",
            seed: 2870305590,
            steps: 28,
            extraNetworks: {},
            extraParams: {
                "clip skip": "2",
                "aesthetic embedding": "Belle",
                "aesthetic lr": "0.0005",
                "aesthetic slerp": "False",
                "aesthetic slerp angle": "0.1",
                "aesthetic steps": "15",
                "aesthetic text": "",
                "aesthetic text negative": "False",
                "aesthetic weight": "0.9",
            },
        }

        const converted = convertA1111ToStdPrompt(infotext);

        expect(converted).toEqual({
            prompt: {
                metadata: {
                    version: 1,
                    created_with: "stable-diffusion-webui",
                    extra_data: {}
                },
                parameters: {
                    checkpoint: [{
                        model_hashes: {
                            a1111_shorthash: "925997e9",
                        }
                    }],
                    clip: [{
                        clip_skip: 2,
                    }],
                    k_sampler: [{
                        cfg_scale: 12,
                        denoise: 1,
                        sampler_name: "euler",
                        scheduler: "normal",
                        seed: 2870305590,
                        steps: 28
                    }],
                    latent_image: [{
                        width: 512,
                        height: 512,
                    }]
                }
            }
        })
    }

    test__convertsExtraNetworks() {
        const infotext: A1111ParsedInfotext = {
            positive: "dreamlike fantasy landscape where everything is a shade of pink,\n dog ",
            negative: "(worst quality:1.4), (low quality:1.4) , (monochrome:1.1)",
            width: 640,
            height: 512,
            modelHash: "0f0eaaa61e",
            modelName: "pastelmix-better-vae-fp16",
            cfgScale: 12,
            sampler: "DPM++ 2M Karras",
            seed: 2416682767,
            steps: 40,
            denoise: 0.55,
            extraNetworks: {
                hypernet: [
                    { items: ["zxcfc", "0.5", "baz", "quux"], },
                ],
                lora: [
                    { items: ["asdfg", "0.8", "foo", "bar"] },
                ],
            },
            extraParams: {
                "clip skip": "2",
                "ensd": "31337",
                "hires steps": "20",
                "hires upscale": "2",
                "hires upscaler": "Latent",
            },
        }

        const converted = convertA1111ToStdPrompt(infotext);

        expect(converted).toEqual({
            prompt: {
                metadata: {
                    version: 1,
                    created_with: "stable-diffusion-webui",
                    extra_data: {}
                },
                parameters: {
                    checkpoint: [{
                        model_name: "pastelmix-better-vae-fp16",
                        model_hashes: {
                            a1111_shorthash: "0f0eaaa61e",
                        }
                    }],
                    clip: [{
                        clip_skip: 2,
                    }],
                    hypernetwork: [{
                        model_name: "zxcfc",
                        strength: 0.5,
                    }],
                    lora: [{
                        model_name: "asdfg",
                        strength_unet: 0.8,
                        strength_tenc: 0.8,
                    }],
                    k_sampler: [{
                        cfg_scale: 12,
                        denoise: 0.55,
                        sampler_name: "dpmpp_2m",
                        scheduler: "karras",
                        seed: 2416682767,
                        steps: 40
                    }],
                    latent_image: [{
                        width: 640,
                        height: 512,
                        upscale_by: 2,
                        upscale_method: "Latent"
                    }]
                }
            }
        })
    }

    test__convertsAdditionalNetworks() {
        const infotext: A1111ParsedInfotext = {
            positive: "1girl, pink hair",
            negative: "(worst quality, low quality:1.4)",
            width: 512,
            height: 768,
            modelHash: "0873291ac5",
            modelName: "AbyssOrangeMix2_nsfw",
            cfgScale: 6,
            sampler: "DPM++ SDE Karras",
            seed: 780207036,
            steps: 20,
            denoise: 0.2,
            extraNetworks: {},
            extraParams: {
                "addnet enabled": "True",
                "addnet model 1": "ElysiaV3-000002(6d3eb064dcc1)",
                "addnet model 2": "elfmorie2(a34cd9a8c3cc)",
                "addnet module 1": "LoRA",
                "addnet module 2": "LoRA",
                "addnet weight a 1": "0.9",
                "addnet weight a 2": "1",
                "addnet weight b 1": "0.7",
                "addnet weight b 2": "0.8",
                "ensd": "31337",
                "mask blur": "1",
                "sd upscale overlap": "64",
                "sd upscale upscaler": "4x_Valar_v1",
                // XXX: just make sure it doesn't fall over for now
                // this prompt format I swear...
                "template": "1girl",
                "negative template": "(worst quality",
            }
        }

        const converted = convertA1111ToStdPrompt(infotext)

        expect(converted).toEqual({
            prompt: {
                metadata: {
                    version: 1,
                    created_with: "stable-diffusion-webui",
                    extra_data: {}
                },
                parameters: {
                    checkpoint: [{
                        model_name: "AbyssOrangeMix2_nsfw",
                        model_hashes: {
                            a1111_shorthash: "0873291ac5",
                        }
                    }],
                    lora: [{
                        module_name: "LoRA",
                        model_name: "ElysiaV3-000002",
                        model_hashes: {
                            addnet_shorthash: "6d3eb064dcc1"
                        },
                        strength_unet: 0.9,
                        strength_tenc: 0.7,
                    },
                    {
                        module_name: "LoRA",
                        model_name: "elfmorie2",
                        model_hashes: {
                            addnet_shorthash: "a34cd9a8c3cc"
                        },
                        strength_unet: 1,
                        strength_tenc: 0.8,
                    }],
                    k_sampler: [{
                        cfg_scale: 6,
                        denoise: 0.2,
                        sampler_name: "dpmpp_sde",
                        scheduler: "karras",
                        seed: 780207036,
                        steps: 20
                    }],
                    latent_image: [{
                        width: 512,
                        height: 768,
                        mask_blur: 1
                    }],
                    sd_upscale: [{
                        upscaler: "4x_Valar_v1",
                        overlap: 64
                    }]
                }
            }
        })
    }
}
src/tests/parseA1111Tests.ts (new file, 158 lines)
@@ -0,0 +1,158 @@
import parseA1111 from "$lib/parseA1111";
import { expect } from 'vitest';
import UnitTest from "./UnitTest";

export default class parseA1111Tests extends UnitTest {
    test__parsesBasic() {
        const infotext = `
highest quality, masterpiece, best quality, masterpiece, asuka langley sitting cross legged on a chair
Negative prompt: lowres, bad anatomy, bad hands, text, error, missing fingers, extra digit, fewer digits, cropped, worst quality, low quality, normal quality, jpeg artifacts,signature, watermark, username, blurry, artist name
Size: 512x512, Seed: 2870305590, Steps: 28, Sampler: Euler, CFG scale: 12, Clip skip: 2, Model hash: 925997e9, Aesthetic LR: 0.0005, Aesthetic text: , Aesthetic slerp: False, Aesthetic steps: 15, Aesthetic weight: 0.9, Aesthetic embedding: Belle, Aesthetic slerp angle: 0.1, Aesthetic text negative: False
`

        const parsed = parseA1111(infotext);

        expect(parsed).toEqual({
            positive: "highest quality, masterpiece, best quality, masterpiece, asuka langley sitting cross legged on a chair",
            negative: "lowres, bad anatomy, bad hands, text, error, missing fingers, extra digit, fewer digits, cropped, worst quality, low quality, normal quality, jpeg artifacts,signature, watermark, username, blurry, artist name",
            height: 512,
            width: 512,
            modelHash: "925997e9",
            cfgScale: 12,
            sampler: "Euler",
            seed: 2870305590,
            steps: 28,
            extraNetworks: {},
            extraParams: {
                "clip skip": "2",
                "aesthetic embedding": "Belle",
                "aesthetic lr": "0.0005",
                "aesthetic slerp": "False",
                "aesthetic slerp angle": "0.1",
                "aesthetic steps": "15",
                "aesthetic text": "",
                "aesthetic text negative": "False",
                "aesthetic weight": "0.9",
            },
        })
    }

    test__parsesExtraNetworks() {
        const infotext = `
dreamlike fantasy landscape where everything is a shade of pink,
<lora:asdfg:1:foo:bar> dog <hypernet:0.5:baz:quux>
Negative prompt: (worst quality:1.4), (low quality:1.4) , (monochrome:1.1)
Steps: 40, Sampler: DPM++ 2M Karras, CFG scale: 12, Seed: 2416682767, Size: 640x512, Model hash: 0f0eaaa61e, Model: pastelmix-better-vae-fp16, Denoising strength: 0.55, Clip skip: 2, ENSD: 31337, Hires upscale: 2, Hires steps: 20, Hires upscaler: Latent
`
        const parsed = parseA1111(infotext);

        expect(parsed).toEqual({
            positive: "dreamlike fantasy landscape where everything is a shade of pink,\n dog ",
            negative: "(worst quality:1.4), (low quality:1.4) , (monochrome:1.1)",
            width: 640,
            height: 512,
            modelHash: "0f0eaaa61e",
            modelName: "pastelmix-better-vae-fp16",
            cfgScale: 12,
            sampler: "DPM++ 2M Karras",
            seed: 2416682767,
            steps: 40,
            denoise: 0.55,
            extraNetworks: {
                hypernet: [
                    { items: ["0.5", "baz", "quux"], },
                ],
                lora: [
                    { items: ["asdfg", "1", "foo", "bar"] },
                ],
            },
            extraParams: {
                "clip skip": "2",
                "ensd": "31337",
                "hires steps": "20",
                "hires upscale": "2",
                "hires upscaler": "Latent",
            },
        })
    }

    test__parsesXYZGrid() {
        const infotext = `
1girl
Negative prompt: (worst quality, low quality:1.4)
Steps: 20, Sampler: DPM++ SDE Karras, CFG scale: 5, Seed: 1964718363, Size: 512x512, Model hash: 736a6f43c2, Denoising strength: 0.5, Clip skip: 2, Hires upscale: 1.75, Hires steps: 14, Hires upscaler: Latent (nearest-exact), Script: X/Y/Z plot, X Type: Prompt S/R, X Values: "<lora:cru5rb:0.5> , <lora:cru5rb:0.6>,<lora:cru5rb:0.7>, <lora:cru5rb:0.8> ,<lora:cru5rb:0.9> , <lora:cru5rb:1>,"
`

        const parsed = parseA1111(infotext);

        expect(parsed).toEqual({
            positive: "1girl",
            negative: "(worst quality, low quality:1.4)",
            width: 512,
            height: 512,
            modelHash: "736a6f43c2",
            cfgScale: 5,
            sampler: "DPM++ SDE Karras",
            seed: 1964718363,
            steps: 20,
            denoise: 0.5,
            extraNetworks: {},
            extraParams: {
                "clip skip": "2",
                "hires steps": "14",
                "hires upscale": "1.75",
                "hires upscaler": "Latent (nearest-exact)",
                "script": "X/Y/Z plot",
                "x type": "Prompt S/R",
                "x values": '"<lora:cru5rb:0.5> , <lora:cru5rb:0.6>,<lora:cru5rb:0.7>, <lora:cru5rb:0.8> ,<lora:cru5rb:0.9> , <lora:cru5rb:1>,"',
            },
        })
    }

    test__parsesDynamicPromptsTemplates() {
        const infotext = `
1girl, pink hair
Negative prompt: (worst quality, low quality:1.4)
Steps: 20, Sampler: DPM++ SDE Karras, CFG scale: 6, Seed: 780207036, Size: 512x768, Model hash: 0873291ac5, Model: AbyssOrangeMix2_nsfw, Denoising strength: 0.2, ENSD: 31337, Mask blur: 1, SD upscale overlap: 64, SD upscale upscaler: 4x_Valar_v1, AddNet Enabled: True, AddNet Module 1: LoRA, AddNet Model 1: ElysiaV3-000002(6d3eb064dcc1), AddNet Weight A 1: 0.9, AddNet Weight B 1: 0.9, AddNet Module 2: LoRA, AddNet Model 2: elfmorie2(a34cd9a8c3cc), AddNet Weight A 2: 1, AddNet Weight B 2: 1
Template: 1girl, __haircolor__
Negative Template: (worst quality, low quality:1.4), __badprompt__
`

        const parsed = parseA1111(infotext);

        expect(parsed).toEqual({
            positive: "1girl, pink hair",
            negative: "(worst quality, low quality:1.4)",
            width: 512,
            height: 768,
            modelHash: "0873291ac5",
            modelName: "AbyssOrangeMix2_nsfw",
            cfgScale: 6,
            sampler: "DPM++ SDE Karras",
            seed: 780207036,
            steps: 20,
            denoise: 0.2,
            extraNetworks: {},
            extraParams: {
                "addnet enabled": "True",
                "addnet model 1": "ElysiaV3-000002(6d3eb064dcc1)",
                "addnet model 2": "elfmorie2(a34cd9a8c3cc)",
                "addnet module 1": "LoRA",
                "addnet module 2": "LoRA",
                "addnet weight a 1": "0.9",
                "addnet weight a 2": "1",
                "addnet weight b 1": "0.9",
                "addnet weight b 2": "1",
                "ensd": "31337",
                "low quality": "1.4)",
                "mask blur": "1",
                "sd upscale overlap": "64",
                "sd upscale upscaler": "4x_Valar_v1",
                // XXX: just make sure it doesn't fall over for now
                // this prompt format I swear...
                "template": "1girl",
                "negative template": "(worst quality",
            },
        })
    }
}
@@ -1,2 +1,4 @@
export { default as ComfyPromptSerializerTests } from "./ComfyPromptSerializerTests"
export { default as ComfyGraphTests } from "./ComfyGraphTests"
export { default as parseA1111Tests } from "./parseA1111Tests"
export { default as convertA1111ToStdPromptTests } from "./convertA1111ToStdPromptTests"