// This workflow expects
// - models/checkpoints/sdxl_lightning_2step.safetensors
// to be available on the ComfyUI server
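// The workflow_api.json loaded below is assumed to be this same
// workflow exported from ComfyUI in API format ("Save (API Format)")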
let workflow;
let comfy;
let resImg;
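
// ComfyUiP5Helper is provided by the p5.js helper library for ComfyUI,
// which needs to be included in the page alongside p5.js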

function preload() {
  workflow = loadJSON("workflow_api.json");
}

function setup() {
  createCanvas(512, 512);
  pixelDensity(2); // SDXL operates on 1024x1024

  comfy = new ComfyUiP5Helper("https://gpu1.gohai.xyz:8188/");
  console.log("workflow is", workflow);

  // we can modify the prompts like so
  // #6 is our origin ("from")
  workflow[6].inputs.text =
    "photograph of a silver toaster, monochrome background";
  // #16 is our destination ("to")
  workflow[16].inputs.text =
    "photograph of a car, frontal, monochrome background";

  let button = createButton("new seed");
  button.mousePressed(updateSeed);
}

function requestImage() {
  // map the horizontal mouse position to a value between 0 and 1
  let amount = constrain(map(mouseX, 0, width, 0, 1), 0, 1);
  // conditioning_to_strength is a number between 0 and 1, which
  // controls how the embeddings of the two prompts are combined
  workflow[17].inputs.conditioning_to_strength = amount;
  console.log("conditioning_to_strength", amount);
  comfy.run(workflow, gotImage);
}

function updateSeed() {
  // update the random seed (the sampler expects an integer value)
  workflow[3].inputs.seed = floor(random(9999999));
  comfy.run(workflow, gotImage);
}

function mousePressed() {
  requestImage();
}

function gotImage(data, err) {
  // data is an array of outputs from running the workflow
  console.log("gotImage", data);

  // bail out if the workflow run failed
  if (err) {
    console.error(err);
    return;
  }

  // we can load them like so
  if (data.length > 0) {
    resImg = loadImage(data[0].src);
  }

  // we could automatically run again if we wanted
  //requestImage();
}

function draw() {
  background(255);

  // if we have an image, put it onto the canvas
  if (resImg) {
    image(resImg, 0, 0, width, height);
  }
}
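
// Usage: click the canvas to request an image; the horizontal mouse
// position sets how strongly the result leans towards the destination
// prompt (#16), and the "new seed" button re-runs with a different seed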