// (removed stray `xxxxxxxxxx` / `355` tokens that threw a ReferenceError on load)

// --- API proxy endpoints ---
const replicateProxy = "https://replicate-api-proxy.glitch.me";
const openAIProxy = "https://openai-api-proxy.glitch.me";

// --- UI / audio state ---
let feedback;      // p5 paragraph element used for status/transcript text
let audioContext;  // Web Audio context (created in setup)
let analyserNode;  // Web Audio analyser (set up but not read by draw)
let dataArray;     // frequency-bin buffer for analyserNode
let currentSound;  // currently looping p5.SoundFile, if any
let amp;           // p5.Amplitude level meter, read every frame in draw()
let playSound;     // most recently loaded p5.SoundFile
let mediaRecorder; // MediaRecorder driving mic capture
let mediaStream;   // microphone MediaStream

// --- Flow-field parameters ---
let inc = 0.08; // noise sampling step per grid cell
let scl = 10;   // grid cell size in pixels
let cols;       // grid columns (set in setup)
let rows;       // grid rows (set in setup)
let zoff = 0;   // noise z-offset, advanced over time
let yoff = 0;
let particles = [];
let flowfield = [];

// --- Particle color gradient endpoints (randomized in setup) ---
let r1;
let r2;
let g1;
let g2;
let b1;
let b2;
let choice; // which color channel ("r"/"g"/"b") dominates the palette
/**
 * p5 setup: builds the status line, canvas, flow-field grid, particles,
 * random palette, and audio analysers, then picks the dominant color channel.
 */
function setup() {
  // Status line pinned near the bottom of the window.
  createElement("br");
  feedback = createP("");
  createElement("br");
  feedback.position(10, windowHeight - 12);
  //setupAudio();

  // Visuals: full-window canvas and the flow-field grid dimensions.
  createCanvas(windowWidth, windowHeight);
  background(40);
  cols = floor(width / scl);
  rows = floor(height / scl);
  flowfield = new Array(cols * rows);
  for (let i = 0; i < 300; i++) {
    particles[i] = new Particle();
  }

  // Random start/end colors for the particle gradient.
  r1 = random(255);
  r2 = random(255);
  g1 = random(255);
  g2 = random(255);
  b1 = random(255);
  b2 = random(255);

  // Web Audio analyser node. NOTE(review): draw() reads p5.Amplitude below,
  // not this analyser — kept for parity with the original sketch.
  audioContext = new AudioContext();
  analyserNode = audioContext.createAnalyser();
  analyserNode.fftSize = 256;
  dataArray = new Uint8Array(analyserNode.frequencyBinCount);

  amp = new p5.Amplitude(); // global: polled every frame in draw()

  // Pick which color channel dominates the palette for this run.
  // (`colors` was an implicit global in the original; only used here.)
  const colors = ["r", "g", "b"];
  choice = random(colors);
  console.log(choice);
}
function draw() {
background(0, 0, 0, 2); // makes things fade after a while
let averageAmplitude = amp.getLevel() * 10000; ///This does all the mapping
let circleSize = map(averageAmplitude, 50, 500, 10, 255);
//console.log(circleSize);
// Adjust the range as needed
let yoffChange = map(averageAmplitude, 50, 500, 0, 0.8);
let zoffChange = map(averageAmplitude, 50, 500, 0, 0.001);
//yoff += inc + yoffChange;
//zoff += 0.0001 + zoffChange;
// for visuals
yoff = 0;
for (var y = 0; y < rows; y++) {
var xoff = 0;
for (var x = 0; x < cols; x++) {
var index = x + y * cols;
var angle = noise(xoff, yoff, zoff) * TWO_PI * 4;
var v = p5.Vector.fromAngle(angle);
v.setMag(2);
flowfield[index] = v;
xoff += inc;
}
if (currentSound) {
yoff += inc + yoffChange;
zoff += 0.0001 + zoffChange;
// yoff += inc + yoffChange;
// zoff += 0.0001 * zoffChange;
} else {
yoff += inc;
zoff += 0.0001;
}
}
if (currentSound) {
background(0, 0, 0, 1);
}
for (var i = 0; i < particles.length; i++) {
particles[i].follow(flowfield);
particles[i].update();
particles[i].edges();
particles[i].show(circleSize, averageAmplitude);
}
if (currentSound) {
r1 = circleSize-150;
g1 = circleSize-150;
b1 = circleSize-150;
if (choice == "r") {
r2 = random(255) * 10;
g2 = random(255) * 5;
b2 = random(255) * 10;
} else if (choice == "g") {
r2 = random(255) * 10;
g2 = random(255) * 15;
b2 = random(255) * 5;
} else if (choice == "b") {
r2 = random(255) * 5;
g2 = random(255) * 10;
b2 = random(255) * 15;
}
}
}
/**
 * A flow-field particle: follows the nearest field vector, leaves a line
 * trail from its previous position, and wraps around the canvas edges.
 */
function Particle() {
  this.pos = createVector(random(width), random(height));
  this.vel = createVector(0, 0);
  this.acc = createVector(0, 0);
  this.maxspeed = 5;
  this.prevPos = this.pos.copy();

  // Integrate acceleration -> velocity (speed-limited) -> position.
  this.update = function () {
    this.vel.add(this.acc);
    this.vel.limit(this.maxspeed);
    this.pos.add(this.vel);
    this.acc.mult(0);
  };

  // Steer by the flow vector of the grid cell this particle occupies.
  this.follow = function (vectors) {
    // Clamp grid coords: a particle sitting exactly on the right/bottom
    // edge would otherwise index past the array and apply `undefined`.
    var gx = constrain(floor(this.pos.x / scl), 0, cols - 1);
    var gy = constrain(floor(this.pos.y / scl), 0, rows - 1);
    var force = vectors[gx + gy * cols];
    if (force) {
      this.applyForce(force);
    }
  };

  this.applyForce = function (force) {
    this.acc.add(force);
  };

  // Draw the trail segment. `circleSize` is accepted for interface parity
  // with the caller but is not used here; `averageAmplitude` scales the
  // stroke weight while sound is playing.
  this.show = function (circleSize, averageAmplitude) {
    // Color is a positional gradient between the (r1,g1,b1)-(r2,g2,b2)
    // endpoints; the channels not chosen keep one axis pinned below.
    var r = map(this.pos.x, 0, width, r1, r2);
    var g = map(this.pos.y, 0, height, g1, g2);
    var b = map(this.pos.x, 0, width, b1, b2);
    if (choice == "r") {
      g = g1; // original map(x, 0, width, g1, g1) is the constant g1
    } else if (choice == "g") {
      b = b1;
    } else if (choice == "b") {
      r = r1;
    }
    // Fade strokes out toward the canvas border.
    var distFromCenter = dist(this.pos.x, this.pos.y, width / 2, height / 2);
    //let diam = map(averageAmplitude, 0, 255, 10, 200);;
    var alpha = map(distFromCenter, 0, 300, 200, 0);
    stroke(r, g, b, alpha);
    if (currentSound) {
      strokeWeight(map(averageAmplitude, 50, 500, 0.5, 2));
    } else {
      strokeWeight(1);
    }
    line(this.pos.x, this.pos.y, this.prevPos.x, this.prevPos.y);
    this.updatePrev();
  };

  this.updatePrev = function () {
    this.prevPos.x = this.pos.x;
    this.prevPos.y = this.pos.y;
  };

  // Wrap around the canvas; sync prevPos so no line is drawn across it.
  this.edges = function () {
    if (this.pos.x > width) {
      this.pos.x = 0;
      this.updatePrev();
    }
    if (this.pos.x < 0) {
      this.pos.x = width;
      this.updatePrev();
    }
    if (this.pos.y > height) {
      this.pos.y = 0;
      this.updatePrev();
    }
    if (this.pos.y < 0) {
      this.pos.y = height;
      this.updatePrev();
    }
  };
}
/**
 * Ask the Replicate riffusion model (via proxy) to generate audio from a
 * text prompt, then load the result and loop it, replacing any sound that
 * is currently playing.
 * @param {string} p_prompt - text prompt for the audio generation
 */
async function askForSound(p_prompt) {
  const data = {
    //replicate / riffusion / riffusion
    version: "8cf61ea6c56afd61d8f5b9ffd14d7c216c0a93844ce2d82ac1c9ecc9c7f24e05",
    input: {
      prompt_a: p_prompt,
      //"prompt_a": openAI_json.transcription
    },
  };
  console.log("Asking for Sound Info From Replicate via Proxy", data);
  const options = {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify(data),
  };
  const url = replicateProxy + "/create_n_get/";
  console.log("url", url, "options", options);
  try {
    const picture_info = await fetch(url, options);
    if (!picture_info.ok) {
      throw new Error(`Replicate proxy returned status ${picture_info.status}`);
    }
    const proxy_said = await picture_info.json();
    console.log("proxy_said", proxy_said.output.audio);
    if (currentSound) currentSound.stop();
    // p5.sound loadSound is async; start looping once the file is ready.
    playSound = loadSound(proxy_said.output.audio, function () {
      playSound.play();
      playSound.loop();
      currentSound = playSound;
    });
  } catch (error) {
    // Original had no error path: a failed fetch rejected unhandled.
    console.error("Error fetching sound:", error);
  }
}
/**
 * Send recorded audio to OpenAI Whisper (via proxy) for transcription,
 * display the transcript in the feedback line, and kick off sound
 * generation from it.
 * @param {Blob} audio - recorded audio blob (audio/webm)
 */
async function askForAudio(audio) {
  document.body.style.cursor = "progress";
  try {
    feedback.html("Waiting for reply from OpenAi Audio...");
    console.log("Asking Audio From OpenAI via Proxy");
    const formData = new FormData();
    formData.append("file", audio);
    formData.append("model", "whisper-1");
    const url = openAIProxy + "/askOpenAIAudio/";
    console.log("audio url", url);
    const response = await fetch(url, {
      mode: "cors",
      method: "POST",
      body: formData,
    });
    if (!response.ok) {
      throw new Error(`OpenAI proxy returned status ${response.status}`);
    }
    const openAI_json = await response.json();
    console.log("audio_response", openAI_json.transcription);
    feedback.html(openAI_json.transcription);
    // Fire-and-forget: sound generation reports its own errors.
    askForSound(openAI_json.transcription);
  } catch (error) {
    console.error("Error reading audio:", error);
    feedback.html("Error transcribing audio.");
  } finally {
    // Reset the cursor even on failure (original left it stuck on error).
    document.body.style.cursor = "auto";
  }
}
// Keyboard controls: hold 'a' to record mic audio (released in keyReleased),
// 's' saves the canvas as a PNG, 'c' clears the background.
function keyPressed() {
  switch (key) {
    case "a":
      startRecording();
      break;
    case "s":
      saveCanvas("flowfield", "png");
      break;
    case "c":
      background(40);
      break;
  }
}
// Releasing 'a' ends the in-progress microphone recording.
function keyReleased() {
  if (key !== "a") return;
  stopRecording();
}
/**
 * Request microphone access and start recording. When the recorder stops
 * (see stopRecording), bundle the chunks into a webm Blob, release the
 * microphone, and send the blob for transcription. The recorder and stream
 * are kept in globals so stopRecording() can end the capture.
 */
async function startRecording() {
  try {
    // Request access to the user's microphone.
    mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const mrChunks = [];
    // Create a media recorder and start recording.
    mediaRecorder = new MediaRecorder(mediaStream);
    mediaRecorder.addEventListener("dataavailable", (event) => {
      mrChunks.push(event.data);
    });
    mediaRecorder.addEventListener("stop", () => {
      const recordedData = new Blob(mrChunks, { type: "audio/webm" });
      console.log("Recording stopped", recordedData);
      // Release the mic so the browser's recording indicator clears
      // (original kept the stream live forever). Also drops the unused
      // detached <audio> element the original created here.
      mediaStream.getTracks().forEach((track) => track.stop());
      askForAudio(recordedData);
    });
    mediaRecorder.start();
    console.log("Recording started");
  } catch (error) {
    console.error("Error starting recording:", error);
  }
}
// Stop the active MediaRecorder, if one exists and is still running;
// this fires its "stop" handler, which hands the audio off for transcription.
function stopRecording() {
  if (!mediaRecorder) return;
  if (mediaRecorder.state === "inactive") return;
  mediaRecorder.stop();
}