// p5.js + p5.sound sketch (requires the p5 and p5.sound libraries and a button with id="audio")
// spectrogram or irisgram
// p5 preload hook: load the audio before setup() runs so song.duration()
// is available when the rotation step is computed.
function preload()
{
  //song = loadSound("lofi.mp3")
  //song = loadSound("rick.mp3")
  song = loadSound("ad.mp3")
  // Whether audio is currently playing; toggled by toggleAudio().
  playing = false
  // Called when the song finishes: stop advancing the iris rotation and
  // reset the button label. (Removed the dead no-op `a = a` that was here.)
  song.onended(() => {playing = false; document.getElementById("audio").innerText = "Play"})
  // Target frame rate; setup() uses it to derive degrees-per-frame.
  fr = 60
}
/**
 * p5 setup: create the canvas, a transparent overlay layer for the
 * accumulating iris trail, the FFT analyser, and the per-frame rotation step.
 */
function setup()
{
  createCanvas(500, 500);
  background('#0C0C0C');
  // Transparent graphics buffer composited over the black canvas each frame.
  layer = createGraphics(width, height);
  layer.clear();
  // Fast Fourier Transform analyser: smoothing 0, 256 frequency bins.
  fft = new p5.FFT(0, 256);
  // Degrees of rotation per frame so one full song sweeps exactly 360 degrees.
  a = 360 / (song.duration() * fr);
  b = a;
  frameRate(fr);
}
// Main render loop: draws a live spectrum curve at the canvas centre and
// accumulates a slowly rotating "iris" dot trail onto the persistent layer.
function draw() {
background(0);
layer.noFill()
layer.colorMode(RGB)
// NOTE(review): Array.prototype.reverse() mutates in place, so spectrumB is
// just an alias of spectrumA after this — both refer to the same array.
var spectrumA = fft.analyze()
var spectrumB = spectrumA.reverse()
// Drop the first 60 bins (the highest frequencies, after the reverse).
spectrumB.splice(0, 60)
push()
translate(250, 250)
noFill()
stroke('#009688')
// Vertical amplitude curve drawn down the middle of the canvas.
beginShape()
for(let i = 0; i < spectrumB.length; i++)
{
var amp = spectrumB[i]
// Amplitude (0..256) mapped to a small +/-2 px horizontal wiggle.
var x = map(amp, 0, 256, -2, 2)
// Bins spread vertically from y=30 down to y=200.
var y = map(i, 0, spectrumB.length, 30, 200)
vertex(x, y)
}
endShape()
pop()
///////////////////////////////////////////
// Iris trail: the accumulated layer is displayed rotated by +a while this
// frame's dots are stamped into it at -a, so each frame's radial strip of
// dots lands at a fresh angle and the pattern sweeps around over time.
push()
translate(width/2, height/2)
rotate(radians(a))
layer.push()
layer.translate(width/2, height/2)
layer.rotate(radians(-a))
for(let i = 0; i < spectrumB.length; i++)
{
// Dot size scales with the bin's amplitude.
layer.strokeWeight(0.018 * spectrumB[i])
// Very low alpha (at most ~4 out of 255) so the trail builds up gradually
// across frames; the layer is never cleared after setup().
layer.stroke(45 +spectrumB[i] , 255, 255, spectrumB[i]/60)
// Zero-length line: renders as a single round dot at radius i from centre.
layer.line(0, i, 0, i)
}
layer.pop()
image(layer, -width/2, -height/2)
pop()
// Advance the rotation angle only while audio plays (b = degrees per frame).
if(playing)a += b
}
//playing and pausing and the button controls
// Play/pause button handler: toggles playback and keeps the button label
// ("Play"/"Pause") in sync with the `playing` flag.
function toggleAudio(){
  const button = document.getElementById("audio");
  if (playing) {
    song.pause();
    console.log("paused");
    button.innerText = "Play";
  } else {
    song.play();
    console.log("playing");
    button.innerText = "Pause";
  }
  playing = !playing;
}