// p5.js sketch: webcam face mesh (ml5), hand tracking (MediaPipe handpose),
// and audio-reactive visuals (p5.sound).
// --- Global sketch state ---
let facemesh;            // ml5 facemesh model instance (set in setup)
let video;               // p5 video-capture element
let predictions = [];    // latest keypoint predictions (written by both models)
let canvas_w = 800;      // canvas size in pixels
let canvas_h = 600;
let video_w = 640;       // webcam capture size in pixels
let video_h = 480;
let margin_left = 50;    // offset of the video image inside the canvas
let margin_top = 50;
let x = 10;              // NOTE(review): shadowed by locals in drawKeypoints/drawWave — appears unused
let y = 10;
let lastx;               // previous wave vertex, mutated by drawWave
let lasty;
let amp = 0;             // wave amplitude, eased toward a mouse-derived target
let ampv = 0;            // amplitude easing velocity
let freq = 0;            // wave frequency, eased toward a mouse-derived target
let freqv = 0;           // frequency easing velocity
// Previously implicit globals — declared explicitly so assignments elsewhere
// in the sketch do not rely on sloppy-mode implicit-global creation.
let model;               // MediaPipe handpose model (set in loadHandTrackingModel)
let mic;                 // p5.AudioIn microphone input (set in setup)
let miclevel;            // latest microphone amplitude (set in drawBackground)
// Start the webcam capture, then load the hand-tracking model once the
// camera stream is ready. The capture element stays hidden; its frames are
// drawn manually onto the canvas in draw().
function preload() {
  const onCameraReady = () => {
    loadHandTrackingModel();
  };
  video = createCapture(VIDEO, onCameraReady);
  video.hide();
}
// Load the MediaPipe handpose model, report readiness in the page, and kick
// off the continuous prediction loop.
async function loadHandTrackingModel() {
  model = await handpose.load();
  // Guard: p5's select() returns null when '#status' is absent from the
  // host page, and the original .html() call would then throw.
  const status = select('#status');
  if (status) {
    status.html('Hand Tracking Model Loaded');
  }
  predictHand();
}
// Run one hand-pose estimation pass on the current video frame, then
// re-schedule itself every 100 ms. Failures are logged instead of becoming
// unhandled promise rejections, so the loop keeps running.
// NOTE(review): this writes the same global `predictions` that the facemesh
// "predict" event writes — the two models race; confirm which is intended.
async function predictHand() {
  try {
    predictions = await model.estimateHands(video.elt);
  } catch (err) {
    console.error('Hand prediction failed:', err);
  }
  setTimeout(() => predictHand(), 100);
}
// p5 entry point: create the canvas, (re)start the webcam, start the mic,
// and wire up the ml5 facemesh model.
function setup() {
  createCanvas(canvas_w, canvas_h);
  // NOTE(review): preload() already called createCapture — this opens a
  // second camera capture and replaces the first; confirm one is removed.
  video = createCapture(VIDEO);
  video.size(video_w, video_h); // bug fix: the height argument was missing
  mic = new p5.AudioIn();
  mic.start();
  angleMode(DEGREES);
  stroke(255);
  strokeWeight(1);
  facemesh = ml5.facemesh(video, modelReady);
  // Fill the global "predictions" array every time new face predictions arrive.
  facemesh.on("predict", (results) => {
    predictions = results;
  });
}
// Callback fired once the ml5 facemesh model has finished loading.
function modelReady() {
  const message = "Model ready!";
  console.log(message);
}
// Main render loop: background layer, webcam image, face keypoints,
// audio-reactive foreground, then three vertical sine waves whose amplitude
// and frequency ease toward mouse-derived targets.
function draw() {
  drawBackground();
  image(video, margin_left, margin_top, video_w, video_h);
  drawKeypoints();
  drawForeground();
  // Simple easing: move 1/20th (resp. 1/10th) of the remaining distance
  // toward the mouse-derived target each frame.
  ampv = ((width / 2 - mouseX) - amp) / 20;
  amp += ampv;
  freqv = ((height - mouseY) - freq) / 10;
  freq += freqv;
  // bug fix: drawWave takes (baseX, amp, period); the stray 4th argument
  // (255) was dead and has been removed.
  drawWave(width / 2, amp, freq / 100);
  drawWave(width / 4, amp, freq / 100);
  drawWave((3 * width) / 4, amp, freq / 100);
}
// Draw one randomly placed square whose size grows with the mic level.
// (p5's rect() reuses the width as the height when height is omitted.)
function drawForeground() {
  const size = 10 + 300 * miclevel;
  const px = random(width / 2);
  const py = random(height / 2);
  rect(px, py, size);
}
// Clear the frame and draw the audio/mouse-reactive background shapes.
// Also samples the microphone into the global `miclevel` used elsewhere.
function drawBackground() {
  background("lightblue");
  miclevel = mic.getLevel();
  // Removed: a per-frame console.log(miclevel) debug leftover that flooded
  // the console on every draw() call.
  // Tall ellipse whose width follows the mouse's horizontal position.
  ellipse(random(width), 0, map(mouseX, 0, width, 10, 100), height);
  stroke(0, 0, 255);
  circle(mouseX * miclevel * 10, mouseY * miclevel, 40);
  strokeWeight(5);
  stroke(52, 131, 235);
  line(350, 350, mouseX * miclevel * 50, mouseY * miclevel);
}
// Draw a vertical sine wave centered on baseX, one segment per 2 px of
// canvas height. `amp` scales the horizontal displacement; `period` scales
// the sine's input. (Some callers pass a 4th argument; it is ignored.)
function drawWave(baseX, amp, period) {
  // Track the previous vertex in locals instead of mutating the old
  // `lastx`/`lasty` globals — nothing else in the sketch read them.
  let prevX = baseX;
  let prevY = 0;
  for (let y = 0; y < height; y += 2) {
    const shift = amp * sin(period * y);
    const x = baseX + shift;
    line(x, y, prevX, prevY);
    prevX = x;
    prevY = y;
  }
}
// Draw a flickering red dot on every detected face keypoint, shifted by a
// mouse-derived offset, plus optional per-keypoint effects selected with
// the number keys 1-3.
function drawKeypoints() {
  // The mouse-driven offset is the same for every keypoint — compute it
  // once per frame instead of once per keypoint (was loop-invariant work).
  const offsetX = map(mouseX, 0, width, -100, 55);
  const offsetY = map(mouseY, 0, height, -100, 55);
  for (let i = 0; i < predictions.length; i += 1) {
    const keypoints = predictions[i].scaledMesh;
    for (let j = 0; j < keypoints.length; j += 1) {
      let [x, y] = keypoints[j];
      x += offsetX;
      y += offsetY;
      push();
      translate(margin_left, margin_top);
      fill(random(255), 0, 0);
      ellipse(x, y, 10);
      // Key-driven effects. NOTE: these run once PER KEYPOINT, not per
      // frame, so holding a key spawns many shapes at once.
      if (key === '1') {
        // Random confetti particles ("star" effect).
        noStroke();
        fill(random(0, 255), random(0, 255), random(0, 255));
        ellipse(random(0, 800), random(0, 800), random(1, 5), random(1, 5));
        // bug fix: removed dead `micLevel = mic.getLevel()` — a typo for
        // the global `miclevel` that was assigned but never read.
      } else if (key === '2') {
        // Purple dot trailing the mouse.
        stroke(233, 128, 237);
        fill(100, 0, 100);
        ellipse(pmouseX, pmouseY, random(0, 20), random(0, 20));
      } else if (key === '3') {
        // Randomly colored, randomly placed circles.
        fill(random(["red", "green", "blue"]));
        circle(random(width), random(height), random(10, 20));
      }
      pop();
    }
  }
}
// Assignment notes: when the mouse moves, the face-keypoint "mask" follows it,
// and the blue wave stripes change curvature with the mouse position. Three key
// interactions are provided: pressing 1 spawns random particles, pressing 2
// draws a purple dot that follows the mouse, and pressing 3 spawns random
// circles. Sound interaction: the background blocks and the circle both grow
// larger as the microphone input gets louder. The body section is kept simple
// (hearts and circles). The previous document caused an error, so this sketch
// was made as a separate file.