// (removed stray paste artifacts "xxxxxxxxxx" / "152" — not valid JavaScript; a bare
// `xxxxxxxxxx` identifier would throw a ReferenceError when the script loads)
// this demo uses ml5js and p5js. built on top of ml5's posenet webcam example
// https://ml5js.org/docs/posenet-webcam
//
// this sketch will play a specific note when your nose is in a specific region
//
// ---- Global sketch state ----
let video;      // p5 capture element wrapping the webcam stream
let poseNet;    // ml5 PoseNet instance running on `video`
let spacing;    // pixel width of one note column (width / notes.length)
let poses = []; // latest pose detections, refreshed by the 'pose' event
let synth;      // p5.PolySynth used to play the notes

// One entry per playable note: the note name and whether it is currently
// sounding ("playing" prevents re-attacking the same note every frame).
const notes = [
  { note: "A3", playing: false },
  { note: "B3", playing: false },
  { note: "C3", playing: false },
  { note: "D3", playing: false },
  { note: "E3", playing: false },
  { note: "F3", playing: false },
  { note: "G3", playing: false },
];
// p5 entry point: sets up the canvas, webcam capture, the PoseNet model
// and the polyphonic synth used to play notes.
function setup() {
  createCanvas(640, 480);

  // Capture the webcam and match it to the canvas dimensions.
  video = createCapture(VIDEO);
  video.size(width, height);

  // Start PoseNet on the video stream; modelReady fires once loaded.
  poseNet = ml5.poseNet(video, modelReady);

  // Keep the global `poses` array in sync with every new detection.
  poseNet.on('pose', (results) => {
    poses = results;
  });

  // The raw <video> element stays hidden; we render onto the canvas only.
  video.hide();

  synth = new p5.PolySynth();
}
// Callback fired once PoseNet has finished loading its model.
// Updates the on-page status element so the user knows tracking is ready.
function modelReady() {
  const status = select('#status');
  // Guard: p5's select() returns null when no #status element exists on
  // the page, and calling .html() on null would throw.
  if (status) {
    status.html('Model Loaded');
  }
}
// Main render loop: draws the webcam frame, the pose overlays, the note
// region boundary lines with their labels, and finally hands control to
// the keyboard logic whenever at least one pose has been detected.
function draw() {
  image(video, 0, 0, width, height);

  // Overlay the tracked keypoints and skeleton (helpers defined below).
  drawKeypoints();
  drawSkeleton();

  // One equal-width column per note across the full canvas width.
  spacing = width / notes.length;
  textSize(36);

  // Draw each column's left boundary line and its note label.
  notes.forEach((entry, idx) => {
    const x = idx * spacing;
    stroke(255, 0, 0);
    line(x, 0, x, height);
    noStroke();
    text(entry.note, x + 10, 40);
  });

  // Only drive the synth once PoseNet has produced at least one pose.
  if (poses.length > 0) {
    bodyKeyboard();
  }
}
//function for playing music based on nose position
// Plays or sustains a note based on which vertical region the nose is in.
//
// The canvas is divided into notes.length equal-width columns. While the
// nose sits inside column i, notes[i] is attacked once and then sustained;
// when the nose leaves a column, that column's note is released and its
// "playing" flag is reset so the note can retrigger later.
function bodyKeyboard() {
  // PoseNet keypoint 0 is the nose (the original code relies on this).
  const noseX = poses[0].pose.keypoints[0].position.x;
  // Recompute in case the canvas size or note count changed.
  spacing = width / notes.length;
  // Bail out when no usable x coordinate is available. Checked with
  // == null rather than truthiness so x === 0 (the very left edge of
  // the canvas) still counts as a valid position.
  if (noseX == null) {
    return;
  }
  for (let i = 0; i < notes.length; i++) {
    const inRegion = noseX >= i * spacing && noseX < (i + 1) * spacing;
    if (inRegion) {
      if (!notes[i].playing) {
        // Just entered this region: trigger the note exactly once.
        synth.noteAttack(notes[i].note, 0.9, 0.9, 0.9);
        notes[i].playing = true;
      }
      // Already playing: do nothing so the note sustains. (Previously the
      // note was released here on every frame, which cut the sound off one
      // frame after it started.)
    } else if (notes[i].playing) {
      // Just left this region: stop the note and allow it to retrigger.
      synth.noteRelease(notes[i].note);
      notes[i].playing = false;
    }
  }
}
// A function to draw ellipses over the detected keypoints
// Draws a small dot over every confidently-detected keypoint in every pose.
function drawKeypoints() {
  for (const detection of poses) {
    for (const keypoint of detection.pose.keypoints) {
      // Skip low-confidence detections (score must exceed 0.6).
      if (keypoint.score > 0.6) {
        noStroke();
        ellipse(keypoint.position.x, keypoint.position.y, 10, 10);
      }
    }
  }
}
// A function to draw the skeletons
// Connects each detected skeleton's joint pairs with red line segments.
function drawSkeleton() {
  for (const detection of poses) {
    // Each skeleton entry is a [partA, partB] pair of connected keypoints.
    for (const [partA, partB] of detection.skeleton) {
      stroke(255, 0, 0);
      line(partA.position.x, partA.position.y, partB.position.x, partB.position.y);
    }
  }
}