// (stray paste artifacts removed — a bare `xxxxxxxxxx` statement would throw a ReferenceError at load)
//Simplified version of https://github.com/Creativeguru97/YouTube_tutorial/tree/master/Play_with_APIs/ml5FaceApi/face-api_videoInput
//Youtube tutorial: https://www.youtube.com/watch?v=3yqANLRWGLo&ab_channel=KazukiUmeda
// --- Sketch-wide state ---
let faceapi;             // ml5 faceApi model handle, assigned in setup()
let detections = [];     // latest detection results, refreshed continuously by gotFaces()
let video;               // webcam capture element (hidden; its frames are drawn manually in draw())
let canvas;              // p5 canvas element
let happySound;          // "0.mp3", played when the dominant emotion is "happy" (see mousePressed)
let neutralSound;        // "1.mp3", played when the dominant emotion is "neutral" (see mousePressed)
let currentEmotion = ""; // name of the highest-scoring expression at the last mouse press
// p5 entry point: load the audio clips before setup() runs.
// "0.mp3" is played for a happy face, "1.mp3" for a neutral one (see mousePressed).
// Fix: terminate statements with semicolons instead of relying on ASI.
function preload() {
  happySound = loadSound("0.mp3");
  neutralSound = loadSound("1.mp3");
}
// p5 entry point: create the canvas, start the webcam capture, and kick off
// loading of the ml5 face-api model.
function setup() {
  canvas = createCanvas(480, 360);
  canvas.id('canvas');

  video = createCapture(VIDEO);
  video.size(width, height);
  video.hide(); // frames are painted manually in draw(), so hide the DOM element

  // Detection options forwarded verbatim to ml5.faceApi.
  const faceOptions = {
    withLandmarks: true,
    withExpressions: true,
    withDescriptors: true,
    minConfidence: 0.5,
  };

  // faceReady fires once the model has finished loading.
  faceapi = ml5.faceApi(video, faceOptions, faceReady);
}
// p5 render loop: paint the current webcam frame, then overlay the detection
// results whenever at least one face has been found.
function draw() {
  image(video, 0, 0, width, height);
  if (detections.length === 0) {
    return; // nothing detected yet — just show the raw frame
  }
  drawBoxs(detections);                      // bounding boxes
  drawLandmarks(detections);                 // facial landmark points
  drawExpressions(detections, 20, 250, 14);  // expression score readout
}
// On mouse press: find the dominant expression of the first detected face and
// play the matching sound ("happy" → happySound, "neutral" → neutralSound),
// stopping the other clip.
// Fix: the original dereferenced detections[0] unconditionally and threw a
// TypeError when clicked before any face was detected.
function mousePressed() {
  if (detections.length === 0) {
    return; // no face detected yet — nothing to rank
  }
  const expressions = detections[0].expressions;
  console.log(expressions);

  // Build [emotion, score] pairs and sort by descending confidence score.
  const items = Object.entries(expressions).sort(function (first, second) {
    return second[1] - first[1];
  });
  console.log(items[0][0]);
  currentEmotion = items[0][0];

  if (currentEmotion === "happy") {
    happySound.play();
    neutralSound.stop();
  } else if (currentEmotion === "neutral") {
    neutralSound.play();
    happySound.stop();
  }
}
// Called by ml5 once the face-api model has finished loading:
// starts the continuous detection loop (gotFaces re-schedules itself).
function faceReady() {
faceapi.detect(gotFaces);// Start detecting faces
}
// Detection callback: on success, publish the results to the global
// `detections` and immediately schedule the next pass, forming a continuous
// detection loop. On error, report it and stop the loop.
// Fix: errors are reported via console.error rather than console.log.
function gotFaces(error, result) {
  if (error) {
    console.error(error); // loop is intentionally not restarted on failure
    return;
  }
  detections = result; // consumed by draw() and mousePressed()
  faceapi.detect(gotFaces); // keep detecting
}
// Draw a bounding box around every detected face.
// Fix: the original read detections[0] inside the loop, so with multiple faces
// every box was drawn at the first face's position; the loop index `f` was
// also an implicit global.
function drawBoxs(detections) {
  for (const detection of detections) {
    const { _x, _y, _width, _height } = detection.alignedRect._box;
    stroke(44, 169, 225);
    strokeWeight(1);
    noFill();
    rect(_x, _y, _width, _height);
  }
}
// Draw every facial landmark of every detected face as a point.
// Fix: the loop index `f` was an implicit global; the invariant stroke setup
// is also hoisted out of the inner per-point loop.
function drawLandmarks(detections) {
  for (const detection of detections) {
    const points = detection.landmarks.positions;
    stroke(44, 169, 225);
    strokeWeight(3);
    for (const p of points) {
      point(p._x, p._y);
    }
  }
}
// Render the expression scores of the first detected face as a column of text
// starting at (x, y), one row every textYSpace pixels. When no face is
// visible, only the labels are printed (no values, no styling changes).
function drawExpressions(detections, x, y, textYSpace) {
  // Display label paired with the corresponding key in the expressions object.
  const rows = [
    ["neutral", "neutral"],
    ["happiness", "happy"],
    ["anger", "angry"],
    ["sad", "sad"],
    ["disgusted", "disgusted"],
    ["surprised", "surprised"],
    ["fear", "fearful"],
  ];
  if (detections.length > 0) {
    const scores = detections[0].expressions;
    textSize(14);
    noStroke();
    fill(0, 255, 0);
    rows.forEach(function ([label, key], i) {
      text(label + ": " + nf(scores[key] * 100, 2, 2) + "%", x, y + textYSpace * i);
    });
  } else {
    rows.forEach(function ([label], i) {
      text(label + ": ", x, y + textYSpace * i);
    });
  }
}