// p5.js + ml5.js sketch: live webcam face-expression detection.
let faceapi; // ml5 faceApi model handle (assigned in setup)
let detections = []; // latest detection results from ml5 (updated in gotFaces)
let video; // p5 capture element wrapping the webcam feed
let canvas; // p5 canvas the expression readout is drawn onto
// p5 entry point: creates the canvas and the mirrored webcam capture,
// then boots the ml5 face-api model; faceReady fires once it has loaded.
function setup() {
  canvas = createCanvas(480, 360);
  canvas.id("canvas");

  video = createCapture(VIDEO);
  video.id("video");
  video.size(width, height);
  video.style("transform", "scaleX(-1)"); // mirror so it behaves like a mirror

  // Ask for landmarks, expressions and descriptors; drop detections
  // below 50% confidence.
  faceapi = ml5.faceApi(
    video,
    {
      withLandmarks: true,
      withExpressions: true,
      withDescriptors: true,
      minConfidence: 0.5,
    },
    faceReady
  );
}
// ml5 callback: the face-api model has finished loading.
// Kicks off the first detection pass; gotFaces keeps the loop running.
function faceReady() {
  faceapi.detect(gotFaces);
}
// Got faces:
function gotFaces(error, result) {
if (error) {
console.log(error);
return;
}
detections = result; //Now all the data in this detections:
// console.log(detections);
clear();//Draw transparent background;:
drawExpressions(detections, 20, 250, 14);//Draw face expression:
faceapi.detect(gotFaces);// Call the function again at here:
}
// Renders the confidences of four expressions (neutral/happy/angry/sad)
// as one text line each, starting at (x, y) and spaced textYSpace pixels
// apart. When no face is detected, bare labels are drawn so the layout
// stays stable.
//
// @param {Array} detections - ml5 face-api results; [0].expressions is read
// @param {number} x - left edge of the text block
// @param {number} y - baseline of the first line
// @param {number} textYSpace - vertical spacing between lines
function drawExpressions(detections, x, y, textYSpace) {
  // Styling is applied before branching so BOTH branches render the same
  // way (the original styled only the detected-face branch, leaving the
  // empty-state labels with stale settings; it also had a stray "•"
  // character after textFont that broke the script).
  textFont('Helvetica Neue');
  textSize(14);
  noStroke();
  fill(255);

  if (detections.length > 0) { // at least one face detected
    const { neutral, happy, angry, sad } = detections[0].expressions;
    text("neutral: " + nf(neutral * 100, 2, 2) + "%", x, y);
    text("happiness: " + nf(happy * 100, 2, 2) + "%", x, y + textYSpace);
    text("anger: " + nf(angry * 100, 2, 2) + "%", x, y + textYSpace * 2);
    text("sad: " + nf(sad * 100, 2, 2) + "%", x, y + textYSpace * 3);
  } else { // no face: labels only
    text("neutral: ", x, y);
    text("happiness: ", x, y + textYSpace);
    text("anger: ", x, y + textYSpace * 2);
    text("sad: ", x, y + textYSpace * 3);
  }
}