// Face-controlled singing synth: eyebrow height drives pitch, mouth opening
// drives volume. Built with p5.js, ml5 faceApi, and Tone.js.
// ---- Global sketch state ----
let faceapi;          // ml5 faceApi model handle (created in setup)
let video;            // p5 webcam capture element
let detections;       // most recent face-detection results from faceapi.detect
let minY, maxY;       // vertical extremes of the mouth outline (set in measureOpening)
let mouthValues=[];   // NOTE(review): never read or written in this file -- candidate for removal
let midY1,midY2;      // mid-point y of the left eye / left eyebrow (pitch control)
let browHeight=1;     // eye-to-eyebrow distance, recomputed on every playTone tick
let openingSize;      // mouth opening height in px (volume control)
// faceApi detection options: landmarks on, descriptors off
// (by default all options are set to true)
const detection_options = {
withLandmarks: true,
withDescriptors: false,
}
// Schedule playTone on every quarter note and run the transport at 120 BPM.
// FIX: const instead of var -- these bindings are never reassigned.
// NOTE(review): browsers typically block audio until a user gesture, so
// Tone.Transport.start() at load time may silently wait -- confirm in target browsers.
const loop = new Tone.Loop(playTone, '4n').start(0);
Tone.Transport.bpm.value = 120;
Tone.Transport.start();
// Vocal sampler: "Ah" samples at C3/E3/G3/C4; Tone.Sampler repitches the
// nearest sample for notes in between.
// FIX: const instead of var -- never reassigned.
const sampler = new Tone.Sampler({
"C3" : "Ah_C3.mp3",
"E3" : "Ah_E3.mp3",
"G3" : "Ah_G3.mp3",
"C4" : "Ah_C4.mp3",
// Additional samples available but disabled:
// "F4" : "ah_F4.wav",
// "G4" : "Ah_G4.wav",
// "A4" : "Ah_A4.wav",
// "C5" : "Ah_C5.wav",
// "D5" : "Ah_D5.wav",
// "F5" : "Ah_F5.wav",
}).toMaster();
// p5 entry point: create the canvas, start the webcam capture, and kick off
// loading of the ml5 face model (modelReady fires when it is loaded).
function setup() {
  createCanvas(360, 270);

  // Start the tracked eye/eyebrow points at the vertical centre until the
  // first real detection arrives.
  midY1 = height / 2;
  midY2 = height / 2;
  openingSize = 50;

  // Webcam feed sized to match the canvas.
  video = createCapture(VIDEO);
  video.size(width, height);
  // video.hide(); // Hide the video element, and just show the canvas

  faceapi = ml5.faceApi(video, detection_options, modelReady);
  textAlign(RIGHT);
}
// Fired once the ml5 face model has loaded: log the handle and start the
// continuous detection loop (gotResults re-schedules itself).
function modelReady() {
  console.log('ready!');
  console.log(faceapi);
  faceapi.detect(gotResults);
}
// Per-detection callback: store the results, repaint the webcam frame with
// landmark overlays, then request the next detection (continuous loop).
function gotResults(err, result) {
  if (err) {
    console.log(err);
    return;
  }

  detections = result;

  // Repaint the frame beneath the overlay.
  background(255);
  image(video, 0, 0, width, height);

  if (detections && detections.length > 0) {
    // drawBox(detections)
    drawLandmarks(detections);
  }

  faceapi.detect(gotResults);
}
// Outlines the aligned bounding box of every detected face in purple.
function drawBox(detections) {
  for (const detection of detections) {
    const { _x, _y, _width, _height } = detection.alignedRect._box;
    noFill();
    stroke(161, 95, 251);
    strokeWeight(2);
    rect(_x, _y, _width, _height);
  }
}
// For each face: draw the mouth, left eye, and left eyebrow, then feed those
// features into the pitch (eye/eyebrow) and volume (mouth) measurements.
function drawLandmarks(detections) {
  noFill();
  stroke(161, 95, 251);
  strokeWeight(2);

  for (const detection of detections) {
    const { mouth, leftEye, leftEyeBrow } = detection.parts;
    // (nose / right-eye features intentionally unused)

    drawPart(mouth, true);        // closed outline
    drawPart(leftEye, true);      // closed outline
    drawPart(leftEyeBrow, false); // open polyline

    // Eye-to-eyebrow distance controls pitch; mouth opening controls volume.
    extractY1(leftEye);
    extractY2(leftEyeBrow);
    measureOpening(mouth);
  }
}
// Traces one facial feature as a polyline of its landmark points; when
// `closed` is true the shape is joined back to its first vertex.
function drawPart(feature, closed) {
  beginShape();
  for (const point of feature) {
    vertex(point._x, point._y);
  }
  if (closed === true) {
    endShape(CLOSE);
  } else {
    endShape();
  }
}
// Stores the y-coordinate of the middle landmark of the eye outline in the
// global midY1 (the pitch reference point).
function extractY1(feature) {
  const mid = floor(feature.length / 2);
  midY1 = feature[mid]._y;
}
// Stores the y-coordinate of the middle landmark of the eyebrow outline in
// the global midY2 (paired with midY1 to compute browHeight in playTone).
function extractY2(feature) {
  const mid = floor(feature.length / 2);
  midY2 = feature[mid]._y;
}
// Measures the vertical extent (max y minus min y) of a feature's landmarks
// and stores it in the global openingSize (drives the singing volume).
// Also updates the globals maxY and minY as a side effect.
function measureOpening(feature) {
  const ys = feature.map((point) => point._y);
  maxY = max(ys);
  minY = min(ys);
  openingSize = maxY - minY;
}
// Tone.Loop callback (every quarter note): converts the eye-to-eyebrow
// distance into a MIDI pitch and the mouth opening into a volume, triggers
// the vocal sampler, and draws the note name beside the eyebrow.
function playTone() {
  browHeight = midY1 - midY2;
  console.log(browHeight);

  // Map eyebrow raise (~30-50 px) onto one octave of MIDI notes (C3..C4).
  const noteMapped = map(browHeight, 30, 50, 48, 60);
  const midiNote = round(noteMapped);
  const noteObject = Tone.Frequency(midiNote, "midi");

  // Map mouth opening (~20-80 px) onto sampler volume in dB.
  // FIX: declared locally with const -- it was previously an implicit global.
  const singingVolume = map(openingSize, 20, 80, -35, 10);
  sampler.volume.value = singingVolume;
  sampler.triggerAttackRelease(noteObject, 0.5);

  // Display the note name (e.g. "A4") next to the tracked eyebrow.
  const displayNote = Tone.Frequency(midiNote, "midi").toNote();
  textSize(40);
  noStroke();
  fill(255);
  text(displayNote, width * 3 / 4, midY2);
}