let video,
  w,
  h,
  detector,
  poses,
  confidence = 0.4,
  tolerance = 12,
  fpsCtr = 0,
  fpsOldValue = 0,
  fpsNewValue = 0;
let speechRec, said;
let myRightEye, myLeftEye, showEyesFlag = false;
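// Constructor for a single "googly eye": a white sclera with a dark pupil,
// each rotating around its own centre at its own rate.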
function eye() {
  // Sclera (the white of the eye)
  this.scleraX = 1;
  this.scleraY = 1;
  this.scleraD = 18;
  this.scleraR = 255;
  this.scleraG = 255;
  this.scleraB = 255;
  this.scleraRotateCtr = 0;
  this.scleraRotateBy = 0;
  // Pupil, drawn offset from the sclera and spinning at a random rate
  this.pupilX = 3;
  this.pupilY = 3;
  this.pupilD = 8;
  this.pupilR = 0;
  this.pupilG = 0;
  this.pupilB = 0;
  this.pupilRotateCtr = 0;
  this.pupilRotateBy = random(-0.5, 0.5);

  this.showEye = function () {
    noStroke();
    // Accumulate rotation each frame, then draw the sclera and the pupil
    rotate((this.scleraRotateCtr += this.scleraRotateBy));
    fill(this.scleraR, this.scleraG, this.scleraB);
    ellipse(this.scleraX, this.scleraY, this.scleraD);
    rotate((this.pupilRotateCtr += this.pupilRotateBy));
    fill(this.pupilR, this.pupilG, this.pupilB);
    ellipse(this.pupilX, this.pupilY, this.pupilD);
  };
}
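// p5 setup: build the two eyes, start the webcam and speech recognition,
// then load the pose-detection model.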
async function setup() {
  myRightEye = new eye();
  myLeftEye = new eye();
  video = createCapture(VIDEO, videoReady);
  video.hide();
  // createCanvas(640, 480); // the canvas is created in videoReady() once the video size is known
  voiceRecog();
  await init();
}
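// Load the MoveNet model from the TensorFlow.js pose-detection library.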
async function init() {
  console.log("Init");
  const detectorConfig = {};
  detector = await poseDetection.createDetector(
    poseDetection.SupportedModels.MoveNet,
    detectorConfig
  );
}
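// Runs once the webcam stream is ready: size the canvas to match the video
// and kick off the pose-estimation loop.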
async function videoReady() {
  w = video.elt.videoWidth;
  h = video.elt.videoHeight;
  print(w, h);
  createCanvas(w, h);
  await getPoses();
}
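// Pose-estimation loop, driven by requestAnimationFrame so it runs
// independently of p5's draw() loop.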
async function getPoses() {
  if (detector) {
    poses = await detector.estimatePoses(video.elt);
  }
  requestAnimationFrame(getPoses);
}
// Main Draw Loop
function draw() {
  image(video, 0, 0);
  fps();
  if (said == "show eyes") {
    showEyesFlag = true;
  }
  if (said == "no eyes") {
    showEyesFlag = false;
  }
  // Only draw the eyes once the detector has returned at least one pose
  if (showEyesFlag && poses && poses.length > 0) {
    drawCrazyEyes();
  }
}
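// Set up continuous speech recognition with p5.speech; the recognized phrases
// ("show eyes" / "no eyes") toggle the overlay in draw().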
function voiceRecog() {
  let lang = navigator.language || "en-US";
  // Create a Speech Recognition object with a callback
  speechRec = new p5.SpeechRec(lang, gotSpeech);
  // Continuous recognition (as opposed to one time only)
  speechRec.continuous = true;
  // Partial (interim) results: faster, less accurate
  speechRec.interimResults = true;
  // These properties must be set before calling start()
  speechRec.start();
  // DOM element to display results
  let output = select("#speech");

  function gotSpeech() {
    // Something was recognized
    // Get it as a string; you can also get JSON with more info
    if (speechRec.resultValue) {
      said = speechRec.resultString;
      // Show the user
      output.html(said).position(0, 600);
      console.log(speechRec.continuous);
    }
  }
}
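// Draw a small frame-rate readout in the top-left corner.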
function fps() {
  noStroke();
  fill(255);
  rect(0, 0, 40, 15);
  fill(0);
  fpsNewValue = int(1 / (deltaTime / 1000));
  // Update the displayed value only every 10 frames so the readout stays legible
  if (fpsCtr > 9) {
    fpsOldValue = fpsNewValue;
    fpsCtr = 0;
  }
  text("fps: " + fpsOldValue, 2, 12);
  fpsCtr++;
}
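// Overlay a spinning eye on each detected eye keypoint.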
function drawCrazyEyes() {
  // MoveNet keypoint 1 is the left eye, keypoint 2 is the right eye
  push();
  translate(poses[0].keypoints[1].x, poses[0].keypoints[1].y);
  myLeftEye.showEye();
  pop();
  push();
  translate(poses[0].keypoints[2].x, poses[0].keypoints[2].y);
  myRightEye.showEye();
  pop();
}