// Global sketch state shared by preload()/setup()/draw().
// (Removed two stray junk lines — `xxxxxxxxxx` would throw a ReferenceError
// the moment the script loaded.)
let cam;        // p5 webcam capture
let ctracker;   // clmtrackr face tracker
let cnv;        // main canvas, positioned over the video
let v = 16;     // mouth-openness distance in pixels; updated each frame in draw()
let song;       // looping voice sample (assets/voice.wav)
let img;        // suit overlay image
let mouth;      // mouth overlay image
let bodypix;    // ml5 BodyPix model — declared here so preload() doesn't create an implicit global
let vScale = 2; // pixel scale for the commented-out pixelation loop in draw()

// BodyPix model options (see ml5 bodyPix docs).
const options = {
  multiplier: 0.75, // 1.0, 0.75, or 0.50, 0.25
  outputStride: 16, // 8, 16, or 32, default is 16
  segmentationThreshold: 0.5, // 0 - 1, defaults to 0.5
};
/**
 * p5 preload: fetch the overlay images, the voice sample, and the BodyPix
 * model before setup() runs.
 */
function preload() {
  img = loadImage('assets/suit.png');
  mouth = loadImage('assets/mouth.png');
  soundFormats('wav', 'ogg'); // try .wav first, fall back to .ogg
  song = loadSound('assets/voice.wav');
  bodypix = ml5.bodyPix(options);
}
/**
 * p5 setup: create the canvas directly over the webcam feed, start the
 * clmtrackr face tracker on the raw <video> element, size the mouth
 * overlay, and start the looping voice sample.
 */
function setup() {
  cnv = createCanvas(320, 240);
  cnv.position(0, 0);

  cam = createCapture(VIDEO);
  cam.size(320, 240);
  cam.position(0, 0); // canvas sits exactly on top of the video element
  cam.hide();         // the frame is drawn onto the canvas in draw() instead

  // clmtrackr works on the underlying HTMLVideoElement, not the p5 wrapper.
  ctracker = new clm.tracker();
  ctracker.init(pModel);
  ctracker.start(cam.elt);

  // p5.Image.resize takes (width, height); the original passed two extra
  // arguments that p5 silently ignored.
  mouth.resize(100, 50);

  noStroke(); // don't draw strokes for my shapes
  textSize(10);
  song.loop();
  // createSimplePalette()
}
/**
 * p5 draw loop: show the webcam frame, overlay the mouth and suit images on
 * the tracked face, and drive the voice sample's playback rate from how far
 * open the mouth is (stopping it when the mouth is nearly closed).
 */
function draw() {
  clear();
  cam.loadPixels();
  image(cam, 0, 0);

  // (removed: commented-out pixelated-video rendering loop using vScale)

  // clmtrackr returns an array of [x, y] feature points while a face is
  // tracked, and a falsy value otherwise. The original looped over every
  // feature point and redrew both overlays each iteration (~70 draws per
  // frame); drawing them once per frame is the intended behavior.
  const positions = ctracker.getCurrentPosition();
  if (positions && positions.length > 0) {
    fill(255);
    // Mouth openness: distance between the upper-lip (47) and lower-lip (53) points.
    v = dist(positions[47][0], positions[47][1], positions[53][0], positions[53][1]);
    // Stretch the mouth overlay vertically with openness; point 37 sits between the lips.
    image(mouth, positions[37][0] - mouth.width / 2, positions[37][1] - mouth.height / 3,
          mouth.width, mouth.height * v / 15);
    // Anchor the suit below the chin (point 7).
    image(img, positions[7][0] - img.width / 2, positions[7][1] - img.height / 5);
  }

  // Map openness to playback speed; clamp so rate() never gets 0.
  v = constrain(v, 15, 45);
  let playSp = map(v, 20, 50, 0, 2);
  playSp = constrain(playSp, 0.01, 4);
  song.rate(playSp);
  if (playSp > 0.5) {
    if (!song.isPlaying()) {
      song.play();
    }
  } else {
    song.stop();
  }
}
// const s = (sketch) => {
// let bodypix;
// let video;
// let segmentation;
// const options = {
// outputStride: 8, // 8, 16, or 32, default is 16
// segmentationThreshold: 0.3 // 0 - 1, defaults to 0.5
// }
// sketch.setup = function () {
// // Set the p5Instance so that ml5 knows which instance to use
// ml5.p5Utils.setP5Instance(sketch);
// sketch.createCanvas(320, 240);
// // load up your video
// video = sketch.createCapture(sketch.VIDEO);
// video.size(sketch.width, sketch.height);
// // video.hide(); // Hide the video element, and just show the canvas
// bodypix = ml5.bodyPix(video, modelReady)
// }
// function modelReady() {
// console.log('ready!')
// bodypix.segment(gotResults, options)
// }
// function gotResults(err, result) {
// if (err) {
// console.log(err)
// return
// }
// // console.log(result);
// segmentation = result;
// sketch.background(0);
// // sketch.image(video, 0, 0, sketch.width, sketch.height)
// sketch.image(segmentation.backgroundMask, 0, 0, sketch.width, sketch.height)
// bodypix.segment(gotResults, options)
// }
// }
// let myp5 = new p5(s, document.getElementById('p5sketch'));
//reference https://gist.github.com/lmccart/2273a047874939ad8ad1
//https://github.com/auduno/clmtrackr
//https://editor.p5js.org/p5/sketches/Sound:_Manipulate_Sound