// This template provides a 3D interactive space with audio and video.
// It uses p5.js WebGL mode.
//
// For more information about WebGL Mode, see here: https://www.youtube.com/watch?v=nqiKWXUX-o8
//
// Press File > Duplicate to start editing your own version
let otherVideos = {};
let myVideo;
let myPos = {
x: 0,
y: -1, // negative y is up, positive y is down
z: 0
};
let angle = 0.0;
let p5l;
let cam; // p5.Camera object
let pebblesImg; // texture for the ground plane
function preload() {
pebblesImg = loadImage('assets/pebbles.jpg');
}
function setup() {
createCanvas(800, 800, WEBGL);
// Create and set up our camera
cam = createCamera();
// Tell p5 to use this camera
setCamera(cam);
// Set the camera's perspective projection:
// a 4x4 matrix that maps positions in 3D space onto the 2D canvas.
// Arguments: field of view, aspect ratio, near plane, far plane.
cam.perspective(PI / 3.0, width / height, 0.1, 50000);
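// For example, a narrower field of view zooms the view in
// (these values are just illustrative, not part of the original sketch):
// cam.perspective(PI / 6.0, width / height, 0.1, 50000);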
// position the camera in world space; y = -1 lifts it just above the ground
cam.setPosition(0, -1, 0);
// cam.move(0, -1, 0); // move() would shift along the camera's local axes instead of world coordinates
strokeWeight(0.2);
let constraints = {
audio: true,
video: true
};
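// createCapture grabs our webcam and mic using the constraints above.
// Once the stream is ready, the callback hands it to p5LiveMedia, which
// connects us to the room "Shared Space" and fires 'stream' when a peer's
// video arrives, 'data' when a peer sends a message, and 'disconnect'
// when a peer leaves.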
myVideo = createCapture(constraints,
function(stream) {
p5l = new p5LiveMedia(this, "CAPTURE", stream, "Shared Space");
p5l.on('stream', gotStream);
p5l.on('disconnect', gotDisconnect);
p5l.on('data', gotData);
}
);
// mute our own audio (to avoid feedback) and hide the default DOM element
myVideo.elt.muted = true;
myVideo.hide();
}
function draw() {
// set background color
background(220, 230, 250);
// default lighting: an ambient light plus a directional light
lights();
// add random objects to break up visual field
// let numObjects = 10;
// for (let i = 0; i < numObjects; i++){
// for (let j = 0; j < numObjects; j++){
// let scale = 4;
// let xOff = i * scale;
// let noiseAtPos = noise(i/4,j/4);
// let yOff = noiseAtPos;
// let zOff = j * scale;
// push();
// translate(-numObjects/2 * scale, 0, -numObjects/2 * scale)
// translate(xOff,yOff,zOff);
// ambientMaterial(noiseAtPos * 255,noiseAtPos * 255,noiseAtPos * 255);
// box(scale, 1, scale);
// pop();
// }
// }
// add a simple textured ground plane
// push()/pop() so the translate and texture only affect this box
push();
// the box is 1 unit tall, so shift down by half its height to put its top at y = 0
translate(0, 0.5, 0);
texture(pebblesImg);
box(200, 1, 200);
pop();
// our keyboard controls (WASD)
let leftRightMove = 0,
upDownMove = 0,
forwardBackwardMove = 0;
if (keyIsDown(87)) { // W - move forward
forwardBackwardMove = -0.1;
}
if (keyIsDown(83)) { // S - move backward
forwardBackwardMove = 0.1;
}
if (keyIsDown(65)) { // A - turn left
leftRightMove = 0.01;
}
if (keyIsDown(68)) { // D - turn right
leftRightMove = -0.01;
}
// move() translates the camera along its local axes; pan() rotates it left/right
cam.move(0, 0, forwardBackwardMove);
cam.pan(leftRightMove);
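// upDownMove is declared above but not wired to anything yet; one option
// (not in the original sketch) would be to map it to two more keys and call:
// cam.move(0, upDownMove, 0);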
// the camera object doesn't expose x/y/z directly, so read its eye position
myPos = {
x: cam.eyeX,
y: cam.eyeY,
z: cam.eyeZ
};
// print(myPos);
// broadcast our position to everyone in the room (once connected)
if (p5l) {
p5l.send(JSON.stringify(myPos));
}
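// Note: this broadcasts a message every frame (~60 per second). If that
// proves too chatty, one option (not in the original sketch) is to throttle it:
// if (p5l && frameCount % 6 === 0) {
//   p5l.send(JSON.stringify(myPos));
// }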
// display ourselves
// push();
// translate(myPos.x, myPos.y, myPos.z);
// texture(myVideo);
// box(1, 1, 1);
// pop();
let count = 1;
for (const id in otherVideos) {
push();
translate(otherVideos[id].position.x, otherVideos[id].position.y, otherVideos[id].position.z);
// TODO: rotate the box to face the viewer; that would mean sending orientation data too
texture(otherVideos[id].stream);
box(5, 5,5);
pop();
count++;
}
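// One possible way to make each video box face us (not in the original
// sketch) would be to rotate it toward our own position before drawing,
// inside the push()/pop() above:
// let dx = myPos.x - otherVideos[id].position.x;
// let dz = myPos.z - otherVideos[id].position.z;
// rotateY(atan2(dx, dz));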
}
// We got a new stream!
function gotStream(stream, id) {
stream.hide();
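// start this peer at the origin; they'll jump to their real position once
// their first 'data' message arrives (see gotData below)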
otherVideos[id] = {
"stream": stream,
"position": {
"x": 0,
"y": 0,
"z": 0
}
};
}
// function mousePressed() {
// myPos = {
// x: (mouseX - width / 2),
// y: (mouseY - height / 2)
// };
// print(myPos);
// p5l.send(JSON.stringify(myPos));
// }
function mouseDragged() {
let scaleFactor = 0.01;
let deltaX = pmouseX - mouseX;
let deltaY = pmouseY - mouseY;
cam.pan(deltaX * scaleFactor);
cam.tilt(-deltaY * scaleFactor);
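// pan() and tilt() only rotate the view (look around); they don't move the
// camera's eye position, so myPos doesn't need to be re-sent here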
// myPos = {
// x: (mouseX - width / 2),
// y: (mouseY - height / 2)
// };
// print(myPos);
// p5l.send(JSON.stringify(myPos));
}
function gotData(pos, id) {
// print(pos);
// ignore data from peers whose stream hasn't arrived yet
if (otherVideos[id]) {
otherVideos[id].position = JSON.parse(pos);
}
}
// a peer left the room, so remove their video and position
function gotDisconnect(id) {
delete otherVideos[id];
}