// sketch.js — face-tracking installation: mirrors the webcam as a pixel grid,
// recognizes faces (ml5 faceApi), steers a motor over WebSerial to follow the
// largest face, and asks OpenAI for emotion-aware chat responses.
// ---- video capture ----
let vid;
let scl = 16; // size (px) of each rendered video "pixel"

// ---- chat box interface ----
let userInput; // Input field for user messages, developer use only
let sendButton;
let chatLog = []; // Chat log to store messages

// SECURITY FIXME: an OpenAI API key must never ship in client-side code —
// anyone viewing the page source can steal it. Proxy these requests through
// a server and keep the key in a server-side environment variable.
const apiKey =
  "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"; // Open AI API

// Template fetch() options; sendMessage() fills in .body per request.
let sentUserMessage = {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: "Bearer " + apiKey,
  },
};
let userMessage = "";

// ---- emotion detection ----
// Order must match the expression keys read in findDominantEmotion().
let emotions = [
  "neutral",
  "happy",
  "sad",
  "angry",
  "fearful",
  "disgusted",
  "surprised",
];
let emotionsLevel = [1, 2, 3, 4, 5, 6, 7]; // overwritten with live expression scores
let faceapi;
let detections = [];
let storedFaces = []; // Stores recognized face landmarks (128-D descriptors)
let respondedFaces = []; // Stores faces already responded to by the bot
let timer = 0;
let elapsedTime = 5; // 5 seconds gap between messages sent to ChatGPT

// ---- Standard Serial API variables ----
// BUG FIX: `serial` and `portButton` were assigned but never declared,
// creating implicit globals (and a ReferenceError when makePortButton read
// `portButton` before any assignment).
let serial; // p5.WebSerial instance, created in setup()
let portButton; // "Choose Serial Port" button, created in makePortButton()
let port;
let writer;
let reader;
let inData = 0;

// ---- motor moving parameters ----
let centerFaceX;
let tolerance = 40 * 8 * 0.25; // tolerance needs to be changed based on the construction, the equation is tolerance = vid.width * scl * 0.25 (or other percent based on what's desired)
let direction = 0; // 1 is right, 0 is stop, 2 is left
// p5 entry point: builds the canvas, starts the low-res webcam capture,
// loads the ml5 face model, and wires up WebSerial for the motor controller.
function setup() {
  // 4:3 canvas.
  const canvasW = 1600;
  const canvasH = canvasW * 0.75;
  createCanvas(canvasW, canvasH);

  // Tiny capture resolution; draw() blows it up into an scl-sized pixel grid.
  vid = createCapture(VIDEO);
  vid.size(40, 30);
  vid.hide();

  // ml5 faceapi set up: landmarks + expressions + 128-D descriptors.
  faceapi = ml5.faceApi(
    vid,
    {
      withLandmarks: true,
      withExpressions: true,
      withDescriptors: true,
    },
    faceReady
  );

  // WebSerial wiring.
  serial = new p5.WebSerial();
  serial.getPorts(); // check available serial ports
  serial.on("noport", makePortButton);
  serial.on("portavailable", openPort);
  serial.on("requesterror", portError);
  serial.on("data", serialEvent);
  serial.on("close", makePortButton);
  makePortButton();

  /*
  // Developer-only chat input (disabled)
  userInput = createInput();
  userInput.position( width - 400, height - 50);
  userInput.size(300);
  sendButton = createButton("Send");
  sendButton.position(userInput.x + userInput.width + 10, height - 50);
  sendButton.mousePressed(sendMessage);
  */
}
// Callback fired once the ml5 faceApi model has finished loading.
// Kicks off the continuous detection loop (gotFaces re-arms detect()).
function faceReady() {
  faceapi.detect(gotFaces);
}
// Detection callback from ml5 faceApi. Stores the descriptor of the largest
// on-screen face if it is not already known, evicts old descriptors to bound
// memory, then immediately re-arms detection (continuous loop).
function gotFaces(error, result) {
  if (error) {
    console.log(error);
    return;
  }
  detections = result;

  if (detections.length > 0) {
    // It only compares the largest face on the screen.
    let descriptor = detections[findLargestFace(detections)[1]].descriptor; // Unique 128D face vector
    let match = findMatchingFace(descriptor);
    if (match === null) {
      storedFaces.push(descriptor);
      console.log("New face stored! Total stored:", storedFaces.length);
    } else {
      console.log("Recognized a stored face!");
    }
  }

  // Remove old faces to clear memory.
  // BUG FIX: the original loop was `for (let i; i < 9; i++)` — `i` started
  // as undefined, so the condition was false and nothing was ever evicted.
  if (storedFaces.length > 10) {
    storedFaces.splice(0, 9); // drop the 9 oldest descriptors
  }

  faceapi.detect(gotFaces); // keep the detection loop running
  console.log(detections);
}
// Return the first stored descriptor whose cosine similarity with
// newDescriptor exceeds the recognition threshold, or null if none match.
function findMatchingFace(newDescriptor) {
  const RECOGNITION_THRESHOLD = 0.9;
  const match = storedFaces.find(
    (stored) => cosineSimilarity(stored, newDescriptor) > RECOGNITION_THRESHOLD
  );
  return match === undefined ? null : match;
}
// Return the first already-responded descriptor similar enough to
// newDescriptor (cosine similarity above threshold), or null if it is new.
function findRespondedFace(newDescriptor) {
  const RECOGNITION_THRESHOLD = 0.9;
  const match = respondedFaces.find(
    (responded) =>
      cosineSimilarity(responded, newDescriptor) > RECOGNITION_THRESHOLD
  );
  return match === undefined ? null : match;
}
// Cosine similarity of two equal-length numeric vectors:
// dot(v1, v2) / (|v1| * |v2|). Returns a value in [-1, 1].
function cosineSimilarity(vec1, vec2) {
  let dotProduct = 0;
  let sumSq1 = 0;
  let sumSq2 = 0;
  for (const [i, a] of vec1.entries()) {
    const b = vec2[i];
    dotProduct += a * b;
    sumSq1 += a * a;
    sumSq2 += b * b;
  }
  return dotProduct / (Math.sqrt(sumSq1) * Math.sqrt(sumSq2));
}
// Main render loop: draws the mirrored pixel-grid video, tracks the largest
// face, sends a motor direction byte over serial, and periodically asks the
// bot to respond to the dominant emotion.
function draw() {
  background(220);

  // --- header bars ---
  push();
  fill(0, 0, 180);
  rect(0, 0, width, height * 0.05);
  fill(180);
  rect(0, height * 0.05, width, height * 1 / 15);
  stroke(120);
  strokeWeight(3);
  fill(255);
  rect(160, height * 0.05 + height * 1 / 30 - height * 0.05 / 2, width * 0.75, height * 0.05);

  // Mirror the canvas horizontally so the video behaves like a mirror.
  translate(width, 0);
  scale(-1, 1);
  let offsetX = width / 2 + 50;
  let offsetY = 80;

  // Render the low-res capture as a grid of scl-sized grayscale squares.
  for (let i = 0; i < vid.width; i++) {
    for (let j = 0; j < vid.height; j++) {
      let val = vid.get(i, j);
      let c = map(brightness(val), 0, 100, 0, 255);
      fill(c);
      noStroke();
      rect(i * scl + offsetX, j * scl + height / 2 + offsetY, scl, scl);
    }
  }
  pop();

  // Reference point the motor tries to keep the face centered on.
  centerFaceX = width / 2 - 30 - (vid.width * scl) / 2;
  // BUG FIX: centerFaceY was assigned without declaration (implicit global).
  let centerFaceY = height / 2 + offsetY;
  // Show base center point (flipped).
  fill(255, 0, 0);
  ellipse(centerFaceX, centerFaceY, 10, 10);

  push();
  if (detections.length > 0) {
    // findLargestFace is pure — call it once (original called it twice).
    const [largestFaceAlignedRect, largestFaceIndex] = findLargestFace(detections);

    // Adjust bounding box for flipped canvas.
    const box = largestFaceAlignedRect._box;
    const flippedX = width - scl * box.x - scl * box.width; // Adjust X for flip
    push();
    noFill();
    stroke(0, 255, 0);
    rect(flippedX - offsetX, scl * box.y + height / 2 + offsetY, scl * box.width, scl * box.height);
    pop();
    fill("green");

    // Decide motor direction: 1 = right, 2 = left, 0 = stop.
    let faceX = scl * box.x + (scl * box.width) / 2;
    let flippedFaceX = width - faceX - offsetX;
    if (flippedFaceX - centerFaceX > tolerance) {
      direction = 2;
    } else if (flippedFaceX - centerFaceX < -tolerance) {
      direction = 1;
    } else {
      direction = 0;
    }
    let directionByte = new Uint8Array([direction]);
    serial.write(directionByte);
    console.log("Data sent:", direction);

    // Show tracking point (flipped).
    fill(255, 0, 0);
    ellipse(flippedFaceX, height / 2 + offsetY, 10, 10);

    const dominantEmotion = findDominantEmotion(largestFaceIndex);
    push();
    textSize(20);
    strokeWeight(3);
    text(dominantEmotion, 40, 300);
    pop();

    // Every elapsedTime seconds: if the dominant emotion changed, message GPT.
    let updatedTime = millis() / 1000;
    if (updatedTime - timer > elapsedTime) {
      timer = updatedTime;
      if (dominantEmotion !== userMessage) {
        let familiar;
        userMessage = dominantEmotion; // Update the user message if the dominant emotion changes
        // It only compares the largest face on the screen.
        let descriptor = detections[largestFaceIndex].descriptor; // Unique 128D face vector
        let match = findRespondedFace(descriptor);
        if (match === null) {
          // BUG FIX: original assigned the misspelled `familar`, creating an
          // implicit global and leaving `familiar` undefined.
          familiar = false;
          respondedFaces.push(descriptor);
          console.log(
            "New face responded! Total responded:",
            respondedFaces.length
          );
        } else {
          familiar = true;
          console.log("Recognized a responded face!");
        }
        // Remove old faces to clear memory.
        // BUG FIX: the original `for (let i; i < 9; i++)` loop never ran
        // (`i` started as undefined).
        if (respondedFaces.length > 10) {
          respondedFaces.splice(0, 9);
        }
        sendMessage(familiar);
        console.log("Message sent");
      }
    }
  } else {
    direction = 0;
  }

  // --- chat box interface ---
  fill(255);
  rect(width / 2 + 50, height * 1 / 6, width * 3 / 8 + 10, vid.height * scl * 2);
  fill(0);
  textSize(20);
  let textWrapWidth = width * 3 / 8 + 10 - 20; // leave some padding on the sides
  let y = height * 1 / 6 + 20;
  for (const message of chatLog) {
    // Approximate the wrapped height of this message.
    let lines = ceil(textWidth(message) / textWrapWidth);
    let messageHeight = lines * 30;
    text(message, width / 2 + 70, y, textWrapWidth);
    y += messageHeight + 5; // extra padding between messages
  }
}
// Push the current detected-emotion "message" into the chat log and request a
// short, emotion-aware reply from the OpenAI chat-completions API.
// familiar — true when this face has been responded to before.
function sendMessage(familiar) {
  if (userMessage === "") return; // Do nothing if the input is empty

  chatLog.push("You: " + userMessage);
  // Set a maximum chat log length.
  let maxMessages = 18; // Adjust this based on your UI preference
  if (chatLog.length > maxMessages) {
    chatLog.shift(); // Remove the oldest message to maintain size
  }

  // Define custom emotion-based prompts.
  let emotionPrompts = {
    neutral: "The user looks neutral. Respond in a calm and friendly manner.",
    happy:
      "The user looks happy. Respond in a warm and engaging way, asking what made them happy today.",
    sad:
      "The user looks sad. Respond in a compassionate and understanding way, offering a comforting message.",
    angry:
      "The user looks angry. Respond with empathy, acknowledging their frustration and offering to listen.",
    fearful:
      "The user looks fearful. Respond gently, reassuring them and encouraging them to feel safe.",
    disgusted:
      "The user looks disgusted. Respond with curiosity, asking what caused their reaction.",
    surprised:
      "The user looks surprised. Respond playfully, asking what shocked them so much.",
  };

  // BUG FIX: the original declared `let promt;` (typo) and then assigned the
  // undeclared identifier `prompt`, silently overwriting window.prompt.
  let prompt;
  if (!familiar) {
    prompt =
      "The person is new to you. Greet them first, but do not use Hi or Hello.";
  } else {
    prompt =
      "You have seen this user just before. Their emotion has changed. Greet them first, and " +
      emotionPrompts[userMessage];
  }

  sentUserMessage.body = JSON.stringify({
    model: "gpt-4o-mini", // Use your valid model
    messages: [
      {
        role: "developer",
        content:
          "You are a friendly and expressive AI that reacts to the user's emotions naturally, as if you're talking to a friend. Keep responses short and engaging.",
      },
      {
        role: "user",
        content: prompt,
      },
    ],
    max_tokens: 50, // Limit the response length
  });

  // Call OpenAI API.
  fetch("https://api.openai.com/v1/chat/completions", sentUserMessage)
    .then((response) => {
      if (!response.ok) {
        throw new Error(`HTTP error! Status: ${response.status}`);
      }
      return response.json();
    })
    .then((data) => {
      const botResponse = data.choices[0].message.content.trim();
      chatLog.push("Bot: " + botResponse);
    })
    .catch((error) => {
      console.error("Error:", error);
      chatLog.push("Bot: Sorry, something went wrong.");
    });
}
// Scan the detections for the face with the largest bounding-box area.
// Returns [alignedRect of the largest face, its index in detections].
function findLargestFace(detections) {
  let bestRect;
  let bestIndex = 0;
  let bestArea = 0;
  detections.forEach((detection, index) => {
    const rect = detection.alignedRect;
    const area = rect._box.area;
    if (area > bestArea) {
      bestArea = area;
      bestRect = rect; // keep the largest face seen so far
      bestIndex = index;
    }
  });
  return [bestRect, bestIndex];
}
// Copy the expression scores of the face at largestFaceIndex into the global
// emotionsLevel array (same order as `emotions`) and return the name of the
// highest-scoring emotion.
function findDominantEmotion(largestFaceIndex) {
  const { expressions } = detections[largestFaceIndex];
  let dominantEmotion = "";
  let maxEmotionLevel = 0;
  emotions.forEach((emotion, k) => {
    emotionsLevel[k] = expressions[emotion]; // keep the global side effect
    if (emotionsLevel[k] > maxEmotionLevel) {
      maxEmotionLevel = emotionsLevel[k];
      dominantEmotion = emotion;
    }
  });
  return dominantEmotion;
}
// Create (or recreate) the "Choose Serial Port" button shown when no port is
// open. Registered for the WebSerial "noport" and "close" events.
function makePortButton() {
  // BUG FIX: `portButton` was never declared anywhere in the file, so reading
  // it here threw a ReferenceError on the first call. The typeof guard is
  // safe even if the declaration is still missing.
  if (typeof portButton !== "undefined" && portButton) portButton.remove(); // Remove old button if it exists
  portButton = createButton("Choose Serial Port");
  portButton.position(10, 10);
  portButton.size(150, 30);
  portButton.style("font-size", "16px");
  portButton.style("z-index", "1000"); // keep above the canvas
  portButton.mousePressed(choosePort);
}
// Ask the browser to show the serial-port picker; triggered by the
// "Choose Serial Port" button. The selection fires "portavailable" → openPort.
function choosePort() {
  serial.requestPort();
}
// Open the port the user selected; hide the chooser button on success and
// log the failure otherwise.
function openPort() {
  const onOpened = () => {
    console.log("✅ Port opened successfully!");
    if (portButton) portButton.hide();
  };
  const onFailed = (err) => {
    console.error("❌ Failed to open port:", err);
  };
  serial.open().then(onOpened).catch(onFailed);
}
// Surface serial errors to the user via a blocking alert dialog.
function portError(err) {
  const message = "Serial port error: " + err;
  alert(message);
}
// WebSerial "data" handler: read one value and stash it in the global inData.
function serialEvent() {
  const received = serial.read();
  if (received === null) return; // nothing buffered this tick
  inData = received;
  console.log("Received:", inData);
}
// Close the serial connection. Not wired to any event here — presumably
// called manually from the console or another script; verify before removing.
function closePort() {
  serial.close();
  console.log("Port closed.");
}