I'm trying to detect whether an individual in an image has their eyes closed, using Face-Api.js. I kept noticing that, whichever image I used, the results were fairly similar for completely shut eyes vs. completely open eyes. So I thought I'd draw the detected eye landmarks onto the image to double-check it's finding the eyes correctly... and this is what happens:

As you can see, it doesn't seem to be finding the eyes correctly, which could be the issue here.
To explain my code: the detectFaces function calculates the Eye Aspect Ratio (EAR) for both the left and right eyes using the calculateEyeAspectRatio function, as a measure of whether the eyes are open or closed. It then compares both EAR values against a defined threshold (EAR_THRESHOLD); if both eyes have an EAR below the threshold, the eyes are considered closed (hasClosedEyes = true).
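For reference, calculateEyeAspectRatio is meant to implement the usual eye aspect ratio formula, where p1–p6 are the six landmark points of one eye (p1/p4 the corners, p2/p3 the upper lid, p6/p5 the lower lid), which is the ordering I'm assuming getLeftEye() and getRightEye() return:

    EAR = (‖p2 − p6‖ + ‖p3 − p5‖) / (2 · ‖p1 − p4‖)

As far as I understand, this should come out somewhere around 0.25–0.35 for open eyes and drop towards 0 as the eyes close, which is why I picked 0.2 as the threshold.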
async function detectFaces(image) {
  await faceapi.nets.ssdMobilenetv1.loadFromUri('/models');
  await faceapi.nets.faceLandmark68Net.loadFromUri('/models');
  await faceapi.nets.faceRecognitionNet.loadFromUri('/models');

  const faces = await faceapi.detectAllFaces(image);
  let hasClosedEyes = false;

  if (faces.length > 0) {
    const landmarks = await faceapi.detectFaceLandmarks(image);
    const leftEye = landmarks.getLeftEye();
    const rightEye = landmarks.getRightEye();

    // Create a canvas overlay on top of the image
    const canvas = document.createElement('canvas');
    canvas.width = image.width;
    canvas.height = image.height;

    // Append the canvas to the same parent as the image
    image.parentElement.appendChild(canvas);

    // Position the canvas over the image using absolute positioning
    canvas.style.position = 'absolute';
    canvas.style.top = image.offsetTop + 'px';
    canvas.style.left = image.offsetLeft + 'px';

    // Get the 2D context
    const ctx = canvas.getContext('2d');
    ctx.drawImage(image, 0, 0);

    // Visualize the eye landmarks on the canvas overlay
    ctx.strokeStyle = '#FF0000'; // Red color
    ctx.lineWidth = 2;

    // Draw lines around the eyes
    const eyeContour = [...leftEye, ...rightEye];

    // Draw lines connecting eye landmarks
    for (let i = 0; i < eyeContour.length - 1; i++) {
      ctx.beginPath();
      ctx.moveTo(eyeContour[i]._x, eyeContour[i]._y);
      ctx.lineTo(eyeContour[i + 1]._x, eyeContour[i + 1]._y);
      ctx.stroke();
    }

    const leftEAR = calculateEyeAspectRatio(leftEye);
    const rightEAR = calculateEyeAspectRatio(rightEye);
    console.log("Left Eye EAR:", leftEAR);
    console.log("Right Eye EAR:", rightEAR);

    // Define a threshold to determine if eyes are closed
    const EAR_THRESHOLD = 0.2;
    if (leftEAR < EAR_THRESHOLD && rightEAR < EAR_THRESHOLD) {
      hasClosedEyes = true;
    }
  }

  return {
    hasClosedEyes
  };
}
// Calculate eye aspect ratio
function calculateEyeAspectRatio(eyeLandmarks) {
  // Horizontal distance between the two eye corners
  const eyeWidth = Math.sqrt(
    Math.pow(eyeLandmarks[3]._x - eyeLandmarks[0]._x, 2) +
    Math.pow(eyeLandmarks[3]._y - eyeLandmarks[0]._y, 2)
  );
  // Vertical distance between the first pair of upper/lower eyelid landmarks
  const eyeHeight1 = Math.sqrt(
    Math.pow(eyeLandmarks[1]._x - eyeLandmarks[5]._x, 2) +
    Math.pow(eyeLandmarks[1]._y - eyeLandmarks[5]._y, 2)
  );
  // Vertical distance between the second pair of upper/lower eyelid landmarks
  const eyeHeight2 = Math.sqrt(
    Math.pow(eyeLandmarks[2]._x - eyeLandmarks[4]._x, 2) +
    Math.pow(eyeLandmarks[2]._y - eyeLandmarks[4]._y, 2)
  );

  const EAR = (eyeHeight1 + eyeHeight2) / (2 * eyeWidth);
  alert(EAR);
  return EAR;
}
I'm not entirely sure what I'm doing wrong. I should note that all of the Face-Api.js models are loading just fine. Any help would be highly appreciated.
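In case the calling context matters, this is roughly how I invoke detectFaces (a simplified sketch, not my exact markup; the "photo" id and the load handling are just illustrative):

const img = document.getElementById('photo'); // assumes an <img id="photo"> somewhere on the page
const run = async () => {
  const { hasClosedEyes } = await detectFaces(img);
  console.log('Eyes closed?', hasClosedEyes);
};
// Make sure the image has finished loading before running detection
if (img.complete) {
  run();
} else {
  img.addEventListener('load', run);
}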
*It should be noted that the code above won't run directly on Stack Overflow, since the models aren't included.