How to store the detected emotion value from face-api.js in a variable

146 Views Asked by At

I am currently writing a prototype where, if a video plays, I would like to store the user's emotion while they watch the video. I am using face-api.js with the predefined models. Below is my code.

index.html

<html>
    <head>
        <title>Video detecting and emotion storing in database</title>
        <!--
            Stylesheets must be loaded with <link rel="stylesheet">.
            The original used <script type="text/css">, which the browser
            treats as a (failed) script, so neither Bootstrap nor style.css
            was ever applied.
        -->
        <link
            rel="stylesheet"
            href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css"
        />
        <link rel="stylesheet" href="style.css" />
        <!-- defer: run the scripts after the DOM is parsed, in order -->
        <script
            defer
            type="text/javascript"
            src="script/face-api.min.js"
        ></script>
        <script defer type="text/javascript" src="script/script.js"></script>
        <style>
            #livecam {
                height: 240px;
                width: 320px;
                position: absolute;
                display: flex;
                float: right; /* duplicate declaration removed */
            }
            canvas {
                position: absolute;
                float: right;
            }
        </style>
    </head>
    <body>
        <div class="container">
            <!-- main content video whose play/pause drives the webcam -->
            <video
                id="myvideo"
                src="laravel.mp4"
                controls
                height="400"
                width="600"
            ></video>

            <!-- webcam preview; muted+autoplay so it starts without user gesture -->
            <video id="livecam" autoplay muted height="240" width="320"></video>
        </div>
    </body>
</html>

Script.js

// DOM handles and shared state for the face-detection overlay.
const video = document.getElementById("myvideo"); // main content video
const liveview = document.getElementById("livecam"); // webcam preview element
let modeloadedstatus = null; // becomes true once all face-api models load
let canvas; // overlay canvas, created when the webcam starts playing

// Load all face-api.js models in parallel and flag readiness when done.
Promise.all([
    faceapi.nets.tinyFaceDetector.loadFromUri("/models"),
    faceapi.nets.faceLandmark68Net.loadFromUri("/models"),
    faceapi.nets.faceRecognitionNet.loadFromUri("/models"),
    faceapi.nets.faceExpressionNet.loadFromUri("/models"),
])
    .then(function () {
        modeloadedstatus = true;
    })
    .catch(function (err) {
        // Without this handler a failed model download left
        // modeloadedstatus null forever, with only an unhandled
        // promise rejection in the console to explain why.
        console.error("Failed to load face-api models", err);
        modeloadedstatus = false;
    });

// When the main video starts playing, turn on the webcam — but only
// once the face-api models have finished loading.
video.onplaying = function () {
    if (modeloadedstatus) {
        enablewebcam();
    } else {
        // This branch is reached both when loading FAILED and when it is
        // merely still in progress, so the message must not claim an
        // outright error (the original "Error in model loading" did).
        alert("Face detection models are not loaded yet");
    }
};

// Pausing the main video also switches the webcam off.
video.onpause = () => {
    disablecam();
};

// Request a webcam stream and pipe it into the preview element.
function enablewebcam() {
    navigator.mediaDevices
        .getUserMedia({ video: { width: 600, height: 400 }, audio: false })
        .then((stream) => {
            liveview.srcObject = stream;
        })
        .catch((err) => {
            // Permission denial or a missing camera previously died as
            // an unhandled promise rejection; surface it instead.
            console.error("Could not access the webcam", err);
        });
}

// Turn the camera off. Detaching srcObject alone does NOT release the
// hardware — every MediaStreamTrack must be stopped explicitly,
// otherwise the camera indicator light stays on after pause.
function disablecam() {
    const stream = liveview.srcObject;
    if (stream) {
        stream.getTracks().forEach((track) => track.stop());
    }
    liveview.srcObject = null;
}

// Returns the name of the strongest expression (e.g. "neutral") —
// the one the model "predicted". `expressions` maps emotion name to
// probability, so the answer is simply the entry with the highest value.
function getDominantExpression(expressions) {
    return Object.entries(expressions).reduce((best, current) =>
        current[1] > best[1] ? current : best
    )[0];
}

let detectionInterval = null; // handle of the active polling loop

liveview.addEventListener("play", () => {
    // Reuse a single overlay canvas: the original appended a brand-new
    // canvas to <body> on every "play" event, leaking elements.
    if (!canvas) {
        canvas = faceapi.createCanvasFromMedia(liveview);
        document.body.append(canvas);
    }
    const displaySize = { width: liveview.width, height: liveview.height };
    faceapi.matchDimensions(canvas, displaySize);

    // Stop any previous polling loop before starting a new one;
    // otherwise intervals pile up across play/pause cycles.
    if (detectionInterval !== null) {
        clearInterval(detectionInterval);
    }
    detectionInterval = setInterval(async () => {
        const detections = await faceapi
            .detectAllFaces(liveview, new faceapi.TinyFaceDetectorOptions())
            .withFaceExpressions();
        if (detections.length === 1) {
            // Store the predicted emotion (highest-probability key)
            // in a variable — this is what the question asks for.
            const emotion = getDominantExpression(detections[0].expressions);
            console.log(emotion, detections[0].expressions[emotion]);
        } else {
            console.log("No face found");
        }
        const resizeddetections = faceapi.resizeResults(
            detections,
            displaySize
        );
        canvas.getContext("2d").clearRect(0, 0, canvas.width, canvas.height);
        faceapi.draw.drawDetections(canvas, resizeddetections);
        faceapi.draw.drawFaceExpressions(canvas, resizeddetections);
    }, 100);
});

In my detections array I am getting the following object values:

[
    {
        "detection": {
            "_imageDims": {
                "_width": 600,
                "_height": 400
            },
            "_score": 0.9888063469609896,
            "_classScore": 0.9888063469609896,
            "_className": "",
            "_box": {
                "_x": 151.11491276957094,
                "_y": 82.07477362841183,
                "_width": 211.3173156083444,
                "_height": 190.0989689704312
            }
        },
        "expressions": {
            "neutral": 0.998404860496521,
            "happy": 0.0006479635485447943,
            "sad": 6.173863198455365e-7,
            "angry": 0.0009338973904959857,
            "fearful": 5.859014162723497e-9,
            "disgusted": 7.676168252146454e-7,
            "surprised": 0.000011679761882987805
        }
    }
]

Now, although the value of my expression is neutral, how can I get the value of neutral? The model indicates the expression whose probability is closest to 1. So my problem is: I want to store that emotion in a variable.

Although if I try detections[0].expressions

I am getting all the expression values, but I want only the value that is predicted by the model.

0

There are 0 best solutions below