I'm using Azure's cognitive service Face API to analyze images. I'd gotten it working earlier using url links to images without issue, but I'm trying to get it working with local images. I'd found a question asked that's exactly what I'm looking for with a request.post
, but I'm trying to get it working with axios.
My code:
"use strict";
const axios = require("axios").default;
const fs = require("fs");

let file = "localFile.jpg";
// Read the image as a raw binary Buffer. Do NOT pass { encoding: "base64" }:
// that returns a base64 *string*, which contradicts the
// "application/octet-stream" content type below and makes the Face API
// reject the request with 400 Bad Request.
const imageBuffer = fs.readFileSync(file);

// Add a valid subscription key and endpoint to your environment variables.
var subscriptionKey = "<Subscription key>";
var urlApi = "https://.../face/v1.0/detect";

// Send a POST request with the binary image bytes as the body.
axios({
  method: "post",
  url: urlApi,
  params: {
    returnFaceId: true,
    returnFaceLandmarks: false,
    returnFaceAttributes:
      "age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,occlusion,accessories,blur,exposure,noise",
  },
  headers: {
    "Content-Type": "application/octet-stream",
    "Ocp-Apim-Subscription-Key": subscriptionKey,
    // For a Buffer, .length is the byte length — the correct Content-Length
    // for a binary upload (a base64 string's .length would be ~33% larger
    // than the actual image).
    "Content-Length": imageBuffer.length,
  },
  data: imageBuffer,
})
  .then((response) => {
    console.log("Status: " + response.status); // numeric HTTP status code
    console.log("Status text: " + response.statusText);
    console.log();
    // The detect endpoint returns an array: one entry per detected face.
    response.data.forEach((face) => {
      console.log("Face ID: " + face.faceId);
      console.log("Smile: " + face.faceAttributes.smile);
      console.log("Head pose: " + JSON.stringify(face.faceAttributes.headPose));
      console.log("Gender: " + face.faceAttributes.gender);
      console.log("Age: " + face.faceAttributes.age);
      console.log(
        "Facial hair: " + JSON.stringify(face.faceAttributes.facialHair)
      );
      console.log("Glasses: " + face.faceAttributes.glasses);
      console.log("Emotion: " + JSON.stringify(face.faceAttributes.emotion));
      console.log("Blur: " + JSON.stringify(face.faceAttributes.blur));
      console.log("Exposure: " + JSON.stringify(face.faceAttributes.exposure));
      console.log("Noise: " + JSON.stringify(face.faceAttributes.noise));
      console.log("Makeup: " + JSON.stringify(face.faceAttributes.makeup));
      console.log(
        "Accessories: " + JSON.stringify(face.faceAttributes.accessories)
      );
      console.log("Hair: " + JSON.stringify(face.faceAttributes.hair));
      console.log();
    });
  })
  .catch(function (error) {
    console.log(error);
  });
I was expecting to get similar output in JSON format, but instead I'm getting a response with status: 400, statusText: 'Bad Request',
and I'm not sure why. Any help on this would be appreciated.
I've also tried
const imageBuffer = fs.readFileSync(file);
and
// Build the query string from the detection options.
const options = [
  "returnFaceId=true",
  "returnFaceLandmarks=true",
  "returnFaceAttributes=age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,accessories",
];
urlApi = urlApi.concat("?", options.join("&"));
// Pass the raw Buffer directly as the second argument (the request body).
// Wrapping it as { body: imageBuffer } makes axios JSON-serialize that
// object, which contradicts the "application/octet-stream" content type
// and produces the 400 Bad Request. Also note the config object needs its
// own closing brace before the closing parenthesis.
axios.post(urlApi, imageBuffer, {
  headers: {
    "Content-Type": "application/octet-stream",
    "Ocp-Apim-Subscription-Key": subscriptionKey,
    "Content-Length": imageBuffer.length,
  },
});
Detailed error response: (At the request of Jim Xu)
response: {
status: 400,
statusText: 'Bad Request',
headers: {
'transfer-encoding': 'chunked',
'content-type': 'application/json; charset=utf-8',
'x-envoy-upstream-service-time': '8',
'apim-request-id': 'cb7f0380-a785-49f5-94fe-ff5ed0e36742',
'strict-transport-security': 'max-age=31536000; includeSubDomains; preload',
'x-content-type-options': 'nosniff',
'csp-billing-usage': 'CognitiveServices.Face.Transaction=1',
date: 'Sun, 20 Sep 2020 13:55:45 GMT',
connection: 'close'
}
If you want to run
Axios
in the Node.js environment, you should use a Stream or a Buffer
as the request body data. For more details, please refer to here. For example: