I'm trying to create a simple route where you can provide a key and the name of an S3 bucket, and it will download all of the files into an archive. This seems to be downloading everything (the final size is reasonable), but the archive is invalid. I'm not sure whether I'm failing to close it correctly or missing something else. Also, I can't seem to find a way to set the "filepath/filename" of each zipped item.
Is the only solution to use something like the archiver module? I'm trying to use as little of the server's resources and the browser's memory as possible.
// Load local .env config except in production.
if (process.env.NODE_ENV !== 'production') require('dotenv').config()
const { ListObjectsCommand, GetObjectCommand, S3Client } = require('@aws-sdk/client-s3');
// No explicit config: region/credentials are resolved by the SDK's default provider chain.
const s3 = new S3Client();
const app = require('express')();
// GET /download?bucketName=...&key=...
// Streams every S3 object under the given prefix into the response, one
// after another, without buffering whole files in server memory.
//
// NOTE(review): concatenating raw object bodies does NOT produce a valid
// .zip, regardless of these headers — ZIP needs per-entry local headers and
// a trailing central directory, and Node's stdlib has no streaming zip
// writer. To emit a real archive (and to control each entry's
// path/filename), pipe through a zip stream such as the `archiver` package,
// e.g. archive.append(getResponse.Body, { name: object.Key }).
app.get('/download', async (req, res) => {
  res.setHeader('Content-Disposition', 'attachment; filename="archive.zip"');
  res.setHeader('Content-Type', 'application/zip');
  try {
    const key = req.query.key;
    const bucketName = req.query.bucketName;
    const params = {
      Bucket: bucketName,
      Prefix: key,
    };
    while (true) {
      const listResponse = await s3.send(new ListObjectsCommand(params));
      // Contents is omitted entirely when the prefix matches nothing.
      const objects = listResponse.Contents ?? [];
      for (const object of objects) {
        const getResponse = await s3.send(new GetObjectCommand({
          Bucket: bucketName,
          Key: object.Key,
        }));
        // Pipe sequentially and WAIT for each object to finish streaming.
        // The original never awaited the pipe, so objects could interleave
        // and res.end() below could run before the data had flushed,
        // corrupting the download. Sequential piping also removes the need
        // for the old setMaxListeners(40) workaround.
        await new Promise((resolve, reject) => {
          getResponse.Body.once('end', resolve);
          getResponse.Body.once('error', reject);
          getResponse.Body.pipe(res, { end: false });
        });
      }
      if (!listResponse.IsTruncated) break;
      // ListObjects (v1) only returns NextMarker when a Delimiter is set;
      // otherwise the next page starts after the last key we received.
      // Without this fallback, Marker stayed undefined and the loop could
      // re-fetch the first page forever.
      params.Marker = listResponse.NextMarker ?? objects.at(-1).Key;
    }
    res.end();
  } catch (error) {
    console.error('Error streaming files:', error);
    if (res.headersSent) {
      // Body bytes already went out; the status line can no longer be
      // changed, so just terminate the (possibly truncated) response.
      res.end();
    } else {
      res.status(500).send({ error: 'Internal server error' });
    }
  }
});
// Boot the HTTP server.
const PORT = 3000;
app.listen(PORT, () => console.log(`Server is running on port ${PORT}`));