Node.js upload Image Stream.Readable to S3

My lambda is triggered by a request from the browser. The browser sends an image as multipart/form-data.

The lambda uses busboy to parse the request:

import Busboy from "busboy";

function parseForm(event: IHttpEvent) {
  return new Promise(
    (resolve, reject) => {
      const busboy = new Busboy({
        headers: event.headers,
        limits: { files: 10 },
      });
      const imageResponse = new Map<string, IImageParseResponse>();

      busboy.on("file", (id, file, filename, encoding, mimeType) => {
           imageResponse.set(id, { file, filename, mimeType });
      });

      busboy.on("error", (error) => reject(`Parse error: ${error}`));
      busboy.on("finish", () => resolve(imageResponse));

      busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
      busboy.end();
    }
  );
}

Once the request is parsed, I want to upload the file to AWS S3:

import { Readable } from "stream";
import { S3Client } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";

export async function handler(event: IHttpEvent) {
  var res = await parseForm(event);
  const s3 = new S3Client({ region: "eu-central-1" });
  for (const [k, v] of res) {
    console.log(`File ${v.filename} ${v.mimeType} streaming`);
    const stream = new Readable().wrap(v.file);
    const upload = new Upload({
      client: s3,
      params: {
        Key: v.filename,
        Bucket: "my-image-bucket",
        Body: stream,
        ContentType: v.mimeType,
      },
    });
    upload.on("httpUploadProgress", (p) => console.log(p));
    const result = await upload.done();
    console.log(result);
    return result;
  }
}

This does not work; the browser nevertheless receives a 200 OK with a null response body. What confuses me even more is that console.log(result); never logs anything to the console.

Where is my mistake? I don't fully understand the mechanics of streams, but as far as I understand they should be more memory-efficient. In the future I plan to upload multiple images at once, and in order to save cost I want my method to be as efficient as possible.

1 Answer

Best answer

In general, I made two mistakes:

  1. I tried to upload the stream after busboy had already read it to the end (the core fix is sketched right after this list).
  2. I did not properly wait for the upload to S3 to complete before the function terminated.
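
The core pattern, stripped of error handling, is roughly the following (a sketch only; s3, BUCKET_NAME and resolve are assumed to be in scope as in the full listing further down): start the Upload inside busboy's "file" event, collect the done() promises, and only resolve once they have all settled.

const pending: Promise<ServiceOutputTypes>[] = [];

busboy.on("file", (id, file, filename, encoding, mimeType) => {
  // Start uploading while busboy is still streaming the file;
  // waiting for "finish" would mean the stream has already been drained.
  const upload = new Upload({
    client: s3,
    params: {
      Bucket: BUCKET_NAME,
      Key: filename,
      Body: new Readable().wrap(file),
      ContentType: mimeType,
    },
  });
  pending.push(upload.done());
});

busboy.on("finish", async () => {
  // Do not resolve (and let the Lambda return) before every upload has settled.
  resolve(await Promise.allSettled(pending));
});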

In the end I ended up with the following:

import { Readable } from "stream";
import Busboy from "busboy";
import { S3Client, ServiceOutputTypes } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";

const s3 = new S3Client({ region: "eu-central-1" });
const { BUCKET_NAME, MAX_IMAGE_SIZE } = process.env;

export async function handler(event: IHttpEvent) {
  const results = await parseForm(event);
  const response = [];
  for (const r of results) {
    if (r.status === "fulfilled") {
      const value: any = r.value.result;
      response.push({
        id: r.value.id,
        key: value.Key,
        url: value.Location,
      });
    }
    if (r.status === "rejected")
      response.push({ id: r.reason.id, reason: r.reason.error });
  }
  return response;
}

// Await a single upload, always remove it from the map afterwards, and tag the
// result or error with the file id so the caller knows which file it belongs to.
async function doneHandler(
  id: string,
  uploadMap: Map<string, Upload>
): Promise<{ id: string; result: ServiceOutputTypes }> {
  try {
    var result = await uploadMap.get(id).done();
  } catch (e: any) {
    var error = e;
  } finally {
    uploadMap.delete(id);
    if (error) throw { id, error };
    return { id, result };
  }
}

function parseForm(event: IHttpEvent) {
  return new Promise( (resolve, reject) => {
      const busboy = new Busboy({
        headers: event.headers,
        limits: { files: 1, fileSize: parseInt(MAX_IMAGE_SIZE) },
      });

      const responses: Promise<{
        id: string;
        result: ServiceOutputTypes;
      }>[] = [];
      const uploads = new Map<string, Upload>();

      busboy.on("file", (id, file, filename, encoding, mimeType) => {
        uploads.set(
          id,
          new Upload({
            client: s3,
            params: {
              Bucket: BUCKET_NAME,
              Body: new Readable().wrap(file),
              Key: filename,
              ContentType: mimeType,
              ContentEncoding: encoding,
            },
          })
        );
        // Collect the promise so the "finish" handler can wait for every upload.
        responses.push(doneHandler(id, uploads));

        file.on("limit", async () => {
          const aborts = [];
          for (const [k, upload] of uploads) {
            aborts.push(upload.abort());
          }
          await Promise.all(aborts);
          return reject(new Error("File is too big."));
        });
      });

      busboy.on("error", (error: any) => {
        reject(new Error(`Parse error: ${error}`));
      });
      busboy.on("finish", async () => {
        const res = await Promise.allSettled(responses);
        resolve(res);
      });

      busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
      busboy.end();
    }
  );
}

This solution also enforces the file-size limit and tries to abort all pending uploads to S3 when it is exceeded.
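
For completeness, here is a minimal sketch of how the handler can be exercised locally with a hand-built multipart event. This is not part of the original answer: the "./handler" import path, field name, and filename are made up, and IHttpEvent is assumed to only require the headers, body and isBase64Encoded fields used above.

import { handler } from "./handler"; // assumed path of the Lambda module above

// BUCKET_NAME and MAX_IMAGE_SIZE must be set in the environment before running this.
const boundary = "----localtestboundary";
const body =
  `--${boundary}\r\n` +
  `Content-Disposition: form-data; name="file1"; filename="cat.png"\r\n` +
  `Content-Type: image/png\r\n` +
  `\r\n` +
  `<binary image bytes>\r\n` +
  `--${boundary}--\r\n`;

handler({
  headers: { "content-type": `multipart/form-data; boundary=${boundary}` },
  body: Buffer.from(body).toString("base64"),
  isBase64Encoded: true,
}).then(console.log, console.error);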