How do I merge images and an audio file into a single video?


I am building a web application with Next.js. I want to create a video by combining three images and an audio track so that each image is displayed for an equal share of the audio's duration, with the total video length matching the audio. Everything should happen locally in the browser.

This is my code for converting images and audio into a video.

import { FFmpeg } from '@ffmpeg/ffmpeg';
import { fetchFile, toBlobURL } from '@ffmpeg/util';


export async function createVideo(ImageFiles, audioFile) {

  try {
    const baseURL = 'https://unpkg.com/@ffmpeg/[email protected]/dist/umd';
    const ffmpeg = new FFmpeg({ log: true});

    console.log('Loading ffmpeg core');
    await ffmpeg.load({
      corePath: await toBlobURL(`${baseURL}/ffmpeg-core.js`, 'text/javascript'),
      wasmPath: await toBlobURL(`${baseURL}/ffmpeg-core.wasm`, 'application/wasm'),
    });
    await ffmpeg.load();
    console.log('Finished loading ffmpeg core');

    for (let i = 0; i < ImageFiles.length; i++) {
      ffmpeg.writeFile(
        `image${i+1}.jpg`,
        await fetchFile(ImageFiles[i].imageUrl)
      );
    }

    ffmpeg.FS('writeFile', 'audio.mp3', await fetchFile(audioFile));


    const durationPerImage = (await getAudioDuration(ffmpeg, 'audio.mp3')) / ImageFiles.length;
    let filterComplex = '';
    for (let i = 0; i < ImageFiles.length - 1; i++) {
      filterComplex += `[${i}:v]trim=duration=${durationPerImage},setpts=PTS-STARTPTS[v${i}]; `;
    }
    filterComplex += `${ImageFiles.slice(0, -1).map((_, i) => `[v${i}]`).join('')}concat=n=${ImageFiles.length - 1}:v=1:a=0,format=yuv420p[v];`;

    await ffmpeg.run(
      '-framerate', '1', '-loop', '1', '-t', durationPerImage, '-i', 'image%d.jpg', '-i', 'audio.mp3',
      '-filter_complex', filterComplex, '-map', '[v]', '-map', '1:a',
      '-c:v', 'libx264', '-tune', 'stillimage', '-c:a', 'aac', '-b:a', '192k', 'output.mp4'
    );

    const data = ffmpeg.FS('readFile', 'output.mp4');

    const videoURL = URL.createObjectURL(new Blob([data.buffer], { type: 'video/mp4' }));
    return videoURL;
  } catch (error) {
    console.error('Error creating video:', error);
    throw new Error('Failed to create video');
  }
}

async function getAudioDuration(ffmpeg, audioFilename) {
  await ffmpeg.run('-i', audioFilename, '-show_entries', 'format=duration', '-of', 'default=noprint_wrappers=1:nokey=1', 'duration.txt');
  const data = ffmpeg.FS('readFile', 'duration.txt');
  const durationString = new TextDecoder().decode(data);
  const duration = Math.floor(parseFloat(durationString.trim())); 
  return duration;
}

I am getting this error:

CreateVideo.js:65  Error creating video: RuntimeError: Aborted(LinkError: WebAssembly.instantiate(): Import #70 module="a" function="qa": function import requires a callable). Build with -sASSERTIONS for more info.

Can someone help me with this?

1 Answer
Maybe this can help. This is how I do it in bash:

#!/bin/bash

# Image as video.
# Usage: ./img4vid.sh image.png audio.aac 720
# $1 = PNG/JPG      $2 = M4A/MP3        $3 = Max height (px)

# -loop 1 must come before the image input so the still frame repeats;
# -shortest stops encoding when the (finite) audio stream ends.
ffmpeg -loop 1 -i "$1" -i "$2" \
    -vf "scale=-2:'min($3,ih)',format=yuv420p" \
    -c:v libx264 -preset medium -profile:v main \
    -c:a aac -shortest -movflags +faststart \
    output.mp4
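
For the in-browser setup the question describes, the same idea can be expressed with ffmpeg.wasm's 0.12 API. Note that 0.12.x expects coreURL/wasmURL in load() and uses writeFile/exec/readFile rather than the old FS/run calls; mixing the two API styles, or loading a core .js that doesn't match the .wasm, typically produces exactly this kind of LinkError at WebAssembly.instantiate. The following is only a rough sketch, not a drop-in fix: the function name createSlideshow and the audioDurationSec parameter are made up for illustration (the duration could be read from an <audio> element's duration property), imageFiles is assumed to be an array of File/Blob objects or URLs, and the images are assumed to share the same dimensions so the concat filter accepts them.

import { FFmpeg } from '@ffmpeg/ffmpeg';
import { fetchFile, toBlobURL } from '@ffmpeg/util';

// Sketch only: createSlideshow and audioDurationSec are illustrative names.
export async function createSlideshow(imageFiles, audioFile, audioDurationSec) {
  const baseURL = 'https://unpkg.com/@ffmpeg/[email protected]/dist/umd';
  const ffmpeg = new FFmpeg();

  // 0.12.x takes coreURL/wasmURL, and load() should only be called once.
  await ffmpeg.load({
    coreURL: await toBlobURL(`${baseURL}/ffmpeg-core.js`, 'text/javascript'),
    wasmURL: await toBlobURL(`${baseURL}/ffmpeg-core.wasm`, 'application/wasm'),
  });

  // Write the inputs into ffmpeg.wasm's in-memory file system.
  for (let i = 0; i < imageFiles.length; i++) {
    await ffmpeg.writeFile(`image${i + 1}.jpg`, await fetchFile(imageFiles[i]));
  }
  await ffmpeg.writeFile('audio.mp3', await fetchFile(audioFile));

  // Each image gets an equal share of the audio's length.
  const perImage = (audioDurationSec / imageFiles.length).toFixed(3);

  // One looped input per image, limited to perImage seconds, then
  // concatenated; the audio is the last input.
  const args = [];
  imageFiles.forEach((_, i) => {
    args.push('-loop', '1', '-t', perImage, '-i', `image${i + 1}.jpg`);
  });
  const n = imageFiles.length;
  const filter =
    imageFiles.map((_, i) => `[${i}:v]`).join('') +
    `concat=n=${n}:v=1:a=0,format=yuv420p[v]`;

  await ffmpeg.exec([
    ...args,
    '-i', 'audio.mp3',
    '-filter_complex', filter,
    '-map', '[v]', '-map', `${n}:a`,
    '-c:v', 'libx264', '-tune', 'stillimage',
    '-c:a', 'aac', '-b:a', '192k',
    '-shortest',
    'output.mp4',
  ]);

  const data = await ffmpeg.readFile('output.mp4');
  return URL.createObjectURL(new Blob([data.buffer], { type: 'video/mp4' }));
}

With three images and, say, a 90-second track, each image would be shown for 30 seconds, and -shortest ends the output together with the audio.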