Using a separate AudioContext / ScriptProcessorNode in Wavesurfer


I'm attempting to use a separate AudioContext and ScriptProcessorNode in place of Wavesurfer's defaults so I can manipulate the pitch of the audio independently of the playback rate. When I pass my own context and script processor as parameters and play back the audio, I get no sound.

My Waveform component:

const playbackEngine = new PlaybackEngine({
    emitter: emitter,
    pitch: pitch,
});

const Waveform = WaveSurfer.create({
    audioContext: playbackEngine.context,
    audioScriptProcessor: playbackEngine.scriptProcessor,
    barWidth: 1,
    cursorWidth: 1,
    pixelRatio: 1,
    container: '#audio-spectrum',
    progressColor: '#03a9f4',
    height: 100,
    normalize: true,
    responsive: true,
    waveColor: '#ccc',
    cursorColor: '#4a74a5'
});

// called in componentDidMount()

function loadMediaUrl(url) {
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';

    // Decode asynchronously
    request.onload = async function() {
        let buffer = request.response;

        // decodes the ArrayBuffer for the Playback Engine;
        // PlaybackEngine.decodeAudioData returns a promise, so errors
        // are caught here rather than via a callback
        let audioBuff;
        try {
            audioBuff = await playbackEngine.decodeAudioData(buffer);
        } catch (error) {
            console.error('Error decoding audio:', error);
            return;
        }

        // sets the AudioBuffer for Wavesurfer to render the waveform
        // (where I believe the problem begins)
        Waveform.loadDecodedBuffer(audioBuff);

        // sets the AudioBuffer for the Playback Engine to play back audio
        playbackEngine.setBuffer(audioBuff);
    };
    request.send();
}
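For context, a minimal sketch of how this might be invoked from the component (assuming a class component; the url prop name is illustrative):

componentDidMount() {
    // start fetching/decoding as soon as the component mounts;
    // this.props.url is a placeholder for wherever the track URL lives
    loadMediaUrl(this.props.url);
}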

Playback.js


const {SimpleFilter, SoundTouch} = require('./soundtouch');

const BUFFER_SIZE = 4096;

class PlaybackEngine {
    constructor({emitter, pitch}) {
        this.emitter = emitter;
        this.context = new (window.AudioContext || window.webkitAudioContext)();
        this.scriptProcessor = this.context.createScriptProcessor(BUFFER_SIZE, 2, 2);

        this.scriptProcessor.onaudioprocess = e => {
            // until setBuffer() runs there is nothing to extract
            if (!this.simpleFilter) {
                return;
            }
            const l = e.outputBuffer.getChannelData(0);
            const r = e.outputBuffer.getChannelData(1);
            const framesExtracted = this.simpleFilter.extract(this.samples, BUFFER_SIZE);
            if (framesExtracted === 0) {
                this.emitter.emit('stop');
            }
            // de-interleave the extracted samples into the two output channels
            for (let i = 0; i < framesExtracted; i++) {
                l[i] = this.samples[i * 2];
                r[i] = this.samples[i * 2 + 1];
            }
        };

        this.soundTouch = new SoundTouch();
        this.soundTouch.pitch = pitch;

        this.duration = undefined;
    }

    get pitch() {
        return this.soundTouch.pitch;
    }
    set pitch(pitch) {
        this.soundTouch.pitch = pitch;
    }

    decodeAudioData(data) {
        return this.context.decodeAudioData(data);
    }

    setBuffer(buffer) {
        this.samples = new Float32Array(BUFFER_SIZE * 2);
        this.source = {
            extract: (target, numFrames, position) => {
                this.emitter.emit('time', position / this.context.sampleRate);
                const l = buffer.getChannelData(0);
                const r = buffer.getChannelData(1);
                // clamp so we never read past the end of the buffer
                const framesToRead = Math.min(numFrames, l.length - position);
                for (let i = 0; i < framesToRead; i++) {
                    target[i * 2] = l[i + position];
                    target[i * 2 + 1] = r[i + position];
                }
                return framesToRead;
            },
        };
        this.simpleFilter = new SimpleFilter(this.source, this.soundTouch);

        this.duration = buffer.duration;
        this.emitter.emit('duration', buffer.duration);
    }

    play() {
        this.scriptProcessor.connect(this.context.destination);
    }

    pause() {
        this.scriptProcessor.disconnect(this.context.destination);
    }

    seekPercent(percent) {
        if (this.simpleFilter !== undefined) {
            this.simpleFilter.sourcePosition = Math.round(
                percent / 100 * this.duration * this.context.sampleRate
            );
        }
    }
}

export default PlaybackEngine;
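Since pitch is exposed as a getter/setter on the engine, the shift can be changed while audio is running. A small usage sketch (the pitch values are illustrative):

// shift up one semitone: a pitch ratio of 2^(1/12)
playbackEngine.pitch = Math.pow(2, 1 / 12);
playbackEngine.play();

// later: jump to the halfway point of the track
playbackEngine.seekPercent(50);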


In this setup, Waveform.play() produces playback from the Wavesurfer instance, but I can't manipulate the pitch. Conversely, playbackEngine.play() lets me manipulate the pitch, but I lose all of Wavesurfer's functionality.

Though I'm fairly sure the problem stems from Wavesurfer and my Playback Engine each holding their own AudioBuffer, I still need to set up the buffer in my playback context as well as render the waveform with Wavesurfer.

I'd like to know how to use the Playback Engine's context, script processor, and AudioBuffer to drive the Wavesurfer instance (i.e. have Waveform.play() play audio through the Playback Engine while still updating the Wavesurfer UI).

All help is appreciated.

1 Answer

So I ended up removing

audioScriptProcessor: playbackEngine.scriptProcessor,

from the Wavesurfer initialization and instead attached the playbackEngine's script processor to the destination node manually. I had previously attempted a setup like this and heard annoying popping sounds during playback. What I took for sample/buffer errors was actually coming from an EventEmitter instance I had constantly broadcasting time between the files. Removing that solved my noise issue (ツ)
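In other words, the wiring ends up roughly like this (a sketch of the approach described above, not the exact code; the visual options are unchanged from the question):

const Waveform = WaveSurfer.create({
    audioContext: playbackEngine.context,  // shared context only; no audioScriptProcessor option
    container: '#audio-spectrum',
    // ...same visual options as in the question...
});

// wire the playback engine's node to the output by hand instead
playbackEngine.scriptProcessor.connect(playbackEngine.context.destination);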