Recording and uploading a wav to Amazon S3

I want a user to be able to record audio and upload it as a .wav file to an S3 bucket. Using this, I already have that working correctly as a .webm file. I am now trying to adapt that code to use RecordRTC, a bolt-on for MediaRecorder that adds .wav support.

This essentially works, in that I end up with a .wav file in my Amazon S3 bucket, but the file is corrupted. I think the main area of concern is the ondataavailable callback (most of the code after it can probably be ignored, but is included just in case). The line console.log(blob); in the code below shows that the blob type is audio/webm.

Any ideas how this can be fixed?

Edit: According to this link, the resulting file is actually a .webm file, not a .wav after all! So why is that? (However, my computer still shows it as a .wav in the File Inspector.)
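
For reference, the RecordRTC examples I have found get genuine WAV output by using its StereoAudioRecorder instead of MediaStreamRecorder. I have not confirmed this is the missing piece in my setup, but a minimal sketch of that configuration (the sample rate and channel count are assumptions, not values from my code) looks like this:

navigator.mediaDevices.getUserMedia({ audio: true }).then(function(stream) {
  var recorder = RecordRTC(stream, {
    type: 'audio',
    mimeType: 'audio/wav',
    recorderType: StereoAudioRecorder, // WAV encoder shipped with RecordRTC
    numberOfAudioChannels: 1,          // assumed: mono is enough for speech
    desiredSampRate: 16000,            // assumed sample rate
    timeSlice: 1800000,                // same slice length as my code below
    ondataavailable: function(blob) {
      console.log(blob.type);          // should log "audio/wav" rather than "audio/webm"
    }
  });
  recorder.startRecording();
});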

// Helper that checks whether an object can be used as a constructor
// (not referenced in the code shown below).
function isConstructor(obj) {
  return !!obj.prototype && !!obj.prototype.constructor.name;
}

class AudioStream {

  constructor(region, IdentityPoolId, audioStoreWithBucket) {
    this.region = region; // S3 region
    this.IdentityPoolId = IdentityPoolId; // Cognito identity pool id
    this.bucketName = audioStoreWithBucket; // bucket that stores the audio files
    this.s3 = null; // S3 client, created in audioStreamInitialize()
    this.dateinfo = new Date();
    this.timestampData = this.dateinfo.getTime(); // timestamp used for file uniqueness
    this.etag = []; // ETags of the uploaded parts of the multipart upload
    this.recordedChunks = []; // blobs received so far
    this.booleanStop = false; // used to trigger the final completeMultipartUpload
    this.incr = 0; // part number; multipart upload merges parts in ascending order
    this.filename = this.timestampData.toString() + ".wav"; // unique filename
    this.uploadId = ""; // upload id returned by createMultipartUpload
    this.recorder = null; // RecordRTC instance, created in audioStreamInitialize()

    this.audioConstraints = {
      audio: true
    };
  }

  audioStreamInitialize() {
    var self = this;

    AWS.config.region = self.region;
    AWS.config.credentials = new AWS.CognitoIdentityCredentials({
      IdentityPoolId: self.IdentityPoolId,
    });

    self.s3 = new AWS.S3();

    navigator.mediaDevices.getUserMedia(self.audioConstraints)
      .then(function(stream) {
        /*
          ondataavailable is called each time timeSlice milliseconds of media have
          been recorded (or when the entire media has been recorded, if timeSlice
          wasn't specified). It receives the recorded slice as a blob, which we can
          then collect and act upon in this handler.
        */
        self.recorder = RecordRTC(stream, {
          type: 'audio',
          mimeType: 'audio/wav',
          recorderType: MediaStreamRecorder,
          disableLogs: true,
          // get interval-based blobs; value in milliseconds
          timeSlice: 1800000,
          // requires timeSlice above; returns a blob via the callback below
          ondataavailable: function(blob) {
            console.log("ondata!");

            var normalArr = [];
            /*
              Push the stream data to an array for future use.
            */
            console.log(blob);
            self.recordedChunks.push(blob);
            normalArr.push(blob);

            /*
              Create a blob from the stream data that we have received.
            */
            var bigBlob = new Blob(normalArr, {
              type: 'audio/wav'
            });

            /*
              If the length of recordedChunks is 1, this is the first part of our data,
              so we call createMultipartUpload, which returns an upload id.
              The upload id is then used to upload the remaining parts of the stream.
              Otherwise, we upload the next part of the multipart upload.
            */
            if (self.recordedChunks.length == 1) {
              self.startMultiUpload(bigBlob, self.filename);
            } else {
              /*
                self.incr is the part number of the part being uploaded.
                It must be a positive integer between 1 and 10,000.
              */
              self.incr = self.incr + 1;
              self.continueMultiUpload(bigBlob, self.incr, self.uploadId, self.filename, self.bucketName);
            }
          } // end ondataavailable
        });
      });
  }

  disableAllButton() {
    //$("#formdata button[type=button]").attr("disabled", "disabled");
  }

  enableAllButton() {
    //$("#formdata button[type=button]").removeAttr("disabled");
  }

    /*
      The MediaRecorder method start(), which is part of the MediaStream Recording API,
      begins recording media into one or more Blob objects. You can record the entire
      duration of the media into a single Blob (or until you call requestData()), or you
      can specify the number of milliseconds to record at a time. Each time that amount
      of media has been recorded, an event is delivered so you can act on the recorded
      slice, while a new Blob is created to record the next slice of the media.
      With RecordRTC the equivalent call is startRecording(), and the slice length is
      set via the timeSlice option passed in audioStreamInitialize().
    */
    startRecording(id) {
        var self = this;
        //self.enableAllButton();
        //$("#record_q1").attr("disabled", "disabled");
        /*
          1800000 is the number of milliseconds to record into each Blob. If this value
          isn't set, the entire media duration is recorded into a single Blob unless
          requestData() is called to obtain the Blob and trigger the creation of a new
          Blob into which the media continues to be recorded.

          PLEASE NOTE you can change this value of 1800000, but each slice should be at
          least 5MB, because multipart upload requires a minimum part size of 5MB.
        */
        //this.recorder.start(1800000);
        this.recorder.startRecording();
        Shiny.setInputValue("timecode", self.filename);
    }


    stopRecording(id) {
        var self = this;
        self.recorder.stopRecording();

    }


    pauseRecording(id) {
        var self = this;
        self.recorder.pauseRecording();
        //$("#pause_q1").addClass("hide");
        //$("#resume_q1").removeClass("hide");
    }



    resumeRecording(id) {
        var self = this;
        self.recorder.resumeRecording();
        //$("#resume_q1").addClass("hide");
        //$("#pause_q1").removeClass("hide");
    }

    /*
        Initiates a multipart upload and returns an upload ID.
        Upload id is used to upload the other parts of the stream
    */
    startMultiUpload(blob, filename) {
        var self = this;
        var audioBlob = blob;
        var params = {
            Bucket: self.bucketName,
            Key: filename,
            ContentType: 'audio/wav',
            ACL: 'private',
        };
        self.s3.createMultipartUpload(params, function(err, data) {
            if (err) {
                console.log(err, err.stack); // an error occurred
            } else {
                self.uploadId = data.UploadId
                self.incr = 1;
                self.continueMultiUpload(audioBlob, self.incr, self.uploadId, self.filename, self.bucketName);
            }
        });
    }


    continueMultiUpload(audioBlob, PartNumber, uploadId, key, bucketName) {
        var self = this;
        var params = {
            Body: audioBlob,
            Bucket: bucketName,
            Key: key,
            PartNumber: PartNumber,
            UploadId: uploadId
        };
        console.log(params);
        self.s3.uploadPart(params, function(err, data) {
            if (err) {
                console.log(err, err.stack)
            } // an error occurred
            else {
                /*
                    Once the part of data is uploaded we get an Entity tag for the uploaded object(ETag).
                    which is used later when we complete our multipart upload.
                */
                self.etag.push(data.ETag);
                if (self.booleanStop === true) {
                    self.completeMultiUpload();
                }
            }
        });
    }


    /*
        Completes a multipart upload by assembling previously uploaded parts.
    */
    completeMultiUpload() {
        var self = this;
        var outputTag = [];
        /*
            here we are constructing the Etag data in the required format.
        */
        self.etag.forEach((data, index) => {
            const obj = {
                ETag: data,
                PartNumber: index + 1 // part numbers are 1-based
            };
            outputTag.push(obj);
        });

        var params = {
            Bucket: self.bucketName, // required
            Key: self.filename, // required
            UploadId: self.uploadId, // required
            MultipartUpload: {
                Parts: outputTag
            }
        };

        self.s3.completeMultipartUpload(params, function(err, data) {
            if (err) {
                console.log(err, err.stack);
            } // an error occurred
            else {
                // initialize variables back to normal
                self.etag = [];
                self.recordedChunks = [];
                self.uploadId = "";
                self.booleanStop = false;
                //self.disableAllButton();
                self.removeLoader();
                console.log("sent!");
            }
        });
    }


    /*
        set loader
    */
    setLoader() {
        //$("#kc-container").addClass("overlay");
        //$(".preloader-wrapper.big.active.loader").removeClass("hide");
    }


    /*
        remove loader
    */
    removeLoader() {
       // $("#kc-container").removeClass("overlay");
        //$(".preloader-wrapper.big.active.loader").addClass("hide");
    }

    getFilename() {
        return this.filename;
    }

}
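
For completeness, the class above is meant to be used roughly like this; the region, identity pool id, bucket name and element ids below are placeholders, not my real values:

var audioStream = new AudioStream(
  "eu-west-1",                                        // placeholder region
  "eu-west-1:00000000-0000-0000-0000-000000000000",   // placeholder Cognito identity pool id
  "my-audio-bucket"                                   // placeholder bucket name
);
audioStream.audioStreamInitialize();

document.getElementById("record").addEventListener("click", function() {
  audioStream.startRecording("q1");
});
document.getElementById("stop").addEventListener("click", function() {
  audioStream.stopRecording("q1");
});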