I need help adding pause/resume functionality to my file uploader. The uploader currently works as intended, except under poor internet conditions: we have had instances where a user's file arrived in our upload bucket corrupted and broken. Implementing pause/resume should help alleviate this. Here is our current multipart upload code.
instructorUpload.js
/**
 * Begins an AWS S3 Multipart Upload for object creation in S3
 *
 * @param {Number} i - File index
 * @param {String} key - Unique key for the file upload
 */
function createMultipartUpload(i, key){
    var formdata = new FormData();
    formdata.append("command", 'CreateMultipartUpload');
    formdata.append("fileindex", i);
    formdata.append("key", key);
    var xhr = request(false, "/api/UploadFile_v0/FileUploader.php", new XMLHttpRequest(), formdata);
    xhr.onreadystatechange = function() {
        if (xhr.readyState === 4)
            uploadPart(JSON.parse(xhr.responseText), 1);
    };
}
/**
 * Uploads an individual part for an AWS S3 Multipart Upload
 *
 * @param {Object} data - Parsed JSON information received after the previous POST completed
 * @param {Number} partNum - File chunk index
 */
function uploadPart(data, partNum){
    var fileIndex = data['fileindex'];
    var file = files[fileIndex];
    if (partNum > Math.ceil(file.size / chunkSize)) {
        completeMultipartUpload(data);
        return;
    }
    var blobs = chunkFile(file, partNum);
    var formdata = new FormData();
    for (var i = 0; i < blobs.length; i++)
        formdata.append("file[]", blobs[i]);
    formdata.append("command", 'UploadPart');
    formdata.append("uploadId", data['uploadId']);
    formdata.append("key", data['key']);
    formdata.append("partNumber", partNum);
    var xhr = request(true, "/api/UploadFile_v0/FileUploader.php", new XMLHttpRequest(), formdata);
    xhr.onreadystatechange = function() {
        if (xhr.readyState === 4)
            uploadPart(data, partNum + 1);
    };
}
/**
 * Completes the Multipart Upload
 *
 * @param {Object} data - Parsed JSON information received after POST is complete
 */
function completeMultipartUpload(data) {
    var fileIndex = data['fileindex'];
    var file = files[fileIndex];
    var type = file.type.split("/")[0];
    var formdata = new FormData();
    var key = data['key'];
    formdata.append("command", 'CompleteMultipartUpload');
    formdata.append("uploadId", data['uploadId']);
    formdata.append("key", key);
    var xhr = request(false, "/api/UploadFile_v0/FileUploader.php", new XMLHttpRequest(), formdata);
    xhr.onreadystatechange = function() {
        if (xhr.readyState === 4) {
            var sendBackData = JSON.parse(xhr.responseText);
            if (sendBackData['success']) {
                if (type === "video")
                    elasticTranscoder(data['key'], file.name);
                var urlEmbed = "";
                if (type === "video") {
                    urlEmbed = createEmbed((key.split('.')[0] + "/" + key.split("/")[key.split("/").length - 1]).split(".")[0]);
                } else {
                    urlEmbed = '<img alt="Embed Image" height="100%" width="100%" src="' + sendBackData['url'] + '"/>';
                }
                directoryInsert(fileIndex, urlEmbed, type);
            }
        }
    };
}
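On the client side, my rough idea is to keep a little state per file so that uploadPart can stop between chunks and pick up again later. None of the names below exist in my code yet; this is only a sketch of what I think I need, assuming createMultipartUpload records the uploadId/key it gets back into uploadState and uploadPart updates lastUploadedPart after each successful part (and checks the paused flag before sending the next chunk):

// Hypothetical per-file upload state (not in my current code).
var uploadState = {};

function pauseUpload(fileIndex) {
    // uploadPart would check this flag before chunking the next part
    // and simply return instead of recursing, leaving the state intact.
    if (uploadState[fileIndex])
        uploadState[fileIndex].paused = true;
}

function resumeUpload(fileIndex) {
    var state = uploadState[fileIndex];
    if (!state || !state.uploadId)
        return;
    state.paused = false;
    // Continue from the part after the last one that completed.
    uploadPart({
        fileindex: fileIndex,
        uploadId: state.uploadId,
        key: state.key
    }, (state.lastUploadedPart || 0) + 1);
}

Since parts are sent one at a time, the pause would only take effect after the part currently in flight finishes, so no chunk is cut off by the pause itself.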
FileUploader.php
function s3Command($command){
    switch ($command) {
        case 'CreateMultipartUpload':
            $fileindex = $_POST['fileindex'];
            $key = preg_replace('/\s+/', '_', $_POST['key']);
            $response = $GLOBALS["s3"]->createMultipartUpload(array(
                'Bucket' => TMP_IMG,
                'Key' => $key,
                'ACL' => 'public-read'
            ));
            $uploadId = $response['UploadId'];
            sendJson(array(
                'uploadId' => $uploadId,
                'key' => $key,
                'fileindex' => $fileindex
            ));
            break;
        case 'UploadPart':
            $key = preg_replace('/\s+/', '_', $_POST['key']);
            $tmp_files = $_FILES['file'];
            $files = array();
            for ($i = 0; $i < count($tmp_files['name']); $i++){
                $files[] = array(
                    'name' => $tmp_files['name'][$i],
                    'tmp_name' => $tmp_files['tmp_name'][$i],
                    'type' => $tmp_files['type'][$i],
                    'size' => $tmp_files['size'][$i],
                    'error' => $tmp_files['error'][$i]
                );
            }
            $body = mergeFiles($files);
            $result = $GLOBALS["s3"]->uploadPart(array(
                'Bucket' => TMP_IMG,
                'Key' => $key,
                'UploadId' => $_POST['uploadId'],
                'PartNumber' => $_POST['partNumber'],
                'Body' => $body
            ));
            break;
        case 'CompleteMultipartUpload':
            $key = preg_replace('/\s+/', '_', $_POST['key']);
            $partsModel = $GLOBALS["s3"]->listParts(array(
                'Bucket' => TMP_IMG,
                'Key' => $key,
                'UploadId' => $_POST['uploadId']
            ));
            $model = $GLOBALS["s3"]->completeMultipartUpload(array(
                'Bucket' => TMP_IMG,
                'Key' => $key,
                'UploadId' => $_POST['uploadId'],
                'Parts' => $partsModel['Parts'],
            ));
            $url = $GLOBALS["s3"]->getObjectUrl(TMP_IMG, $key);
            $tmp = explode('.', $key);
            $type = getFileType(end($tmp));
            if ($type == 'image') {
                $url = resizeImage($url);
            } else if ($type != 'video') {
                copyObject($key, FINISHED_IMG);
                $url = $GLOBALS["s3"]->getObjectUrl(FINISHED_IMG, $key);
            }
            sendJson(array(
                'success' => true,
                'url' => $url,
                'key' => $key,
                'type' => $type
            ));
            break;
        case 'AbortMultipartUpload':
            $key = preg_replace('/\s+/', '_', $_POST['key']);
            $GLOBALS["s3"]->abortMultipartUpload(array(
                'Bucket' => TMP_IMG,
                'Key' => $key,
                'UploadId' => $_POST['uploadId']
            ));
            break;
        default:
            # code...
            break;
    }
}
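To find out which parts actually reached S3 (rather than trusting a counter kept in the browser), I was thinking of adding a 'ListParts' command to FileUploader.php that wraps the same listParts() call already used in the CompleteMultipartUpload case and sends back the part numbers it finds. Assuming that hypothetical command existed and returned something like {"partNumbers": [1, 2, ...]}, the client could resume with roughly this:

function resumeFromS3(fileIndex, key, uploadId) {
    var formdata = new FormData();
    formdata.append("command", 'ListParts'); // hypothetical server-side command
    formdata.append("key", key);
    formdata.append("uploadId", uploadId);
    var xhr = request(false, "/api/UploadFile_v0/FileUploader.php", new XMLHttpRequest(), formdata);
    xhr.onreadystatechange = function() {
        if (xhr.readyState === 4) {
            var parts = JSON.parse(xhr.responseText)['partNumbers'] || [];
            // Parts are uploaded sequentially, so the next part to send is one
            // past the highest part number S3 reports as already stored.
            var nextPart = (parts.length ? Math.max.apply(null, parts) : 0) + 1;
            uploadPart({ fileindex: fileIndex, uploadId: uploadId, key: key }, nextPart);
        }
    };
}

As I understand it, S3 only lists parts that finished uploading, so a part that was cut off mid-transfer would simply be sent again.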
From what I have gathered, to pause and resume an upload I need the file, its key, its upload ID, and a record of which parts have already been uploaded successfully. Are there any suggestions on how I can accomplish this?
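For resuming after a page reload, I was also considering persisting the key, upload ID, and last completed part number, since the File object itself cannot be stored and the user would have to re-select the file anyway. A minimal sketch, assuming localStorage is acceptable and using a storage key built from the file name and size (my own guess at how to match a re-selected file):

function saveUploadState(file, key, uploadId, lastUploadedPart) {
    // Persist just enough to resume later; the File itself must be re-selected.
    localStorage.setItem("upload:" + file.name + ":" + file.size, JSON.stringify({
        key: key,
        uploadId: uploadId,
        lastUploadedPart: lastUploadedPart
    }));
}

function loadUploadState(file) {
    var saved = localStorage.getItem("upload:" + file.name + ":" + file.size);
    return saved ? JSON.parse(saved) : null;
}

On resume I would still verify the stored lastUploadedPart against what S3 actually has (the ListParts idea above) rather than trusting it blindly.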