Send an ArrayBuffer to S3 with a PUT request to a signed URL

I ended up not needing to create my own Buffer of the file. Instead, if I pass the file reference returned by the input directly to axios (or xhr), the request automatically chunks the upload.

Initially I could only make it work with XMLHttpRequest, but I quickly found a way to wrap this around axios which neatens the logic.

XMLHttpRequest
// Build a CORS-capable request and PUT the file straight to the signed URL.
// `createCORSRequest`, `url`, `file`, and `md5_base64_binary` come from the
// surrounding upload flow (not shown here).
const xhr = createCORSRequest('PUT', url);

if (!xhr) {
  console.log('CORS not supported');
} else {
  xhr.onload = function () {
    // S3 answers a successful signed-URL PUT with 200.
    if (xhr.status === 200) {
      console.log('completed');
    } else {
      console.log('Upload error: ' + xhr.status);
    }
  };

  xhr.onerror = function (err) {
    console.log(err);
  };

  // Upload progress fires on xhr.upload (request body), not on xhr itself.
  xhr.upload.onprogress = function (progressEvent) {
    console.log(progressEvent);
  };

  // These headers must match the ones the URL was signed with,
  // otherwise S3 rejects the request with a signature mismatch.
  xhr.setRequestHeader('Content-Type', file.type);
  xhr.setRequestHeader('Content-MD5', md5_base64_binary);
  xhr.setRequestHeader('Content-Encoding', 'UTF-8');
  xhr.setRequestHeader('x-amz-acl', 'private');
  xhr.send(file);
}

Or using axios:

/**
 * Upload a file to S3 via its pre-signed URL, reporting progress via state.
 *
 * @param {Object} fileObject - expected shape: { signedURL, fileRef, mime, checksum }
 * @returns {Promise<*>} resolves with the S3 response body, rejects with the axios error
 */
uploadFileToS3 = fileObject => {
  const decodedURL = decodeURIComponent(fileObject.signedURL);

  // axios.put already returns a promise, so wrapping it in `new Promise`
  // (the explicit-construction anti-pattern) is unnecessary — return the
  // chain directly. Rejection propagates the same axios error as before.
  // Headers must match those used when signing the URL or S3 rejects the PUT.
  return axios
    .put(decodedURL, fileObject.fileRef, {
      headers: {
        'Content-Type': fileObject.mime,
        'Content-MD5': fileObject.checksum,
        'Content-Encoding': 'UTF-8',
        'x-amz-acl': 'private',
      },
      onUploadProgress: ({ loaded, total }) => {
        // Math.round already yields an integer; the original
        // `parseInt(Math.round(...), 10)` coerced a number through a
        // string parser for no benefit.
        this.setState({ uploadProgress: Math.round((loaded * 100) / total) });
      },
    })
    .then(response => response.data);
};