I'm trying to upload files to S3 using Dropzone.js.
I used this tutorial to upload the files directly from the client:
https://devcenter.heroku.com/articl
For anyone who also lands on this question, I'd like to share my working example as well. Note that I went a step further and dropped my own backend entirely, using AWS Lambda (i.e. serverless) to do the signing job instead; the concept is the same, though.
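The lambda.getSignedURL(file) call in the code below is just a thin wrapper around an HTTP endpoint; the Lambda behind it might look something like this. This is a minimal sketch, assuming aws-sdk v2, an API Gateway proxy integration, and a placeholder bucket name:
// Minimal sketch of the signing Lambda (assumptions: aws-sdk v2,
// API Gateway proxy integration, placeholder bucket name)
const AWS = require('aws-sdk')
const s3 = new AWS.S3()

exports.handler = (event, context, callback) => {
  const query = event.queryStringParameters || {}
  const params = {
    Bucket: 'my-upload-bucket', // placeholder, use your bucket
    Key: query.fileName,
    Expires: 60,
    ContentType: query.fileType
  }
  s3.getSignedUrl('putObject', params, (err, url) => {
    if (err) return callback(err)
    callback(null, { statusCode: 200, body: JSON.stringify({ url }) })
  })
}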
So, basically, you hijack the xhr.send function as you already mentioned, and manually call processFile inside the accept function. The upload then starts immediately for each file as it's accepted, and you're able to upload multiple files simultaneously.
const vm = this
let options = {
  // The URL will be changed for each new file being processed
  url: '/',
  // Since we're going to do a `PUT` upload to S3 directly
  method: 'put',
  // Hijack xhr.send, since Dropzone always uploads files using formData
  // ref: https://github.com/danialfarid/ng-file-upload/issues/743
  sending (file, xhr) {
    let _send = xhr.send
    xhr.send = () => {
      _send.call(xhr, file)
    }
  },
  // Upload one file at a time, since we're using the S3 pre-signed URL scenario
  parallelUploads: 1,
  uploadMultiple: false,
  // Content-Type should be included, otherwise you'll get a signature
  // mismatch error from S3. We're going to update this for each file.
  headers: {},
  // We're going to process each file manually (see `accept` below)
  autoProcessQueue: false,
  // Request a signed upload URL when a file is accepted
  accept (file, done) {
    lambda.getSignedURL(file)
      .then((url) => {
        file.uploadURL = url
        done()
        // Manually process each file
        setTimeout(() => vm.dropzone.processFile(file))
      })
      .catch((err) => {
        done('Failed to get an S3 signed upload URL: ' + err)
      })
  }
}
// Instantiate Dropzone
this.dropzone = new Dropzone(this.$el, options)
// Set the signed upload URL for each file
vm.dropzone.on('processing', (file) => {
  vm.dropzone.options.url = file.uploadURL
})
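One gap worth flagging: the headers comment above promises a per-file Content-Type, but nothing ever sets it. If S3 returns SignatureDoesNotMatch, an untested sketch is to extend the sending hook like this (assuming the signer included ContentType in its params):
sending (file, xhr) {
  // Assumption: the signing side included ContentType in its params,
  // so the PUT must send the matching Content-Type header
  xhr.setRequestHeader('Content-Type', file.type)
  let _send = xhr.send
  xhr.send = () => _send.call(xhr, file)
}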
The code has a few Vue.js specifics in it, but the concept is framework agnostic; you get the idea. For a full working Dropzone component example, please have a look at my GitHub repo.
Here's what worked for me for the Dropzone init parameters, with the S3 signature generated by Node on the backend:
HTML Frontend Code using Dropzone:
var myDropzone = new Dropzone(dropArea, {
  url: "#",
  dictDefaultMessage: "Drag n drop or tap here",
  method: "PUT",
  uploadMultiple: false,
  paramName: "file",
  maxFiles: 10,
  thumbnailWidth: 80,
  thumbnailHeight: 80,
  parallelUploads: 20,
  autoProcessQueue: true,
  previewTemplate: dropPreviewTemplate,
  //autoQueue: false, // Make sure the files aren't queued until manually added
  previewsContainer: dropPreviewContainer, // Define the container to display the previews
  clickable: true, //".fileinput-button" // Define the element that should be used as click trigger to select files.
  accept: function(file, cb) {
    // Override the file name, to use the S3 signature
    //console.log(file);
    var params = {
      fileName: file.name,
      fileType: file.type,
    };
    // Path to the S3 signature endpoint
    $.getJSON('/uploader', params).done(function(data) {
      //console.log(data);
      if (!data.signedRequest) {
        return cb('Failed to receive an upload url');
      }
      file.signedRequest = data.signedRequest;
      file.finalURL = data.downloadURL;
      cb();
    }).fail(function() {
      return cb('Failed to receive an upload url');
    });
  },
  sending: function(file, xhr) {
    console.log('sending');
    // Send the raw file instead of formData
    var _send = xhr.send;
    xhr.setRequestHeader('x-amz-acl', 'public-read');
    xhr.send = function() {
      _send.call(xhr, file);
    };
  },
  processing: function(file) {
    // Point the upload at this file's pre-signed URL
    this.options.url = file.signedRequest;
  }
});
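The snippet assumes dropArea, dropPreviewTemplate, and dropPreviewContainer already exist in your page; the element IDs below are my own guesses, adjust them to your markup:
// Hypothetical glue for the Dropzone snippet above
var dropArea = document.querySelector("#drop-area");
var dropPreviewContainer = document.querySelector("#previews");
var dropPreviewTemplate = document.querySelector("#preview-template").innerHTML;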
Here are the libraries I used on the Node.js side:
var Crypto = require("crypto"),
    AWS = require("aws-sdk");
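The signing code further down also assumes BUCKET is defined and the SDK can find credentials; a minimal sketch with placeholder values:
// Assumption: credentials come from the environment or an IAM role
AWS.config.update({ region: "us-east-1" });
var BUCKET = "my-bucket-name"; // placeholder, use your bucket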
Here's a sample of the CORS config on S3:
<?xml version="1.0" encoding="UTF-8"?>
<CORSConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
  <CORSRule>
    <AllowedOrigin>*</AllowedOrigin>
    <AllowedMethod>PUT</AllowedMethod>
    <AllowedHeader>*</AllowedHeader>
  </CORSRule>
</CORSConfiguration>
Here's the code to generate the S3 signature on Node.js:
getPolicy: function(req, res)
{
  var fileId = Crypto.randomBytes(20).toString('hex').toUpperCase();
  var prefix = "bl_";
  var newFileName = prefix + fileId; //req.query.fileName;
  var s3 = new AWS.S3();
  var s3_params = {
    Bucket: BUCKET,
    Key: newFileName,
    Expires: 60,
    ContentType: req.query.fileType,
    ACL: 'public-read'
  };
  s3.getSignedUrl('putObject', s3_params, function(err, data) {
    if (err) {
      console.log(err);
      // Make sure the client gets a response on failure too
      res.statusCode = 500;
      res.end();
    }
    else {
      var return_data = {
        signedRequest: data,
        uploadURL: 'https://' + BUCKET + '.s3.amazonaws.com/' + newFileName,
        downloadURL: 'http://' + BUCKET + '.s3-website-us-east-1.amazonaws.com/' + newFileName,
      };
      res.write(JSON.stringify(return_data));
      res.end();
    }
  });
}
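Since the frontend above calls $.getJSON('/uploader', params), the handler needs to be mounted on that route; a sketch, assuming Express and that getPolicy lives on a hypothetical uploader object:
// Hypothetical wiring; `uploader` is the object holding getPolicy above
var express = require("express");
var app = express();
app.get("/uploader", uploader.getPolicy);
app.listen(3000);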
Hopefully some of this is helpful.
There are two separate items that must be dealt with to upload to S3 - authentication and uploading.
Some possibilities, in order of security:
Generating pre-signed links was demonstrated by Aaron Rau.
Using STS is conceptually simpler (no need to sign each link), but is somewhat less secure (the same temp credentials can be used elsewhere until they expire).
If you use federated auth, you can skip the server-side entirely!
Some good tutorials for getting temporary IAM credentials for federated users are here (for FineUploader, but the mechanism is the same) and here.
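With aws-sdk.js in the browser, for instance, Cognito federated identities can hand out temporary credentials without any server of yours; a minimal sketch (the pool ID is a placeholder):
// Sketch: temporary credentials straight from Cognito federated identities
AWS.config.region = 'us-east-1';
AWS.config.credentials = new AWS.CognitoIdentityCredentials({
  IdentityPoolId: 'us-east-1:00000000-0000-0000-0000-000000000000' // placeholder
});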
To generate your own temporary IAM credentials you can use the AWS-SDK. An example in PHP:
Server:
<?php
require 'vendor/autoload.php';
use Aws\Result;
use Aws\Sts\StsClient;
$client = new StsClient(['region' => 'us-east-1', 'version' => 'latest']);
$result = $client->getSessionToken();
header('Content-type: application/json');
echo json_encode($result['Credentials']);
Client:
let dropzonesetup = async () => {
  let creds = await fetch('//example.com/auth.php')
    .then(res => res.json()) // parse the Credentials JSON the PHP endpoint returns
    .catch(console.error);

  // If using aws-sdk.js; note the STS result uses capitalized keys
  AWS.config.credentials = new AWS.Credentials(
    creds.AccessKeyId, creds.SecretAccessKey, creds.SessionToken);
}
Either use Dropzone natively and amend as needed, or have Dropzone be a front for the aws-sdk.
You need to include it:
<script src="//sdk.amazonaws.com/js/aws-sdk-2.262.1.min.js"></script>
And then update Dropzone to interact with it (based on this tutorial).
let canceled = file => { if (file.s3upload) file.s3upload.abort() }
let options =
  { canceled
  , removedfile: canceled
  , accept (file, done) {
      let params = { Bucket: 'mybucket', Key: file.name, Body: file };
      file.s3upload = new AWS.S3.ManagedUpload({params});
      done();
    }
  }
// Let aws-sdk feed upload events back to Dropzone.
function sendEvents(file) {
  let progress = i => dz.emit('uploadprogress', file, i.loaded * 100 / i.total, i.loaded);
  file.s3upload.on('httpUploadProgress', progress);
  file.s3upload.send(err => err ? dz.emit('error', file, err) : dz.emit('complete', file));
}
Dropzone.prototype.uploadFiles = files => files.map(sendEvents);
var dz = new Dropzone('#dz', options)
let options =
  { method: 'put'
  // Have DZ send raw data instead of formData
  , sending (file, xhr) {
      let _send = xhr.send
      xhr.send = () => _send.call(xhr, file)
    }
  // For STS, if creds is the result of getSessionToken / getFederatedToken
  , headers: { 'x-amz-security-token': creds.SessionToken }
  // Or, if you are using signed URLs (see other answers)
  , processing: function(file) { this.options.url = file.signedRequest; }
  , async accept (file, done) {
      let url = await fetch('https://example.com/auth.php')
        .then(res => res.text()) // assumption: the endpoint returns a bare URL
        .catch(err => done('Failed to get an S3 signed upload URL: ' + err))
      file.uploadURL = url
      done()
    }
  }
The above is untested; I've added just the token, but I'm not sure which headers really need to be added. Check here, here, and here for the docs, and perhaps use FineUploader's implementation as a guide.
Hopefully this will help, and if anyone wants to add a pull request for S3 support (as is in FineUploader), I'm sure it will be appreciated.