I am at a loss as to what I am doing wrong; here is what I have:
HTML
<html>
  <body>
    <form method="POST" action="/upload" enctype="multipart/form-data">
      <div class="field">
        <label for="image">Image Upload</label>
        <input type="file" name="image" id="image">
      </div>
      <input type="submit" class="btn" value="Save">
    </form>
  </body>
</html>
Port 5000 is my Node.js server's port. In this example I am using POST to /upload, and it works fine.
module.exports = function(app, models) {
  var fs = require('fs');
  var AWS = require('aws-sdk');
  var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
  var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

  AWS.config.update({
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey
  });

  var s3 = new AWS.S3();

  app.post('/upload', function(req, res){
    var params = {
      Bucket: 'makersquest',
      Key: 'myKey1234.png',
      Body: "Hello"
    };

    s3.putObject(params, function (perr, pres) {
      if (perr) {
        console.log("Error uploading data: ", perr);
      } else {
        console.log("Successfully uploaded data to myBucket/myKey");
      }
    });
  });
}
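For context, a route module like this is typically wired into a small Express entry point along the lines of the sketch below. The file names, the contents of models, and the require path are assumptions; only the port number and the module signature come from the question.

// app.js — hypothetical entry point; file names and paths are assumptions
var express = require('express');
var app = express();

var models = {}; // stand-in for whatever `models` holds in this app

// mount the route module shown above
require('./routes/upload')(app, models);

app.listen(5000, function () {
  console.log('Listening on port 5000');
});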
Now I want to post the file that I am POSTing, which is where the problem arises.
module.exports = function(app, models) {
  var fs = require('fs');
  var AWS = require('aws-sdk');
  var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
  var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

  AWS.config.update({
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey
  });

  var s3 = new AWS.S3();

  app.post('/upload', function(req, res){
    var path = req.files.image.path;
    fs.readFile(path, function(err, file_buffer){
      var params = {
        Bucket: 'makersquest',
        Key: 'myKey1234.png',
        Body: file_buffer
      };

      s3.putObject(params, function (perr, pres) {
        if (perr) {
          console.log("Error uploading data: ", perr);
        } else {
          console.log("Successfully uploaded data to myBucket/myKey");
        }
      });
    });
  });
}
The error I get is:
TypeError: Cannot read property 'path' of undefined
As a matter of fact, req.files is completely empty.
I am assuming I am missing something pretty obvious but I can't seem to find it.
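One quick way to see what is (and is not) reaching the server is to log the request's content type and whatever file-parsing middleware has populated. This is a hypothetical diagnostic version of the route; only the /upload path and the form above come from the question.

// Hypothetical diagnostic route: shows what actually arrives before any S3 work.
app.post('/upload', function (req, res) {
  console.log('content-type:', req.headers['content-type']); // expect multipart/form-data; boundary=...
  console.log('req.files:', req.files); // stays undefined unless multipart middleware (e.g. multer) ran
  res.end();
});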
You will need something like multer to handle multipart uploading. Here is an example streaming your file upload to S3 using aws-sdk.
var multer = require('multer');
var AWS = require('aws-sdk');

var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

AWS.config.update({
  accessKeyId: accessKeyId,
  secretAccessKey: secretAccessKey
});

var s3 = new AWS.S3();

app.use(multer({ // https://github.com/expressjs/multer
  dest: './public/uploads/',
  limits: { fileSize: 100000 },
  rename: function (fieldname, filename) {
    return filename.replace(/\W+/g, '-').toLowerCase();
  },
  onFileUploadData: function (file, data, req, res) {
    // file : { fieldname, originalname, name, encoding, mimetype, path, extension, size, truncated, buffer }
    var params = {
      Bucket: 'makersquest',
      Key: file.name,
      Body: data
    };

    s3.putObject(params, function (perr, pres) {
      if (perr) {
        console.log("Error uploading data: ", perr);
      } else {
        console.log("Successfully uploaded data to myBucket/myKey");
      }
    });
  }
}));

app.post('/upload', function(req, res){
  if(req.files.image !== undefined){ // `image` is the field name from your form
    res.redirect("/uploads"); // success
  }else{
    res.send("error, no file chosen");
  }
});
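Note that the snippet above uses the older multer 0.x options API (dest, rename, onFileUploadData). With multer 1.x the same buffer-to-S3 flow might look roughly like the sketch below; the bucket name, field name, and the s3 client are reused from the snippet above, and everything else is an assumption rather than the answer's own code.

// Rough multer 1.x equivalent of the flow above (a sketch, not the original answer's code).
var multer = require('multer');
var upload = multer({
  storage: multer.memoryStorage(), // keep the uploaded file in memory as a Buffer
  limits: { fileSize: 100000 }
});

app.post('/upload', upload.single('image'), function (req, res) {
  if (!req.file) return res.send("error, no file chosen");

  s3.putObject({
    Bucket: 'makersquest',
    Key: req.file.originalname, // use Date.now() for unique keys in real use
    Body: req.file.buffer,      // Buffer provided by memoryStorage
    ContentType: req.file.mimetype
  }, function (err) {
    if (err) return res.status(500).send("Error uploading data");
    res.redirect("/uploads");
  });
});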
Latest Answer @ Dec-2016 [New]
Use multer-s3 for multipart uploading to S3 without saving to local disk, as follows:
var express = require('express'),
    aws = require('aws-sdk'),
    bodyParser = require('body-parser'),
    multer = require('multer'),
    multerS3 = require('multer-s3');

aws.config.update({
  secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
  accessKeyId: 'XXXXXXXXXXXXXXX',
  region: 'us-east-1'
});

var app = express(),
    s3 = new aws.S3();

app.use(bodyParser.json());

var upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'bucket-name',
    key: function (req, file, cb) {
      console.log(file);
      cb(null, file.originalname); // use Date.now() for unique file keys
    }
  })
});

// open in browser to see upload form
app.get('/', function (req, res) {
  res.sendFile(__dirname + '/index.html');
});

// used by the upload form
app.post('/upload', upload.array('upl', 1), function (req, res, next) {
  res.send("Uploaded!");
});

app.listen(3000, function () {
  console.log('Example app listening on port 3000!');
});
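As a small follow-up, multer-s3 attaches details of the stored object (such as key and location) to each entry in req.files, so the handler could report where the file landed. This is a hedged variant of the /upload handler above, not part of the original answer:

// Variant of the /upload handler above that reports where the file landed.
// Assumption: multer-s3 attaches `key` and `location` to each file object.
app.post('/upload', upload.array('upl', 1), function (req, res, next) {
  var file = req.files && req.files[0];
  if (!file) return res.status(400).send("No file received");
  res.send("Uploaded " + file.key + " to " + file.location);
});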
Latest Answer @ Mar-2016 [Old-One]
Edit 1: use multer@1.1.0 and multer-s3@1.4.1 for the following snippet:
var express = require('express'),
    bodyParser = require('body-parser'),
    multer = require('multer'),
    s3 = require('multer-s3');

var app = express();

app.use(bodyParser.json());

var upload = multer({
  storage: s3({
    dirname: '/',
    bucket: 'bucket-name',
    secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
    accessKeyId: 'XXXXXXXXXXXXXXX',
    region: 'us-east-1',
    filename: function (req, file, cb) {
      cb(null, file.originalname); // use Date.now() for unique file keys
    }
  })
});

// open in browser to see upload form
app.get('/', function (req, res) {
  res.sendFile(__dirname + '/index.html');
});

// used by the upload form
app.post('/upload', upload.array('upl'), function (req, res, next) {
  res.send("Uploaded!");
});

app.listen(3000, function () {
  console.log('Example app listening on port 3000!');
});
For a complete running example, clone the express_multer_s3 repo and run node app.
Sounds like you might not have the Express bodyParser middleware set up. Can you post your entire server file (app.js, server.js, what have you)?
Instead of multipart/form-data, you can try using image/png or whatever the correct mime type is.
You need something like multer in your set of middleware to handle multipart/form-data for you and populate req.files. From the docs:
var express = require('express')
var multer = require('multer')
var app = express()
app.use(multer({ dest: './uploads/'}))
Now req.files.image.path should be populated in your app.post function.
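Tying that back to the question, once multer has populated req.files the original handler can read the temp file it wrote and push it to S3. A minimal sketch, assuming fs, the AWS config, and the s3 client are set up exactly as in the question's module (bucket and key names copied from the question; this uses the older multer 0.x app.use style shown just above):

// Sketch: the question's /upload handler once req.files is populated by multer.
app.post('/upload', function (req, res) {
  var path = req.files.image.path; // temp file written by multer to ./uploads/
  fs.readFile(path, function (err, file_buffer) {
    if (err) return res.send("could not read uploaded file");
    s3.putObject({
      Bucket: 'makersquest',
      Key: 'myKey1234.png',
      Body: file_buffer
    }, function (perr) {
      if (perr) return res.send("Error uploading data");
      res.send("Successfully uploaded data");
    });
  });
});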
It looks like your req.files.image is undefined. console.log out what req.files.image returns and see if you can go from there.
This Stack Overflow answer was the best one I found explaining exactly how to get Node to S3 working:
AWS Missing credentials when i try send something to my S3 Bucket (Node.js)
This, in addition to some more stuff I had to hack on, got it all working. In my situation I was using a MEAN stack application, so the Node file I was working with was a route file.
My aconfig.json file with the Amazon credentials looks like this:
{ "accessKeyId": "*****YourAccessKey****", "secretAccessKey": "***YourSecretKey****" }
The final contents of the route file look like the file pasted below.
router.post('/sendToS3', function(req, res) {
  var fs = require('fs');
  var multer = require('multer');
  var AWS = require('aws-sdk');
  var path = require('path');

  var awsCredFile = path.join(__dirname, '.', 'aconfig.json');
  console.log('awsCredFile is');
  console.log(awsCredFile);

  AWS.config.loadFromPath(awsCredFile);

  var s3 = new AWS.S3();
  var photoBucket = new AWS.S3({params: {Bucket: 'myGreatBucketName'}});

  var sampleFile = {
    "_id" : 345345,
    "fieldname" : "uploads[]",
    "originalname" : "IMG_1030.JPG",
    "encoding" : "7bit",
    "mimetype" : "image/jpeg",
    "destination" : "./public/images/uploads",
    "filename" : "31a66c51883595e74ab7ae5e66fb2ab8",
    "path" : "/images/uploads/31a66c51883595e74ab7ae5e66fb2ab8",
    "size" : 251556,
    "user" : "579fbe61adac4a8a73b6f508"
  };

  var filePathToSend = path.join(__dirname, '../public', sampleFile.path);

  function uploadToS3(filepath, destFileName, callback) {
    photoBucket
      .upload({
        ACL: 'public-read',
        Body: fs.createReadStream(filepath),
        Key: destFileName.toString(),
        ContentType: 'application/octet-stream' // force download if it's accessed as a top location
      })
      // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#httpUploadProgress-event
      .on('httpUploadProgress', function(evt) { console.log(evt); })
      // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#send-property
      .send(callback);
  }

  multer({limits: {fileSize: 10*1024*1024}});

  console.log('filePathToSend is ');
  console.log(filePathToSend);

  uploadToS3(filePathToSend, sampleFile.filename, function (err, data) {
    if (err) {
      console.error(err);
      return res.status(500).send('failed to upload to s3').end();
    }
    res.status(200)
      .send('File uploaded to S3: '
        + data.Location.replace(/</g, '&lt;')
        + '<br/><img src="' + data.Location.replace(/"/g, '&quot;') + '"/>')
      .end();
  });

  console.log('uploading now...');
});
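If you want this route to accept a real browser upload instead of the hard-coded sampleFile, one option is to let multer 1.x parse the request first and feed the parsed file into the same uploadToS3 helper. A rough sketch under the assumption that the AWS setup and uploadToS3 are moved to module scope; the field name 'uploads[]' and the upload directory are taken from sampleFile above, the size limit from the route itself, and everything else is hypothetical:

// Hypothetical variant: replace the hard-coded sampleFile with a real upload.
var multer = require('multer');
var upload = multer({
  dest: './public/images/uploads',
  limits: { fileSize: 10 * 1024 * 1024 }
});

router.post('/sendToS3', upload.single('uploads[]'), function (req, res) {
  if (!req.file) return res.status(400).send('no file received');

  // req.file.path is where multer stored the temp file on disk
  uploadToS3(req.file.path, req.file.filename, function (err, data) {
    if (err) return res.status(500).send('failed to upload to s3');
    res.send('File uploaded to S3: ' + data.Location);
  });
});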
This took me a while to finally get working, but if you set up the route above, update the sampleFile JSON to point to a real file on your system, and hit it with Postman, it will publish a file to your S3 account.
Hope this helps
Source: https://stackoverflow.com/questions/17930204/simple-file-upload-to-s3-using-aws-sdk-and-node-express