Read file from aws s3 bucket using node fs

后端 未结 11 715
逝去的感伤
逝去的感伤 2020-12-07 21:37

I am attempting to read a file that is in an AWS S3 bucket using

fs.readFile(file, function (err, contents) {
  var myLines = contents.Body.toString().split(         


        
相关标签:
11条回答
  • 2020-12-07 22:26

    You have a couple options. You can include a callback as a second argument, which will be invoked with any error message and the object. This example is straight from the AWS documentation:

    s3.getObject(params, function(err, data) {
      if (err) console.log(err, err.stack); // an error occurred
      else     console.log(data);           // successful response
    });
    

    Alternatively, you can convert the output to a stream. There's also an example in the AWS documentation:

    var s3 = new AWS.S3({apiVersion: '2006-03-01'});
    var params = {Bucket: 'myBucket', Key: 'myImageFile.jpg'};
    var file = require('fs').createWriteStream('/path/to/file.jpg');
    s3.getObject(params).createReadStream().pipe(file);
    
    0 讨论(0)
  • 2020-12-07 22:26

    If you are looking to avoid the callbacks, you can take advantage of the SDK's .promise() function like this:

    const s3 = new AWS.S3();
    const params = {Bucket: 'myBucket', Key: 'myKey.csv'}
    const response = await s3.getObject(params).promise() // await the promise
    const fileContent = getObjectResult.Body.toString('utf-8'); // can also do 'base64' here if desired
    

    I'm sure the other ways mentioned here have their advantages but this works great for me. Sourced from this thread (see the last response from AWS): https://forums.aws.amazon.com/thread.jspa?threadID=116788

    0 讨论(0)
  • 2020-12-07 22:29
    // Download an S3 object to a local file via streams, with separate
    // error handlers for the service side and the file-write side.
    var destination = fs.createWriteStream('/path/to/file.jpg');
    var download = s3
        .getObject({Bucket: 'myBucket', Key: 'myImageFile.jpg'})
        .createReadStream();

    // Errors returned by the service (e.g. NoSuchKey: the specified key
    // does not exist) surface on the read stream.
    download.on('error', (err) => {
        console.error(err);
    });

    download
        .pipe(destination)
        .on('error', (err) => {
            // Capture any errors that occur when writing data to the file.
            console.error('File Stream:', err);
        })
        .on('close', () => {
            console.log('Done.');
        });
    

    Reference: https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/requests-using-stream-objects.html

    0 讨论(0)
  • 2020-12-07 22:35

    I had exactly the same issue when downloading from S3 very large files.

    The example solution from AWS docs just does not work:

    // Quoted verbatim as the NON-working approach this answer warns about:
    // for very large downloads the stream's 'close'/'error' events never
    // fire (see the explanation below the code).
    var file = fs.createWriteStream(options.filePath);
            // 'close' handler that, per the answer, is never invoked for
            // large files.
            file.on('close', function(){
                if(self.logger) self.logger.info("S3Dataset file download saved to %s", options.filePath );
                return callback(null,done);
            });
            // NOTE(review): the handler's parameter is `err` but the body
            // references `error` (undefined) — a bug even if the event fired.
            s3.getObject({ Key:  documentKey }).createReadStream().on('error', function(err) {
                if(self.logger) self.logger.error("S3Dataset download error key:%s error:%@", options.fileName, error);
                return callback(error);
            }).pipe(file);
    

    While this solution will work:

        // Working alternative for large downloads: consume the raw HTTP
        // events ('httpData'/'httpDone') emitted by the AWS.Request instead
        // of relying on createReadStream(), whose end/close/error events may
        // never fire for large objects.
        var file = fs.createWriteStream(options.filePath);
        s3.getObject({ Bucket: this._options.s3.Bucket, Key: documentKey })
        .on('error', function(err) {
            // BUG FIX: the original referenced `error`, which is undefined
            // here — the handler's parameter is `err`.
            if(self.logger) self.logger.error("S3Dataset download error key:%s error:%@", options.fileName, err);
            return callback(err);
        })
        // Write each raw HTTP body chunk straight to the file.
        .on('httpData', function(chunk) { file.write(chunk); })
        // The HTTP response is complete: close the file and report success.
        .on('httpDone', function() { 
            file.end(); 
            if(self.logger) self.logger.info("S3Dataset file download saved to %s", options.filePath );
            return callback(null,done);
        })
        .send();
    

    The createReadStream attempt just does not fire the end, close or error callback for some reason. See here about this.

    I'm using that solution also for writing down archives to gzip, since the first one (AWS example) does not work in this case either:

            // Download-then-gunzip variant: the compressed object is first
            // written to options.filePath via 'httpData' events (the
            // createReadStream approach fails here too, per the answer),
            // then re-read from disk, piped through zlib.createGunzip(),
            // and written decompressed to options.fileDest.
            var gunzip = zlib.createGunzip();
            var file = fs.createWriteStream( options.filePath );
    
            s3.getObject({ Bucket: this._options.s3.Bucket, Key: documentKey })
            .on('error', function (error) {
                if(self.logger) self.logger.error("%@",error);
                return callback(error);
            })
            // Accumulate the raw HTTP body chunks straight into the file.
            .on('httpData', function (chunk) {
                file.write(chunk);
            })
            .on('httpDone', function () {
    
                file.end();
    
                if(self.logger) self.logger.info("downloadArchive downloaded %s", options.filePath);
    
                // Second pass: decompress the archive we just saved.
                fs.createReadStream( options.filePath )
                .on('error', (error) => {
                    return callback(error);
                })
                // 'end' fires when the archive has been fully read; the
                // decompressed copy is then at options.fileDest.
                .on('end', () => {
                    if(self.logger) self.logger.info("downloadArchive unarchived %s", options.fileDest);
                    return callback(null, options.fileDest);
                })
                .pipe(gunzip)
                .pipe(fs.createWriteStream(options.fileDest))
            })
            .send();
    
    0 讨论(0)
  • 2020-12-07 22:37

    Here is the example which I used to retrieve and parse JSON data from S3.

        // Retrieve a JSON object from S3 and parse its body.
        var params = {Bucket: BUCKET_NAME, Key: KEY_NAME};
        new AWS.S3().getObject(params, function(err, json_data)
        {
          if (err) {
            // Don't swallow the error silently — surface the failure.
            console.error(err, err.stack);
            return;
          }
          // Buffer.from() replaces the deprecated `new Buffer(...)`
          // constructor before decoding the body as UTF-8 JSON.
          var json = JSON.parse(Buffer.from(json_data.Body).toString("utf8"));

          // ... PROCESS JSON DATA ...
        });
    
    0 讨论(0)
提交回复
热议问题