Node reading file in specified chunk size

野性不改 2020-12-30 10:22

The goal: Upload large files to AWS Glacier without holding the whole file in memory.

I'm currently uploading to Glacier using fs.readFileSync() and things are working, but readFileSync() pulls the entire file into memory, which won't work for large archives. How can I read the file in a specified chunk size instead?

3 Answers
  •  时光说笑
    2020-12-30 10:36

    If nothing else you can just use fs.open(), fs.read(), and fs.close() manually. Example:

    var fs = require('fs');

    var CHUNK_SIZE = 10 * 1024 * 1024, // 10MB
        buffer = Buffer.alloc(CHUNK_SIZE),
        filePath = '/tmp/foo';

    fs.open(filePath, 'r', function(err, fd) {
      if (err) throw err;
      function readNextChunk() {
        // Passing `null` as the position reads from the current file offset,
        // so each call picks up where the previous one left off.
        fs.read(fd, buffer, 0, CHUNK_SIZE, null, function(err, nread) {
          if (err) throw err;

          if (nread === 0) {
            // done reading file, do any necessary finalization steps

            fs.close(fd, function(err) {
              if (err) throw err;
            });
            return;
          }

          var data;
          if (nread < CHUNK_SIZE)
            data = buffer.slice(0, nread); // `slice` shares memory with `buffer`
          else
            data = buffer;

          // Note: `data` points at the shared `buffer`, so consume (or copy)
          // it before the next fs.read() overwrites its contents.
          // do something with `data`, then call `readNextChunk();`
          // (see the range-bookkeeping sketch below)
        });
      }
      readNextChunk();
    });
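
    To connect this to the Glacier use case in the question, the "do something with `data`" step would upload one multipart part per chunk. Here is a minimal sketch of just the byte-range bookkeeping; `uploadPart` is a hypothetical stand-in for whatever SDK call you use, and the "bytes start-end/*" string is the range format Glacier's multipart upload expects:

    var position = 0; // running byte offset of the next chunk

    // `uploadPart` is hypothetical -- substitute your actual Glacier call.
    function handleChunk(data, done) {
      var start = position,
          end = position + data.length - 1;
      position += data.length;
      // Glacier identifies each part by its byte range, e.g. "bytes 0-10485759/*"
      uploadPart(data, 'bytes ' + start + '-' + end + '/*', function(err) {
        if (err) throw err;
        done(); // e.g. call readNextChunk() to read the next part
      });
    }

    Calling done() only after the upload finishes matters here, because the next fs.read() reuses the shared buffer.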
    
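    Alternatively, if you'd rather not manage the file descriptor yourself, fs.createReadStream() can hand you chunks of a requested size through its highWaterMark option. A minimal sketch, assuming Node 10+ for async iteration over streams and reusing the 10MB chunk size and file path from above:

    const fs = require('fs');

    const CHUNK_SIZE = 10 * 1024 * 1024; // 10MB
    const stream = fs.createReadStream('/tmp/foo', { highWaterMark: CHUNK_SIZE });

    (async () => {
      for await (const chunk of stream) {
        // Each `chunk` is its own Buffer of up to CHUNK_SIZE bytes
        // (the final chunk is usually shorter), so no buffer-reuse caveat here.
        console.log('read %d bytes', chunk.length);
      }
      // The file descriptor is opened and closed for you.
    })().catch(function(err) { throw err; });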
