Question
I am working on a NodeJS project that is using sharp node package
for resizing JPEG/JPG images.
The problem is Node process keeps adding the processed file in memory and never releases it.
As a result, amount of memory consumed increases with every request & it never gets released.
After debugging the application, I realised that sharp toBuffer API is causing the issue.
I tried using alternative solution by creating a custom writable stream
and piping it with sharp Duplex stream
but it ends up with the same problem.
I don't understand if I'm missing anything here or it's a bug.
Sharing the code below (I have removed unwanted code to make it compact) -
const { Writable } = require("stream");
const { createServer } = require("http");
const { readFileSync } = require("fs");
const sharp = require("sharp");
/**
 * Resizes a JPEG buffer to 2000x798 at quality 85 and returns metadata
 * for both the original and the resized image.
 *
 * @param {Buffer} input_buffer - Raw bytes of the source JPEG.
 * @returns {Promise<{normalisedImage: object, originalImage: object}>}
 *          sharp metadata objects, each with a `quality` field attached.
 */
async function resizeJpeg(input_buffer) {
  // Response object: metadata of the source image and of the resized output.
  const file = { normalisedImage: null, originalImage: null };
  // Sharp instance wrapping the original image buffer.
  const image = sharp(input_buffer);
  // Record the original image's metadata alongside the quality we encode at.
  file.originalImage = await image.metadata();
  file.originalImage.quality = 85;
  // Resize and re-encode to an in-memory JPEG buffer.
  // NOTE(review): steady RSS growth here is typically glibc heap
  // fragmentation from libvips allocations rather than a JS-level leak —
  // sharp's docs recommend sharp.cache(false), sharp.concurrency(1), or
  // running under jemalloc; confirm against the sharp documentation.
  // APPROACH 1 (SHARP toBuffer API)
  const buffer = await image.resize(2000, 798).jpeg({ quality: 85 }).toBuffer();
  // APPROACH 1 ENDS
  // APPROACH 2 (CUSTOM WRITABLE STREAM)
  // const buffer = await sharpToBuffer(image.resize(2000, 798).jpeg({ quality: 85 }));
  // APPROACH 2 ENDS
  // Metadata of the resized output, read back from the new buffer.
  file.normalisedImage = await sharp(buffer).metadata();
  file.normalisedImage.quality = 85;
  return file;
}
/**
 * Collects a readable stream into a single Buffer.
 *
 * Fix: the original created a Promise whose `reject` was never wired up and
 * attached no "error" listener, so any stream error left the promise pending
 * forever (and the chunks retained). Errors now reject the promise.
 *
 * @param {import("stream").Readable} readable - Stream to drain.
 * @returns {Promise<Buffer>} concatenation of all emitted chunks.
 */
async function sharpToBuffer(readable) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readable
      .on("data", (chunk) => chunks.push(chunk))
      .on("end", () => resolve(Buffer.concat(chunks)))
      .on("error", reject); // previously errors were silently dropped
  });
}
/**
 * Minimal Writable that buffers every chunk it receives.
 * Consumers read `bufferChunks` (public) after the "finish" event and
 * typically join them with Buffer.concat.
 */
class WriteStream extends Writable {
  constructor() {
    super();
    // Chunks collected in arrival order; concatenated by the consumer.
    this.bufferChunks = [];
  }

  // Standard Writable hook: stash the chunk and signal readiness for more.
  _write(chunk, _encoding, callback) {
    this.bufferChunks.push(chunk);
    callback();
  }
}
// HTTP server: every request (except favicon) resizes the sample image and
// returns a JSON summary of the original and resized metadata.
createServer(async (request, response) => {
  // Ignore browser favicon calls.
  if (request.url.indexOf("favicon.ico") > -1) { return response.end(); }
  try {
    // Trigger resize and pass the buffer of the original image file.
    // readFileSync blocks the event loop, but keeps this repro simple;
    // the file is deliberately re-read on every request.
    const { normalisedImage, originalImage } = await resizeJpeg(readFileSync(`${__dirname}/30mb.jpg`));
    // Respond with stringified JSON.
    response.end(
      JSON.stringify({
        normalisedImage: { size: normalisedImage.size, width: normalisedImage.width, height: normalisedImage.height, quality: normalisedImage.quality },
        originalImage: { size: originalImage.size, width: originalImage.width, height: originalImage.height, quality: originalImage.quality }
      }, null, 4));
  } catch (err) {
    // Fix: the async handler previously had no error handling, so any
    // failure (e.g. missing 30mb.jpg) became an unhandled promise rejection
    // and the request never completed. Surface a 500 instead.
    response.statusCode = 500;
    response.end(JSON.stringify({ error: err.message }));
  }
}).listen(3000, () => console.log("server started"));
As you can see, resizeJpeg function
has both the approaches implemented.
To make this work, you just need to make sure a 30mb.jpg
file exists in the same directory.
The image I used is available here
In case you're using Linux, the following command
can be used to monitor the memory usage of the process, assuming the file name is so.js
-
ps -ef | grep 'so.js' | awk '{print $2}' | sed 's/.*/-p &/' | xargs top -c
来源:https://stackoverflow.com/questions/58066655/nodejs-sharp-node-package-memory-consumption-issue