FileReader seems to consume all the memory as it is repeatedly used to preload multiple blobs, and never frees it. Any known way to force it to release consumed memory? Setting
Try it like this instead:
function sliceMe() {
    var file = document.getElementById('file').files[0],
        fr = new FileReader(),
        chunkSize = 2097152,
        chunks = Math.ceil(file.size / chunkSize),
        chunk = 0;

    function loadNext() {
        var start, end,
            // Prefer the standard slice() where available, falling back to the old vendor-prefixed methods.
            blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;

        start = chunk * chunkSize;
        end = start + chunkSize >= file.size ? file.size : start + chunkSize;

        fr.onload = function() {
            if (++chunk < chunks) {
                //console.info(chunk);
            }
        };
        fr.onloadend = function(e) {
            if (chunk < chunks) {
                loadNext(); // chain the next read only after the previous one has fully finished
            }
        };
        fr.readAsBinaryString(blobSlice.call(file, start, end));
    }

    loadNext();
}
The onloadend handler will keep your reads from overlapping... (Obviously, you can handle the increment a little more cleanly, but you get the idea...)
The bug has been marked as INVALID, since it turned out that I wasn't in fact re-using the FileReader object properly.
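Not re-using it properly meant something along these lines, where a brand new FileReader (with fresh handlers) gets created for every single chunk (a simplified sketch, not the exact original code; the function name is made up):

function sliceMeBadly() {
    var file = document.getElementById('file').files[0],
        chunkSize = 2097152,
        chunks = Math.ceil(file.size / chunkSize),
        chunk = 0;

    function loadNext() {
        // A fresh reader per chunk: each one (and its result string) can hang
        // around until garbage collection, which is what appeared to hog memory.
        var fr = new FileReader(),
            blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
            start = chunk * chunkSize,
            end = start + chunkSize >= file.size ? file.size : start + chunkSize;

        fr.onload = function() {
            if (++chunk < chunks) {
                loadNext();
            }
        };
        fr.readAsBinaryString(blobSlice.call(file, start, end));
    }

    loadNext();
}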
Here is a pattern that doesn't hog memory or CPU:
function sliceMe() {
    var file = document.getElementById('file').files[0],
        fr = new FileReader(),
        chunkSize = 2097152,
        chunks = Math.ceil(file.size / chunkSize),
        chunk = 0;

    function loadNext() {
        var start, end,
            // Prefer the standard slice() where available, falling back to the old vendor-prefixed methods.
            blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;

        start = chunk * chunkSize;
        end = start + chunkSize >= file.size ? file.size : start + chunkSize;

        fr.onload = function() {
            if (++chunk < chunks) {
                //console.info(chunk);
                loadNext(); // queue the next chunk only from inside onload, re-using the same reader
            }
        };
        fr.readAsBinaryString(blobSlice.call(file, start, end));
    }

    loadNext();
}
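Neither snippet actually does anything with the data, but each chunk is available as fr.result (or e.target.result) inside the onload handler before the next read starts, so a consumer (an uploader, a hash, whatever) slots in there. For example (processChunk is just a placeholder for that consumer, not a real function):

fr.onload = function(e) {
    // e.target.result is the current chunk as a binary string
    // (or an ArrayBuffer if readAsArrayBuffer is used instead).
    processChunk(e.target.result); // placeholder consumer
    if (++chunk < chunks) {
        loadNext();
    }
};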
Another bug report has been filed: https://bugzilla.mozilla.org/show_bug.cgi?id=681479. It is related, but it is not the culprit in this case.
Thanks to Kyle Huey for bringing this to my attention :)