I'm trying to understand Node streams and their life cycle. I want to split the content of a stream into n parts. The code below is just to illustrate my intent.
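Something like the sketch below; splitIntoParts is a made-up helper name, just to show the idea of buffering a stream and splitting its content into n parts:

const { Readable } = require('stream');

// hypothetical helper: buffer a stream's chunks, then split them into n parts
function splitIntoParts(stream, n) {
  return new Promise((resolve, reject) => {
    const items = [];
    stream.on('data', chunk => items.push(chunk));
    stream.on('error', reject);
    stream.on('end', () => {
      const size = Math.ceil(items.length / n);
      const parts = [];
      for (let i = 0; i < items.length; i += size) {
        parts.push(items.slice(i, i + size));
      }
      resolve(parts);
    });
  });
}

// `Readable.from()` (Node 12+) creates an object-mode stream from an iterable
const source = Readable.from([1, 2, 3, 4, 5, 6]);
splitIntoParts(source, 2).then(parts => console.log(parts)); // [ [ 1, 2, 3 ], [ 4, 5, 6 ] ]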
I think this could help you out: https://github.com/substack/stream-handbook
It's an impressively detailed handbook covering many streaming scenarios, with runnable code in its /examples directory. I'm using it as a reference for my own project and have found it useful so far.
Using ReadableStream
You could use a single function to check whether the elements of the current chunk are equal to the expected result. Create variables CHUNK and N, where CHUNK is the number of elements to slice from the original array, and N is incremented by CHUNK at each .enqueue() call within pull().
const [data, CHUNK, result] = [[1,2,3,4,5,6], 3, []];
let N = 0;
const stream = new ReadableStream({
  pull(controller) {
    if (N < data.length)
      // slice the next `CHUNK` elements from `data`, advancing `N`
      controller.enqueue(data.slice(N, N += CHUNK));
    else
      // once `N` reaches `data.length`, call `.close()` on the stream
      controller.close();
  }
});
const reader = stream.getReader();
const processData = ({value, done}) => {
  // if the stream is closed, return `result`; `reader.closed` returns a `Promise`
  if (done) return reader.closed.then(() => result);
  // compare the chunk against the expected slice of `data`; use `result.length`
  // rather than `N`, since `pull()` can run ahead of the reads and advance `N` early
  const expected = data.slice(result.length, result.length + value.length);
  if (expected.every((n, index) => n === value[index])) {
    console.log(`value: [${value}]`);
    result.push(...value);
    return reader.read().then(processData);
  }
  // reject so an unexpected chunk reaches `.catch()` instead of resolving with `undefined`
  return Promise.reject(new Error(`unexpected chunk: [${value}]`));
};
const readComplete = res => console.log(`result: [${res}]`);
reader.read()
.then(processData)
.then(readComplete)
.catch(err => console.log(err));
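With the validation above, both chunks match and the expected output is:

value: [1,2,3]
value: [4,5,6]
result: [1,2,3,4,5,6]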
Using chained .then()
const [data, CHUNK, result] = [[1,2,3,4,5,6], 3, []];
let N = 0;
const stream = new ReadableStream({
  pull(controller) {
    if (N < data.length)
      // slice the next `CHUNK` elements from `data`, advancing `N`
      controller.enqueue(data.slice(N, N += CHUNK));
    else
      // once `N` reaches `data.length`, call `.close()` on the stream
      controller.close();
  }
});
const reader = stream.getReader();
const readComplete = res => console.log(`result: [${res}]`);
reader.read()
.then(({value, done}) => {
  // first chunk: expect `[1,2,3]`
  if ([1,2,3].every((n, index) => n === value[index])) {
    console.log(`value: [${value}]`);
    result.push(...value);
    return reader.read();
  }
})
.then(({value, done}) => {
  // second chunk: expect `[4,5,6]`
  if ([4,5,6].every((n, index) => n === value[index])) {
    console.log(`value: [${value}]`);
    result.push(...value);
    // return `result` once the stream closes; `reader.closed` returns a `Promise`
    return reader.closed.then(() => result);
  }
})
.then(readComplete)
.catch(err => console.log(err));
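For comparison, the same sequential read can be written with async/await instead of recursion or manual .then() chaining. This is just a sketch; it assumes the same stream and result setup as above, with a freshly acquired reader (run it in place of the chained reads, not after them):

async function readAll() {
  // loop until `controller.close()` has been called and the queue is drained
  while (true) {
    const {value, done} = await reader.read();
    if (done) return result;
    result.push(...value);
  }
}

readAll().then(res => console.log(`result: [${res}]`));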
See also Chrome memory issue - File API + AngularJS