Concatenating 2 streams:
Given readable streams stream1 and stream2, what's an idiomatic (concise) way to get a stream containing them in sequence, stream1 followed by stream2?
This can be done with vanilla Node.js:
import { PassThrough } from 'stream'

const merge = (...streams) => {
  const pass = new PassThrough()
  let waiting = streams.length
  for (const stream of streams) {
    stream.pipe(pass, { end: false })
    // End the PassThrough only after every source stream has ended.
    stream.once('end', () => --waiting === 0 && pass.end())
  }
  return pass
}
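A quick usage sketch (stream1 and stream2 are placeholders for any readable streams). Note that because every input is piped into the same PassThrough as its data arrives, this merges the inputs and their chunks can interleave, rather than being strictly concatenated:
import { createReadStream, createWriteStream } from 'fs'

const stream1 = createReadStream('file1.txt')
const stream2 = createReadStream('file2.txt')

merge(stream1, stream2).pipe(createWriteStream('merged.txt'))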
The combined-stream package concatenates streams. Example from the README:
var CombinedStream = require('combined-stream');
var fs = require('fs');
var combinedStream = CombinedStream.create();
combinedStream.append(fs.createReadStream('file1.txt'));
combinedStream.append(fs.createReadStream('file2.txt'));
combinedStream.pipe(fs.createWriteStream('combined.txt'));
I believe you have to append all streams at once. If the queue runs empty, the combinedStream
automatically ends. See issue #5.
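If memory serves, the README also shows a deferred form, where a stream is constructed only when the combined stream gets to it (file names here are placeholders):
var CombinedStream = require('combined-stream');
var fs = require('fs');

var combinedStream = CombinedStream.create();
combinedStream.append(function(next) {
  // The callback form defers creating the stream until it is needed.
  next(fs.createReadStream('file1.txt'));
});
combinedStream.append(function(next) {
  next(fs.createReadStream('file2.txt'));
});
combinedStream.pipe(fs.createWriteStream('combined.txt'));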
The stream-stream library is an alternative that has an explicit .end, but it's much less popular and presumably not as well-tested. It uses the streams2 API of Node 0.10 (see this discussion).
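From my reading of its README, usage looks roughly like the sketch below; you write streams into it and call .end() yourself when there are no more. Treat the exact API as an assumption and check the README before relying on it:
var concat = require('stream-stream');
var fs = require('fs');

var ss = concat();
ss.write(fs.createReadStream('file1.txt'));
ss.write(fs.createReadStream('file2.txt'));
ss.end(); // explicitly signal that no more streams will be written

ss.pipe(fs.createWriteStream('combined.txt'));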
You might be able to make it more concise, but here's one that works:
var util = require('util');
var EventEmitter = require('events').EventEmitter;

function ConcatStream(streamStream) {
  EventEmitter.call(this);
  var isStreaming = false,
      streamsEnded = false,
      that = this;
  var streams = [];

  // Queue each incoming stream, paused until it reaches the front.
  streamStream.on('stream', function(stream) {
    stream.pause();
    streams.push(stream);
    ensureState();
  });

  streamStream.on('end', function() {
    streamsEnded = true;
    ensureState();
  });

  var ensureState = function() {
    if (isStreaming) return;
    if (streams.length === 0) {
      // Nothing queued: emit 'end' once the stream of streams is done.
      if (streamsEnded)
        that.emit('end');
      return;
    }
    // Start draining the stream at the front of the queue.
    isStreaming = true;
    streams[0].on('data', onData);
    streams[0].on('end', onEnd);
    streams[0].resume();
  };

  var onData = function(data) {
    that.emit('data', data);
  };

  var onEnd = function() {
    // Current stream finished: detach, drop it, and move on to the next one.
    isStreaming = false;
    streams[0].removeAllListeners('data');
    streams[0].removeAllListeners('end');
    streams.shift();
    ensureState();
  };
}
util.inherits(ConcatStream, EventEmitter);
We keep track of state with streams (the queue of streams; push to the back and shift from the front), isStreaming, and streamsEnded. When we get a new stream, we push it, and when a stream ends, we stop listening and shift it. When the stream of streams ends, we set streamsEnded.
On each of these events, we check the state we're in. If we're already streaming (piping a stream), we do nothing. If the queue is empty and streamsEnded is set, we emit the end event. If there is something in the queue, we resume it and listen to its events.
Note that pause and resume are advisory, so some streams may not behave correctly and would require buffering. This exercise is left to the reader.
Having done all of this, I would handle the n=2 case by constructing an EventEmitter, creating a ConcatStream with it, and emitting two stream events followed by an end event. I'm sure it could be done more concisely, but we may as well use what we've got.
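A minimal sketch of that wiring (stream1 and stream2 stand in for any readable streams):
var EventEmitter = require('events').EventEmitter;

var streamStream = new EventEmitter();
var concatenated = new ConcatStream(streamStream);

concatenated.on('data', function(chunk) {
  // consume chunks from stream1, then stream2
});
concatenated.on('end', function() {
  // both streams are done
});

streamStream.emit('stream', stream1);
streamStream.emit('stream', stream2);
streamStream.emit('end');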
If you don't care about the ordering of data in the streams, a simple reduce operation should be fine in Node.js!
const { PassThrough } = require('stream')

// s0..sN are placeholders for the readable streams to join.
let joined = [s0, s1, s2, ...sN].reduce((pt, s, i, a) => {
  s.pipe(pt, { end: false })
  // End the PassThrough once every source has ended
  // (readableEnded requires Node 12.9+).
  s.once('end', () => a.every(s => s.readableEnded) && pt.end())
  return pt
}, new PassThrough())
Cheers ;)
streamee.js is a set of stream transformers and composers based on Node 1.0+ streams and includes a concatenate method:
var stream1ThenStream2 = streamee.concatenate([stream1, stream2]);
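Piping the result then works like any other readable stream (a small sketch; stream1, stream2, and the output file name are placeholders):
var streamee = require('streamee');
var fs = require('fs');

var stream1ThenStream2 = streamee.concatenate([stream1, stream2]);
stream1ThenStream2.pipe(fs.createWriteStream('combined.txt'));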
https://github.com/joepie91/node-combined-stream2 is a drop-in Streams2-compatible replacement for the combined-stream module described above. It automatically wraps Streams1 streams.
Example code for combined-stream2:
var CombinedStream = require('combined-stream2');
var fs = require('fs');
var combinedStream = CombinedStream.create();
combinedStream.append(fs.createReadStream('file1.txt'));
combinedStream.append(fs.createReadStream('file2.txt'));
combinedStream.pipe(fs.createWriteStream('combined.txt'));