It seems to me that an elegant way to process certain kinds of data in Node.js would be to chain processing objects, like UNIX pipes.
For example, a grep-like filter.
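Node's built-in streams already chain this way; as one concrete, runnable illustration, piping stdin through gzip to stdout:

    var zlib = require('zlib')

    // Each pipe() returns its destination stream, so stages
    // chain left-to-right just like a shell pipeline.
    process.stdin.pipe(zlib.createGzip()).pipe(process.stdout)

The question is how to write such a stage, like that grep filter, yourself.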
Although the existing answer is nice, it still requires a bit of digging around on the part of those looking for answers.
The following code completes the example the OP gave, using the Node 0.10 stream API.
    var stream = require('stream')
    var util = require('util')

    // A Transform stream that passes through only the chunks
    // matching the given pattern, like a crude grep
    function Grep(pattern) {
        stream.Transform.call(this)
        this.pattern = pattern
    }
    util.inherits(Grep, stream.Transform)

    Grep.prototype._transform = function(chunk, encoding, callback) {
        var string = chunk.toString()
        if (string.match(this.pattern)) {
            this.push(chunk)
        }
        callback()
    }

    var grep = new Grep(/foo/)

    process.stdin.pipe(grep)
    grep.pipe(process.stdout)
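Since pipe() returns its destination, the stages also chain inline, shell-style. For example, a rough equivalent of `grep foo | grep bar` (note that this Grep matches per chunk rather than per line, so it only approximates the real tool):

    process.stdin
        .pipe(new Grep(/foo/))
        .pipe(new Grep(/bar/))
        .pipe(process.stdout)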
You are so very close.
Because you are using the very low-level Stream class, you need to set the stream's writable property to make it a writable stream. (If you were reading from the stream, you'd likewise need to set the readable property.) Also, the end event doesn't take any arguments.
    stream = require 'stream'

    class Forwarder extends stream.Stream
      constructor: ->
        super()
        @writable = true   # mark this as a valid pipe() destination

      # Re-emit every written chunk, acting as a pass-through
      write: (chunk, encoding) ->
        @emit 'data', chunk

      end: ->
        @emit 'end'

    fwd = new Forwarder()
    fwd.pipe(process.stdout)
    process.stdin.pipe(fwd)
    process.stdin.resume()
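For anyone not writing CoffeeScript, here is a plain JavaScript sketch of the same v1-style forwarder (again, this is the old Node <= 0.8 API):

    var stream = require('stream')
    var util = require('util')

    function Forwarder() {
        stream.Stream.call(this)
        this.writable = true // required so pipe() will write into us
    }
    util.inherits(Forwarder, stream.Stream)

    // Re-emit whatever is written, making this a pass-through
    Forwarder.prototype.write = function(chunk, encoding) {
        this.emit('data', chunk)
        return true // no back-pressure needed
    }

    Forwarder.prototype.end = function() {
        this.emit('end')
    }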
The answer above applies to v1 streams in Node <= 0.8. If you are using a later version, Node has added more specific stream classes that are designed to be extended, so you would use something more like this:
    stream = require 'stream'

    class Forwarder extends stream.Transform
      # Pass every chunk through unchanged
      _transform: (chunk, encoding, callback) ->
        @push chunk
        callback()
Process the chunk in _transform and push only the pieces you actually want.
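As a sketch of what that can look like (in plain JavaScript against the post-0.8 API; LineGrep and its line-buffering logic are illustrative, not part of the original answer), here is a transform that splits incoming chunks into lines and pushes only the matching ones:

    var stream = require('stream')
    var util = require('util')

    function LineGrep(pattern) {
        stream.Transform.call(this)
        this.pattern = pattern
        this.buffered = '' // holds any partial line between chunks
    }
    util.inherits(LineGrep, stream.Transform)

    LineGrep.prototype._transform = function(chunk, encoding, callback) {
        this.buffered += chunk.toString()
        var lines = this.buffered.split('\n')
        this.buffered = lines.pop() // the last piece may be an incomplete line
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].match(this.pattern)) {
                this.push(lines[i] + '\n') // push only the pieces we want
            }
        }
        callback()
    }

    LineGrep.prototype._flush = function(callback) {
        // flush a trailing line that had no final newline
        if (this.buffered && this.buffered.match(this.pattern)) {
            this.push(this.buffered + '\n')
        }
        callback()
    }

    process.stdin.pipe(new LineGrep(/foo/)).pipe(process.stdout)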