Logo Questions Linux Laravel Mysql Ubuntu Git Menu
 

How do I write a Node.js module to handle an incoming piped stream

I'm trying to write a Node module that accepts an incoming piped binary (or base-64-encoded) stream, but frankly I don't even know where to start. I can't see any examples in the Node docs about handling incoming streams; I only see examples of consuming them.

Say for example I want to be able to do this:

// Build the asset, then stream the image file into it and wait for completion.
var asset = new ProjectAsset('myFile', __dirname + '/image.jpg')
var source = fs.createReadStream(__dirname + '/image.jpg', { encoding: 'base64' })
var stream = source.pipe(asset)
stream.on('finish', function () {
    done()
})

I've gotten ProjectAsset looking like this, but I'm at a loss of where to go next:

'use strict'

var stream = require('stream'),
    util = require('util')

// Incomplete sketch of a pipe destination: it inherits from the legacy
// stream.Stream base class and listens for 'pipe', but it implements no
// write()/end() methods, so piped data is never actually captured.
var ProjectAsset = function() {
    var self = this

    // Declares only the property shell (no value or getter), so
    // binaryData starts out undefined until something assigns it.
    Object.defineProperty(self, 'binaryData', {
        configurable: true,
        writable: true
    })

    stream.Stream.call(self)

    self.on('pipe', function(src) {
        // does it happen here? how do I set self.binaryData?
        // NOTE(review): 'pipe' only announces the source stream; the data
        // itself arrives via write() calls on this destination, not here.
    })

    return self
}

util.inherits(ProjectAsset, stream.Stream)

// Export the constructor plus a default file name constant.
module.exports = ProjectAsset
module.exports.DEFAULT_FILE_NAME = 'file'
like image 698
brandonscript Avatar asked Feb 09 '23 00:02

brandonscript


2 Answers

It is possible to inherit from stream.Stream and make it work, however based on what's available in the documentation I would suggest inheriting from stream.Writable. To pipe into a stream.Writable you'll need to have _write(chunk, encoding, done) defined to handle the piping. Here is an example:

// Pipe the file into the asset; once the writable side finishes,
// the collected chunks are available on asset.binaryData.
var asset = new ProjectAsset('myFile', __dirname + '/image.jpg')
var source = fs.createReadStream(__dirname + '/image.jpg', { encoding: 'base64' })
var stream = source.pipe(asset)
stream.on('finish', function () {
    console.log(asset.binaryData);
})

Project Asset

'use strict'

var stream = require('stream'),
    util = require('util')

// Writable stream that buffers everything piped into it.
// After the 'finish' event, the full payload is available as a single
// Buffer on `data`, and the raw chunks on `binaryData`.
var ProjectAsset = function() {
    var self = this

    // Final concatenated Buffer; populated once the stream finishes.
    self.data = null
    // Raw Buffer chunks collected while the source is piping in.
    self.binaryData = [];

    stream.Writable.call(self)

    // Called by the Writable machinery for every chunk piped in.
    self._write = function(chunk, encoding, done) {
        // Keep chunks as Buffers: Buffer.concat() below requires an array
        // of Buffers and throws if handed strings (the original pushed
        // chunk.toString(), which crashed on 'finish').
        self.binaryData.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding))
        // Signal that this chunk has been fully processed.
        done()
    }
    self.on('finish', function() {
        // All writes have been flushed; assemble the complete payload.
        self.data = Buffer.concat(self.binaryData)
    })

    return self
}

util.inherits(ProjectAsset, stream.Writable)

// Export the constructor plus a default file name constant.
module.exports = ProjectAsset
module.exports.DEFAULT_FILE_NAME = 'file'

If you're looking to also read from the stream, take a look at inheriting from stream.Duplex and also including the _read(size) method.

There's also the simplified constructors api if you're doing something simpler.

like image 114
pohlman Avatar answered Feb 10 '23 23:02

pohlman


I'm not sure if this is exactly what you were looking for, but I think you could handle it using the Buffer API with Buffer.concat on an array of buffers, where each buffer is the chunk the destination stream receives.

'use strict'

var stream = require('stream'),
    util = require('util');

// Legacy stream.Stream destination that buffers everything piped into it.
// After end(), the full payload is exposed on `binaryData` as a
// latin1/'binary'-decoded string.
var ProjectAsset = function() {
    var self = this

    Object.defineProperty(self, 'binaryData', {
        configurable: true,
        writable: true
    })

    stream.Stream.call(self)

    // Buffers collected from the source; joined once the stream ends.
    var dataBuffer = [];

    // A classic stream.Stream pipe target receives data through
    // write()/end() calls — it never gets 'data' events of its own —
    // so it must advertise writability and implement both methods.
    // (The original listened for 'data'/'end' on itself, which never
    // fire, and read `data` synchronously in the constructor while it
    // was still undefined, throwing a TypeError on construction.)
    self.writable = true

    self.write = function(chunk) {
        dataBuffer.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk))
        return true
    }

    self.end = function(chunk) {
        if (chunk !== undefined && chunk !== null) {
            self.write(chunk)
        }
        // Only now is the complete payload available.
        self.binaryData = Buffer.concat(dataBuffer).toString('binary')
        self.writable = false
        self.emit('finish')
        self.emit('end')
    }

    return self
}

util.inherits(ProjectAsset, stream.Stream)

// Export the constructor plus a default file name constant.
module.exports = ProjectAsset
module.exports.DEFAULT_FILE_NAME = 'file'
like image 22
Binvention Avatar answered Feb 11 '23 01:02

Binvention