Decode streams into strings The Right Way(tm)

var fs   = require('fs')
var zlib = require('zlib')
var strs = require('stringstream')

var utf8Stream = fs.createReadStream('massiveLogFile.gz')
  .pipe(zlib.createGunzip())
  .pipe(strs('utf8'))

No need to deal with setEncoding() weirdness; just compose streams the way they were meant to be composed!
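
For comparison, a rough sketch of the closest thing without stringstream: you have to grab the last stream in the chain and call setEncoding() on it, rather than expressing the decode step as just another .pipe() (this reuses the same hypothetical massiveLogFile.gz as above):

var fs   = require('fs')
var zlib = require('zlib')

var gunzipped = fs.createReadStream('massiveLogFile.gz')
  .pipe(zlib.createGunzip())

// setEncoding() mutates the readable side in place instead of composing a new stream
gunzipped.setEncoding('utf8')

gunzipped.on('data', function(str) {
  // str is now a utf8 string chunk rather than a Buffer
})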

Handles input and output encoding:

// Stream from utf8 to hex to base64... Why not, ay.
var hex64Stream = fs.createReadStream('myFile')
  .pipe(strs('utf8', 'hex'))
  .pipe(strs('hex', 'base64'))

Also deals with base64 output correctly by aligning each emitted data chunk so that there are no dangling = characters:

var stream = fs.createReadStream('myFile').pipe(strs('base64'))

var base64Str = ''

stream.on('data', function(data) { base64Str += data })
stream.on('end', function() {
  console.log('My base64 encoded file is: ' + base64Str) // Wouldn't work with setEncoding()
  console.log('Original file is: ' + Buffer.from(base64Str, 'base64')) // Buffer.from replaces the deprecated new Buffer()
})
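
To see why the alignment matters: base64 encodes 3-byte groups, so naively encoding arbitrary chunk boundaries on their own leaves = padding in the middle of the concatenated string. A quick illustration with plain Buffers (no stringstream involved):

var chunk1 = Buffer.from('hell')     // 4 bytes, not a multiple of 3
var chunk2 = Buffer.from('o world')

// Encoding each chunk independently puts padding mid-string,
// so the concatenation isn't valid base64 for the combined bytes
var naive = chunk1.toString('base64') + chunk2.toString('base64')
console.log(naive)   // 'aGVsbA==byB3b3JsZA=='

// Keeping chunk boundaries on 3-byte multiples (what stringstream does for you)
// means the emitted chunks concatenate into clean base64
var aligned = Buffer.concat([chunk1, chunk2]).toString('base64')
console.log(aligned) // 'aGVsbG8gd29ybGQ='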