// Node.js streams
// From: http://codewinds.com/blog/2013-08-04-nodejs-readable-streams.html
// Simple example of reading a file and echoing it to stdout:
var fs = require('fs');
var readStream = fs.createReadStream('../sources/bible.txt');
readStream.pipe(process.stdout);
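// Note: pipe() does not forward errors from the source stream, so it is worth
// attaching an 'error' listener yourself. A minimal sketch, reusing the
// readStream created above:
readStream.on('error', function (err) {
  // fires if, for example, the file does not exist or cannot be read
  console.error('read error:', err.message);
});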
// ====================================================================================
// Creating a SHA-1 digest of a file and echoing the result to stdout (similar to shasum):
var crypto = require('crypto');
var fs = require('fs');
var readStream = fs.createReadStream('../sources/bible.txt');
var hash = crypto.createHash('sha1');
readStream
  .on('data', function (chunk) {
    // console.log(chunk.length);
    hash.update(chunk);
  })
  .on('end', function () {
    console.log(hash.digest('hex'));
  });
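// Attaching a 'data' handler puts the stream into flowing ("push") mode, so chunks
// arrive as fast as the file can be read. If each chunk needs slow or async
// processing, pause() and resume() give you backpressure. A minimal sketch
// (setTimeout stands in for real async work; same bible.txt path assumed):
var slowStream = fs.createReadStream('../sources/bible.txt');
slowStream
  .on('data', function (chunk) {
    slowStream.pause();           // stop further 'data' events while we work
    setTimeout(function () {
      // ... process chunk here ...
      slowStream.resume();        // ready for the next chunk
    }, 10);
  })
  .on('end', function () {
    console.log('done');
  });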
// ====================================================================================
// With Node.js 0.10+ there is a better way to consume streams.
// The Readable interface makes it easier to work with streams,
// especially when you want to do other things between creating a stream and consuming it.
// These newer Readable streams are "pull" streams: you request the data
// when you are ready for it, rather than having the data pushed to you.
var crypto = require('crypto');
var fs = require('fs');
var readStream = fs.createReadStream('../sources/bible.txt');
var hash = crypto.createHash('sha1');
readStream
  .on('readable', function () {
    var chunk;
    while (null !== (chunk = readStream.read())) {
      hash.update(chunk);
    }
  })
  .on('end', function () {
    console.log(hash.digest('hex'));
  });
// The key to understanding this example is that with the new streams2 Readable interface,
// a 'readable' event is emitted as soon as data is available to be read,
// and you can then call .read() to pull chunks of it.
// Once no more data is buffered, .read() returns null,
// and another 'readable' event fires when more data becomes available.
// This continues until the end of the file, when 'end' is emitted as before.
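// ====================================================================================
// Side note (not from the original article): in Node.js 0.10+ crypto.Hash is itself
// a stream, so the same digest can be produced with pipe() alone.
// A minimal sketch, assuming the same bible.txt path:
var crypto = require('crypto');
var fs = require('fs');
var hash = crypto.createHash('sha1');
hash.setEncoding('hex');           // make the readable side emit hex strings
fs.createReadStream('../sources/bible.txt').pipe(hash).pipe(process.stdout);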