Sawtaytoes
12/14/2018 - 11:59 AM

Simple Node.js Transform Stream

A simple Node.js Transform stream that reads a large file in chunks, uppercases each chunk, and writes the result to a new file. Node.js streams natively handle backpressure when using the .pipe method, which keeps the memory footprint low and gives you fine-grained control when processing large amounts of data.

const fs = require('fs')
const { Transform } = require('stream')

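// Transform stream that uppercases each chunk of text as it passes through.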
const uppercaseChunk = (
  new Transform({
    transform(
      chunk,
      encoding,
      callback,
    ) {
      const stringifiedChunk = (
        chunk
        .toString()
      )

      this
      .push(
        stringifiedChunk
        .toUpperCase()
      )

      callback()
    },
  })
)

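// Read the novel from disk in chunks instead of loading it all into memory.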
const readStream = (
  fs
  .createReadStream('./novel.txt')
)

const writeStream = (
  fs
  .createWriteStream('./uppercaseNovel.txt')
)

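// .pipe connects the streams and manages backpressure automatically.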
readStream
.pipe(uppercaseChunk)
.pipe(writeStream)

// novel.txt
// ... (lots of text) lorem ipsum

// uppercaseNovel.txt
// ... (lots of uppercase text) LOREM IPSUM
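
One caveat: .pipe doesn't forward errors between streams, so a failed read or write can go unnoticed. Here's a minimal sketch of the same flow using Node's built-in stream.pipeline (available since Node 10), which propagates errors from any stream in the chain and cleans everything up when it's done:

const { pipeline } = require('stream')

pipeline(
  fs.createReadStream('./novel.txt'),
  uppercaseChunk,
  fs.createWriteStream('./uppercaseNovel.txt'),
  (error) => {
    // Called once: with an error if any stream in the
    // chain failed, or with nothing once writing finished.
    if (error) {
      console.error('Pipeline failed.', error)
    }
  },
)

Same backpressure behavior as .pipe, but with a single place to handle errors instead of attaching 'error' listeners to each stream.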