Node.js HTTP response streams

The response object from an HTTP request is an instance of a readable stream. Therefore, you collect the data with the 'data' event, then use it when the 'end' event fires.

// Buffer the entire response body, then use it once 'end' fires.
const http = require('http');
let body = '';

http.get(url, (res) => {
  // Decode chunks as UTF-8 strings up front; concatenating raw Buffers
  // via implicit coercion can corrupt multi-byte characters that are
  // split across chunk boundaries.
  res.setEncoding('utf8');
  res.on('data', (chunk) => {
    body += chunk;
  });
  res.on('end', () => {
    // all data has been downloaded
  });
}).on('error', (err) => {
  // Without an 'error' listener a failed request would crash the process.
  console.error(err.message);
});

readable.pipe(dest) would do essentially the same thing, if body in the example above were a writable stream.


Nowadays the recommended way of piping is the pipeline function. It forwards errors between the streams and destroys every stream in the chain when one of them fails, which prevents the dangling streams and memory leaks that plain pipe can cause.

const { createReadStream } = require('fs');
const { pipeline } = require('stream');
const { createServer, get } = require('http');

// pipeline's completion callback: receives the error from whichever
// stream in the chain failed, or a falsy value on success.
const errorHandler = (err) => err && console.log(err.message);

const server = createServer((_, response) => {
  // Write the status line BEFORE piping: once pipeline starts flushing
  // body data the headers are sent, and a later writeHead() would throw
  // ERR_HTTP_HEADERS_SENT.
  response.writeHead(200);
  pipeline(createReadStream(__filename), response, errorHandler);
}).listen(8080);

get('http://localhost:8080', (response) => {
  pipeline(response, process.stdout, errorHandler);
  // Shut the server down once the client's response stream closes.
  response.on('close', () => server.close());
});

Another way of doing it, which gives you more control, is to use an async iterator:

/**
 * Consume an HTTP response body with a for-await async iterator,
 * logging each chunk as it arrives and the accumulated length at the end.
 * @param {import('http').IncomingMessage} response - readable stream of body chunks
 * @returns {Promise<void>} resolves once the stream has been fully consumed
 */
async function handler(response) {
  let body = '';
  for await (const chunk of response) {
    const text = chunk.toString();
    console.log(text);
    body += text;
  }
  console.log(body.length);
  // NOTE(review): relies on a module-level `server` (from the previous
  // example) being in scope when this runs.
  server.close();
}

// Kick off the request; any rejection from the handler is surfaced as a warning.
get('http://localhost:8080', (response) => {
  handler(response).catch(console.warn);
});