How to read the content of a JSON file uploaded to Google Cloud Storage using Node.js

There is a convenient method, download, for downloading a file into memory or to a local destination. You can use it as follows:

const {Storage} = require('@google-cloud/storage');

const bucketName = 'bucket name here';
const fileName = 'file name here';
const storage = new Storage();
const file = storage.bucket(bucketName).file(fileName);

file.download(function(err, contents) {
  console.log("file err: " + err);
  console.log("file data: " + contents);
});
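Since the contents come back as a Buffer, you can parse the JSON directly in the callback. A minimal sketch with basic error handling (nothing here beyond what the download callback already gives you):

file.download(function(err, contents) {
  if (err) {
    console.error('download failed: ' + err);
    return;
  }
  // contents is a Buffer, so decode it before parsing the JSON payload.
  const data = JSON.parse(contents.toString('utf8'));
  console.log(data);
});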

I was using the createReadStream method like the other answers, but I had a problem with the output: it would randomly contain invalid replacement characters (�) in place of some characters in a string. I suspected an encoding problem (concatenating stream chunks as strings can corrupt multi-byte UTF-8 characters that happen to be split across chunk boundaries).

So I came up with a workaround that uses the download method instead. The download method returns a DownloadResponse, which is an array whose first element is a Buffer holding the file contents. We then call Buffer.toString() with the 'utf8' encoding and parse the result with JSON.parse().

const {Storage} = require('@google-cloud/storage');

const downloadAsJson = async (bucket, path) => {
  const [contents] = await new Storage()
    .bucket(bucket)
    .file(path)
    .download();
  return JSON.parse(contents.toString('utf8'));
};
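Usage could then look something like this (the bucket and object names are just placeholders):

downloadAsJson('my-bucket', 'data/settings.json')
  .then(data => console.log(data))
  .catch(err => console.error('Failed to read JSON: ' + err));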

I've used the following code to read a JSON file from Cloud Storage:

    'use strict';
    const {Storage} = require('@google-cloud/storage');
    const storage = new Storage();

    exports.readFile = (req, res) => {
        console.log('Reading File');
        var archivo = storage.bucket('your-bucket').file('your-JSON-file').createReadStream();
        console.log('Concat Data');
        var buf = '';
        archivo.on('data', function(d) {
            buf += d;
        }).on('end', function() {
            console.log(buf);
            console.log("End");
            res.send(buf);
        });
    };

I'm reading from a stream and concatenating all the data in the file into the buf variable.
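Note that appending each chunk to a string can hit the invalid-character issue mentioned above when a multi-byte UTF-8 character is split across two chunks. If you want to keep the streaming approach, one way around that is to collect the raw Buffer chunks and decode them only once at the end. A minimal sketch (readFileSafe is just an illustrative name, using the same placeholder bucket and file):

'use strict';
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

exports.readFileSafe = (req, res) => {
  const stream = storage.bucket('your-bucket').file('your-JSON-file').createReadStream();
  const chunks = [];
  stream
    .on('data', chunk => chunks.push(chunk))   // keep each chunk as a Buffer
    .on('error', err => res.status(500).send(String(err)))
    .on('end', () => {
      // Decode once, so multi-byte characters split across chunks stay intact.
      const buf = Buffer.concat(chunks).toString('utf8');
      res.send(buf);
    });
};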

Hope it helps.

UPDATE

To read multiple files:

'use strict';
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

listFiles();

async function listFiles() {
    const bucketName = 'your-bucket';
    console.log('Listing objects in a Bucket');
    const [files] = await storage.bucket(bucketName).getFiles();
    files.forEach(file => {
        console.log('Reading: ' + file.name);
        var archivo = file.createReadStream();
        console.log('Concat Data');
        var buf = '';
        archivo.on('data', function(d) {
            buf += d;
        }).on('end', function() {
            console.log(buf);
            console.log("End");
        });
    });
}
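If you would rather avoid stream events entirely, a variant that calls the download method on each listed object and parses everything as JSON could look like the sketch below (readAllJson is a made-up name and error handling is minimal):

'use strict';
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function readAllJson(bucketName) {
    const [files] = await storage.bucket(bucketName).getFiles();
    // Download every object in parallel and parse each one as JSON.
    return Promise.all(files.map(async file => {
        const [contents] = await file.download();
        return JSON.parse(contents.toString('utf8'));
    }));
}

readAllJson('your-bucket')
    .then(objects => console.log(objects))
    .catch(err => console.error(err));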