Azure Functions - NodeJS - Response Body as a Stream

Unfortunately we don't have streaming support implemented in NodeJS just yet - it's on the backlog: https://github.com/Azure/azure-webjobs-sdk-script/issues/1361

If you're not tied to NodeJS and are open to using a C# function instead, you can use the storage sdk object directly in your input bindings and stream request output, instead of using the intermediate object approach.


I tried @Doug's solution from the last comment above, with a few minor mods in my azure function, and so far, after trying 20 different ideas, this is the only one that actually delivered the file to the browser! Thank you, @Doug...


const fs = require("fs");
const stream = require("stream");

...

                // Load the private Azure blob wrapper and connect with the caller-supplied
                // connection string. NOTE(review): '@[my_private_artifact]' is a placeholder
                // for a private package name — confirm against the actual artifact feed.
                const AzureBlob = require('@[my_private_artifact]/azure-blob-storage');
                const azureStorage = new AzureBlob(params.connectionString);


                //Override the write to store the value to our "contents" <-- Doug's solution
                // Accumulate every streamed chunk into one growing Uint8Array so the whole
                // blob can be handed to the response body at once. This reallocates and
                // copies the entire buffer on each chunk (O(n^2) total work) — fine for
                // small blobs, expensive for large ones.
                var outputStream = new stream.Writable();
                outputStream.contents = new Uint8Array(0);//Initialize contents.

                outputStream._write = function (chunk, encoding, done) {
                    var curChunk = new Uint8Array(chunk);
                    // Allocate a buffer big enough for the old contents plus this chunk,
                    // copy both in, then swap it into place.
                    var tmp = new Uint8Array(this.contents.byteLength + curChunk.byteLength);
                    tmp.set(this.contents, 0);
                    tmp.set(curChunk, this.contents.byteLength);
                    this.contents = tmp;
                    done();
                };

                // Download the blob into the in-memory stream; the returned result object
                // is assumed to carry at least `size` — TODO confirm against the wrapper's API.
                let azureSpeedResult = await azureStorage.downloadBlobToStream(params.containerName, params.objectId, outputStream);

                let headers = {
                    "Content-Length": azureSpeedResult.size,
                    "Content-Type": mimeType
                };

                // For explicit downloads, force a save-as dialog using the original file name.
                if (params.action == "download") {
                    headers["Content-Disposition"] = "attachment; filename=" + params.fileName;
                } 

                // isRaw keeps the Functions host from re-encoding the binary body.
                context.res = {
                    status: 200,
                    headers: headers,
                    isRaw: true,
                    body: outputStream.contents
                };

                context.done();
...

While @Matt Manson's answer is definitely correct based on the way I asked my question, the following code snippet might be more useful for someone who stumbles across this question.

While I can't send the Stream to the response body directly, I can use a custom stream which captures the data into a Uint8Array, and then sends that to the response body.

NOTE: If the file is REALLY big, this will use a lot of memory.

'use strict';
const   azure = require('azure-storage'),
        stream = require('stream');
const BLOB_CONTAINER = 'deContainer';

module.exports = function(context){
    var file = context.bindingData.file;
    var blobService = azure.createBlobService();
    var outputStream = new stream.Writable();
    outputStream.contents = new Uint8Array(0);//Initialize contents.

    //Override the write to store the value to our "contents"
    outputStream._write = function (chunk, encoding, done) {
        var curChunk = new Uint8Array(chunk);
        var tmp = new Uint8Array(this.contents.byteLength + curChunk.byteLength);
        tmp.set(this.contents, 0);
        tmp.set(curChunk, this.contents.byteLength);
        this.contents = tmp;
        done();
    };


    blobService.getBlobToStream(BLOB_CONTAINER, file, outputStream, function(error, serverBlob) {
        if(error) {
            FileNotFound(context);
        } else {
            context.res = {
                status: 200,
                headers: {

                },
                isRaw: true,
                body : outputStream.contents
            };
            context.done();
        }
    });//*/
}

/**
 * Writes a 404 JSON "not found" response onto the Azure Functions context
 * and signals completion of the invocation.
 * @param {object} context - invocation context; receives context.res and has
 *   its done() callback invoked.
 */
function FileNotFound(context){
    const notFoundResponse = {
        status: 404,
        headers: { "Content-Type" : "application/json" },
        body : { "Message" : "No esta aqui!" }
    };
    context.res = notFoundResponse;
    context.done();
}