Home > Enterprise > Convert Firebase Function to Async/Await
Convert Firebase Function to Async/Await

Time:03-11

The code below writes only the header row to the CSV file (the output). I assume this is because the "get" requests within the function are asynchronous, so the function does not wait for the data to be retrieved before writing the file. How can I refactor this to await the result of the second forEach loop?

// Question's version, shown for reference. Known defect (the subject of the
// question): the nested Firestore .get() promises fired inside forEach are
// never awaited, so `utilText` still holds only the header row when the
// archive is finalized. The `+` concatenation operators, stripped from the
// original paste, have been restored below.
newApp.get('/createUtilCSV', (req, res) => {

    try {
        // Temp file name encodes the requested date range.
        const tempFilePath = path.join(os.tmpdir(), "Util_" + req.query.start + "<->" + req.query.end + ".csv");

        var fromDate = new Date(req.query.start);
        var toDate = new Date(req.query.end);

        var fromD = admin.firestore.Timestamp.fromDate(fromDate);
        var toD = admin.firestore.Timestamp.fromDate(toDate);

        firestore.collection('utilisation')
            .where("timestamp", ">=", fromD)
            .where("timestamp", "<=", toD)
            .get()
            .then(snapshot => {

                var output = fs.createWriteStream(tempFilePath);
                var archive = archiver('zip', {
                    gzip: true,
                    zlib: {
                        level: 9
                    }
                });

                var utilText = "App Version,Author,Building,City,Country,Date,Floor ID,Floor Name, Floor Region,Bookable,Business Unit,Capacity,Fixed,Furniture Height,Occupancy,Position X,Position Y,Restricted,Status Date,Status Type,Unique Ref,Workspace Type,Report ID\n";
                let utils = snapshot.docs;
                console.log("util info: " + utils);
                utils.forEach(doc => {
                    var line = doc.data();
                    var utilLine = "";
                    utilLine = utilLine + line.appVersion + ",";
                    utilLine = utilLine + line.authorEmail + ",";
                    utilLine = utilLine + line.building + ",";
                    utilLine = utilLine + line.city + ",";
                    utilLine = utilLine + line.country + ",";
                    utilLine = utilLine + line.timestamp.toDate() + ",";
                    utilLine = utilLine + line.floorId + ",";
                    utilLine = utilLine + line.floorName + ",";
                    utilLine = utilLine + line.region + ",";

                    // BUG: this promise is not awaited; the forEach (and the
                    // archiving code below) continues before any workspace
                    // rows are appended to utilText.
                    firestore.collection('utilisation')
                    .doc(line.documentID)
                    .collection('workspaces')
                    .get()
                    .then( snap => {
                            let workspaces = snap.docs;
                            workspaces.forEach(space => {

                            var lineSpace = space.data();
                            var spaceAdd = utilLine;
                            spaceAdd = spaceAdd + lineSpace.bookable + ",";
                            spaceAdd = spaceAdd + lineSpace.businessUnit + ",";
                            spaceAdd = spaceAdd + lineSpace.capacity + ",";
                            spaceAdd = spaceAdd + lineSpace.fixed + ",";
                            spaceAdd = spaceAdd + lineSpace.height + ",";
                            spaceAdd = spaceAdd + lineSpace.occupancy + ",";
                            spaceAdd = spaceAdd + lineSpace.positionX + ",";
                            spaceAdd = spaceAdd + lineSpace.positionY + ",";
                            spaceAdd = spaceAdd + lineSpace.restricted + ",";
                            spaceAdd = spaceAdd + lineSpace.statusDate.toDate() + ",";
                            spaceAdd = spaceAdd + lineSpace.statusType + ",";
                            spaceAdd = spaceAdd + lineSpace.uniqueRef + ",";
                            spaceAdd = spaceAdd + lineSpace.workspaceType + ",";
                            spaceAdd = spaceAdd + line.documentID + "\n";
                            utilText = utilText + spaceAdd;
                        });
                    });
                });
                try {

                    // listen for all archive data to be written
                    // 'close' event is fired only when a file descriptor is involved
                    output.on('close', function() {
                        console.log(archive.pointer() + ' total bytes');
                        console.log('archiver has been finalized and the output file descriptor has closed.');
                        console.log("Util Text: " + utilText);

                        bucket.upload(tempFilePath, {
                            make_public: true,
                            gzip: true,
                            destination: tempFilePath,

                            metadata: metadata,
                        });

                        // Let's get the signed URL (valid for 24 hours).
                        const file = admin.storage().bucket().file(tempFilePath);
                        console.log('File: ' + file);
                        var expiryDate = new Date();
                        expiryDate.setDate(expiryDate.getDate() + 1);
                        file.getSignedUrl({
                            action: 'read',
                            expires: expiryDate
                        }).then(urls => {
                            const signedUrl = urls[0];
                            console.log('Send: ' + signedUrl);
                            res.redirect(signedUrl);
                            res.end();
                        });
                    });

                    output.on('end', function() {
                        console.log('Data has been drained');
                    });

                    // good practice to catch warnings (ie stat failures and other non-blocking errors)
                    archive.on('warning', function(err) {
                        if (err.code === 'ENOENT') {
                            // log warning
                        } else {
                            // throw error
                            throw err;
                        }
                    });

                    // good practice to catch this error explicitly
                    archive.on('error', function(err) {
                        throw err;
                    });
                    const metadata = {
                        contentType: "application/zip",
                    };
                    archive.pipe(output);
                    // append a file from string
                    archive.append(utilText, {
                        name: tempFilePath
                    });
                    archive.finalize();
                    console.log("tempFilePath: " + tempFilePath);

                    console.log("Export Util CSV finished");
                } catch (err) {
                    console.log(err);
                }
            })
            .catch(err => console.log(err));
    } catch (error) {
        console.log("Export error:" + error);
    }
});

CodePudding user response:

You could enclose it in an async function and use await. If you want to read the documents in sequence, you cannot use forEach. Use a modern for … of loop instead, in which await works as expected:

const utilisation = async () => {
    // Build the range query and await the snapshot in one step.
    const snapshot = await db.collection('utilisation')
        .where("timestamp", ">=", fromD)
        .where("timestamp", "<=", toD)
        .get();

    // ... some code

    // for...of keeps `await` usable inside the loop body (forEach would not).
    for (const doc of snapshot.docs) {
        console.log(doc.data());
        // ... some code
    }
};

utilisation();

or you can even use .map:

const utilisation = async () => {
    // Await the range query directly rather than holding the ref first.
    const snapshot = await db.collection('utilisation')
        .where("timestamp", ">=", fromD)
        .where("timestamp", "<=", toD)
        .get();

    // Flatten each snapshot document into a plain object carrying its id.
    const documents = snapshot.docs.map((doc) => ({ id: doc.id, ...doc.data() }));
    // ... some code

    for (const doc of documents) {
        console.log(doc);
        // ... some code
    }
};

utilisation();

Starting from the code above, you can refactor your own function the same way — including the second, nested loop.

CodePudding user response:

Thanks to all those with advice. I have refined this code and the following works now as expected:

/**
 * Builds a CSV of utilisation data for the day given in req.query.date,
 * zips it, uploads it to Cloud Storage, and redirects the caller to a
 * signed download URL (valid for 24 hours).
 *
 * Fixes applied: the `+` string-concatenation operators (stripped from the
 * original paste) were restored, and the 'close' handler now awaits the
 * bucket upload before redirecting, so the signed URL cannot be followed
 * before the object exists.
 *
 * @param {object} req - Express request; reads query params `date` and `city`.
 * @param {object} res - Express response; ends with a redirect to the signed URL.
 */
async function utilCSV(req, res) {

    const tempFilePath = path.join(os.tmpdir(), "Util_" + req.query.date + ".csv");

    // Bound the query to the whole of the requested day.
    // NOTE(review): "YYYY-MM-DD hh:mm:ss" is non-ISO Date parsing — engine- and
    // timezone-dependent; confirm it behaves as intended in this runtime.
    const minDate = new Date(req.query.date + " 00:00:00");
    const maxDate = new Date(req.query.date + " 23:59:59");

    const minTimestamp = admin.firestore.Timestamp.fromDate(minDate);
    const maxTimestamp = admin.firestore.Timestamp.fromDate(maxDate);

    const city = req.query.city; // NOTE(review): currently unused — confirm intent.

    var output = fs.createWriteStream(tempFilePath);
    var archive = archiver('zip', {
        gzip: true,
        zlib: {
            level: 9
        }
    });

    // Await the day's utilisation documents before building any rows.
    const snapshot = await firestore.collection("utilisation")
        .where("timestamp", ">", minTimestamp)
        .where("timestamp", "<", maxTimestamp)
        .get();

    var utilText = "App Version,Author,Building,City,Country,Date,Floor ID,Floor Name, Floor Region,Bookable,Business Unit,Capacity,Fixed,Furniture Height,Occupancy,Position X,Position Y,Restricted,Status Date,Status Type,Unique Ref,Workspace Type,Report ID\n";
    let utils = snapshot.docs;
    console.log("util info: " + utils);

    // for...of (not forEach) so the nested await finishes before the next
    // document is processed — this is what makes the CSV complete.
    for (const doc of utils) {
        var line = doc.data();
        var utilLine = "";
        utilLine = utilLine + line.appVersion + ",";
        utilLine = utilLine + line.authorEmail + ",";
        utilLine = utilLine + line.building + ",";
        utilLine = utilLine + line.city + ",";
        utilLine = utilLine + line.country + ",";
        utilLine = utilLine + line.timestamp.toDate() + ",";
        utilLine = utilLine + line.floorId + ",";
        utilLine = utilLine + line.floorName + ",";
        utilLine = utilLine + line.region + ",";

        // One CSV row per workspace in this utilisation document.
        let spacesRef = firestore.collection('utilisation')
            .doc(line.documentID)
            .collection('workspaces');
        const snap = await spacesRef.get();

        let workspaces = snap.docs;
        for (const space of workspaces) {

            var lineSpace = space.data();
            var spaceAdd = utilLine; // start each row from the shared prefix
            spaceAdd = spaceAdd + lineSpace.bookable + ",";
            spaceAdd = spaceAdd + lineSpace.businessUnit + ",";
            spaceAdd = spaceAdd + lineSpace.capacity + ",";
            spaceAdd = spaceAdd + lineSpace.fixed + ",";
            spaceAdd = spaceAdd + lineSpace.height + ",";
            spaceAdd = spaceAdd + lineSpace.occupancy + ",";
            spaceAdd = spaceAdd + lineSpace.positionX + ",";
            spaceAdd = spaceAdd + lineSpace.positionY + ",";
            spaceAdd = spaceAdd + lineSpace.restricted + ",";
            spaceAdd = spaceAdd + lineSpace.statusDate.toDate() + ",";
            spaceAdd = spaceAdd + lineSpace.statusType + ",";
            spaceAdd = spaceAdd + lineSpace.uniqueRef + ",";
            spaceAdd = spaceAdd + lineSpace.workspaceType + ",";
            spaceAdd = spaceAdd + line.documentID + "\n";
            utilText = utilText + spaceAdd;
        }
    }
    console.log("Complete string: " + utilText);
    try {
        const metadata = {
            contentType: "application/zip",
        };

        // listen for all archive data to be written
        // 'close' event is fired only when a file descriptor is involved
        output.on('close', async function () {
            console.log(archive.pointer() + ' total bytes');
            console.log('archiver has been finalized and the output file descriptor has closed.');
            console.log("Util Text: " + utilText);

            // Await the upload so the redirect below cannot race it.
            await bucket.upload(tempFilePath, {
                make_public: true, // NOTE(review): GCS upload option is `public` — confirm this key.
                gzip: true,
                destination: tempFilePath,
                metadata: metadata,
            });

            // Signed URL expiring 24 hours from now.
            const file = admin.storage().bucket().file(tempFilePath);
            console.log('File: ' + file);
            var expiryDate = new Date();
            expiryDate.setDate(expiryDate.getDate() + 1);
            const urls = await file.getSignedUrl({
                action: 'read',
                expires: expiryDate
            });
            const signedUrl = urls[0];
            console.log('Send: ' + signedUrl);
            res.redirect(signedUrl);
            res.end();
        });

        output.on('end', function () {
            console.log('Data has been drained');
        });

        // good practice to catch warnings (ie stat failures and other non-blocking errors)
        archive.on('warning', function (err) {
            if (err.code === 'ENOENT') {
                // non-blocking (e.g. stat failure) — ignore and continue
            } else {
                throw err;
            }
        });

        // good practice to catch this error explicitly
        archive.on('error', function (err) {
            throw err;
        });

        archive.pipe(output);
        // append the whole CSV from the in-memory string
        archive.append(utilText, {
            name: tempFilePath
        });
        archive.finalize();
        console.log("tempFilePath: " + tempFilePath);

        console.log("Export Util CSV finished");
    } catch (err) {
        console.log(err);
    }
}

  • Related