I have a Cloud Function written in Node.js that reads data from BigQuery, converts it to CSV, and exports it to a Google Cloud Storage bucket.
Currently it executes and returns a 200, but does not run any of the code within my try/catch.
When testing it just returns:
Function execution took x ms. Finished with status code: 200
I've attempted to debug by adding console logs at various points, but it doesn't log anything - it just returns a 200.
// Client setup (assumed; not shown in the original snippet).
// bucketName and fields are defined elsewhere in the file.
const { BigQuery } = require("@google-cloud/bigquery");
const { Storage } = require("@google-cloud/storage");
const { parse } = require("json2csv");
const stream = require("stream");

const bigquery = new BigQuery();
const storage = new Storage();

exports.run_checks = (req, res) => {
  "use strict";
  let parsedBody = req.body;
  let startCount = parsedBody.start;
  let endCount = parsedBody.end;

  (async function () {
    try {
      for (let i = startCount; i < endCount; i += 1) {
        // Exclude overly large files here
        if (i != 100) {
          const query = `SELECT *
            FROM \`bq_dataset\`
            WHERE id_number = ${i}`;
          const options = {
            query: query,
            location: "europe-west2",
          };

          const [job] = await bigquery.createQueryJob(options);
          console.log(`Job ${job.id} started.`);
          const [rows] = await job.getQueryResults();

          let id = rows[0].id;
          const createFile = storage.bucket(bucketName).file(`${id}.csv`);
          const csv = parse(rows, { fields });

          const dataStream = new stream.PassThrough();
          dataStream.push(csv);
          dataStream.push(null);

          await new Promise((resolve, reject) => {
            console.log(`Writing ${id} to GCS`);
            dataStream
              .pipe(
                createFile.createWriteStream({
                  resumable: false,
                  validation: false,
                  metadata: { "Cache-Control": "public, max-age=31536000" },
                })
              )
              .on("error", (error) => {
                console.error("Stream failed", error);
                reject(error);
              })
              .on("finish", () => {
                resolve(true);
              });
          });
        }
      }
      res.status(200).send();
    } catch (err) {
      res.send(err);
    }
  })();
};
CodePudding user response:
Your function is not async. The work runs inside an IIFE that you never await, so the handler returns immediately and the host has no idea you are still doing something; that is why it finishes with a 200 and none of your logs appear.
Make the arrow function itself async; there is no need for the IIFE, so remove it (or await it, which is just as important).
exports.run_checks = async (req, res) => {
  "use strict";
  let parsedBody = req.body;
  let startCount = parsedBody.start;
  let endCount = parsedBody.end;

  try {
    for (let i = startCount; i < endCount; i += 1) {
      // Exclude overly large files here
      if (i != 100) {
        const query = `SELECT *
          FROM \`bq_dataset\`
          WHERE id_number = ${i}`;
        const options = {
          query: query,
          location: "europe-west2",
        };

        const [job] = await bigquery.createQueryJob(options);
        console.log(`Job ${job.id} started.`);
        const [rows] = await job.getQueryResults();

        let id = rows[0].id;
        const createFile = storage.bucket(bucketName).file(`${id}.csv`);
        const csv = parse(rows, { fields });

        const dataStream = new stream.PassThrough();
        dataStream.push(csv);
        dataStream.push(null);

        await new Promise((resolve, reject) => {
          console.log(`Writing ${id} to GCS`);
          dataStream
            .pipe(
              createFile.createWriteStream({
                resumable: false,
                validation: false,
                metadata: { "Cache-Control": "public, max-age=31536000" },
              })
            )
            .on("error", (error) => {
              console.error("Stream failed", error);
              reject(error);
            })
            .on("finish", () => {
              resolve(true);
            });
        });
      }
    }
    res.status(200).send();
  } catch (err) {
    res.send(err);
  }
};
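For completeness, the "await it" option mentioned above would look like the sketch below; the loop body is identical to the corrected code and is elided here with a comment.

exports.run_checks = async (req, res) => {
  "use strict";
  // Awaiting the IIFE keeps the handler alive until the work inside finishes.
  await (async function () {
    // ... same try/catch body as above: run the BigQuery job, convert the rows
    // to CSV, stream the file to the bucket, then res.status(200).send() ...
  })();
};

Either way, the key point is that the handler must not return (explicitly or implicitly) before the asynchronous work has settled.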