I have a list of site URLs in the request body, and I want to parse all of the links asynchronously. Once all of the async tasks have finished I want to return a response, but my response is always empty because none of the async tasks have finished by the time I send it.
var index = 0;
let items = [];

exports.feed = async (req, res) => {
  (async () => {
    await getSiteData(req.body.sites)
  })();

  if (typeof items !== 'undefined' && items.length > 0) {
    res.status(200).json(items);
  } else {
    res.status(404).json({ error: "Something went wrong", success: false });
  }
}
async function getSiteData(list) {
  try {
    if (index == list.length - 1) {
      return items;
    }
    await parser.parseURL(list[index], async function(err, feed) {
      if (err != null) {
        console.log(err);
        return false;
      }
      items.push(feed.items)
      index++;
      await getSiteData(list);
    });
  } catch (err) {
    console.log(err);
    return false;
  }
}
CodePudding user response:
Firstly, there's no recursion required. It (almost?) never makes sense to recursively process a flat list (Array).

Your main issue is that parser.parseURL does NOT return a Promise, therefore awaiting it makes no sense, since await only waits for Promises to settle.
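To illustrate the general point with a hypothetical callback-style function (readData here is made up, it is not the actual parser): awaiting a value that isn't a Promise just hands it straight back, so nothing waits for the callback.

// Hypothetical callback-style API: it returns undefined, not a Promise.
function readData(callback) {
  setTimeout(() => callback(null, 'some data'), 100);
}

(async () => {
  // await on a non-Promise "resolves" immediately with that value,
  // so execution does not wait for the callback to fire.
  const result = await readData((err, data) => console.log('callback:', data));
  console.log('result:', result); // "result: undefined" is logged before "callback: some data"
})();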
So, let's fix that by creating a "promisified" parser.parseURL, which is easy with Node's util.promisify (parseURL's callback already follows the standard error-first (err, result) convention that promisify expects):
const { promisify } = require('util');
const parseURL = promisify(parser.parseURL);
Now you can await parseURL(url).

If you need to call parser.parseURL in series, i.e. one at a time:
async function getSiteData(list) {
  const result = [];
  for (const url of list) {
    const { items } = await parseURL(url);
    result.push(items);
  }
  return result;
}
To call parser.parseURL in parallel:
function getSiteData(list) {
  return Promise.all(list.map(parseURL));
}
Some people think that this should be
async function getSiteData(list) {
  return await Promise.all(list.map(parseURL));
}
I'm not one of those people. Their argument is that getSiteData returns a Promise, so it should be marked as such, plus some malarkey about IDE hints or some such garbage. While the code is equivalent (not identical, of course), the extra async/await really isn't necessary in my opinion.
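One detail worth noting about the parallel version above: as written it resolves to an array of whole feed objects, while the series version collects only each feed's items. A minimal sketch of a parallel variant that keeps the same shape as the series version:

function getSiteData(list) {
  // Map each URL to a Promise for just its items, so the resolved
  // array matches the series version (an array of item arrays).
  return Promise.all(list.map(async (url) => {
    const { items } = await parseURL(url);
    return items;
  }));
}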
Note: getSiteData (in either series or parallel form) now returns a Promise that will resolve to an array of results. Furthermore, neither version does any error handling; this is deliberate, so the function that calls getSiteData can decide what to do with errors.
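If, say, you wanted individual feed failures to be skipped instead of failing the whole request, one possible approach is Promise.allSettled (a sketch of the parallel form, assuming Node 12.9+; this is just one option, not the only way to handle errors):

async function getSiteData(list) {
  // allSettled never rejects; each entry reports its own outcome,
  // so one bad feed does not discard the results of the others.
  const results = await Promise.allSettled(list.map(parseURL));
  return results
    .filter((r) => r.status === 'fulfilled')
    .map((r) => r.value.items);
}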
The other issue is in your feed export:
(async () => {
  await getSiteData(req.body.sites)
})();
//
// the code down here runs without waiting for getSiteData to complete
This runs the asynchronous IIFE, but the code after it does not wait for the IIFE to complete.
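A minimal sketch of that ordering, with a plain setTimeout standing in for getSiteData purely for illustration:

(async () => {
  await new Promise((resolve) => setTimeout(resolve, 100)); // pretend this is getSiteData
  console.log('IIFE finished');
})();
console.log('this logs first, before the IIFE above has finished');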
You could
await (async () => {
  await getSiteData(req.body.sites)
})();
But then, that's just
await getSiteData(req.body.sites);
without the IIFE.
Since getSiteData now returns the list (rather than using a "global" or external array to hold it), the code is now:
exports.feed = async (req, res) => {
  try {
    const items = await getSiteData(req.body.sites);
    res.status(200).json(items);
  } catch (err) {
    res.status(404).json({ error: "Something went wrong", success: false });
  }
};
CodePudding user response:
I fixed my issue by using a for loop and a Promise.
exports.feed = async (req, res) => {
  var items = [];
  for (const elem of req.body.sites) {
    const newElement = await getSiteData(elem)
    items.push(newElement)
  }
  if (typeof items !== 'undefined' && items.length > 0) {
    res.status(200).json(items);
  } else {
    res.status(404).json({ error: "Something went wrong", success: false });
  }
}
function getSiteData(url) {
  return new Promise((resolve, reject) => {
    parser.parseURL(url, function(err, feed) {
      if (err != null) {
        // settle the Promise on error too, otherwise the awaiting loop hangs forever
        reject(err);
        return;
      }
      resolve(feed.items)
    })
  });
}