I want to use an HTML multiple file input and save the files to Cloudant. When I do this, the code completes before the files are processed and ready to send. Also, I am finding that every file that is processed looks the same in base64.
I have tried a few methods and have been studying promises, but I still have not been able to get the order of operations to work.
In the code below, the HTML uses a multiple file input bound to "fileName". The console.log for "doc1" and "doc2" runs before the console.log for "blob".
function sendFile() {
  var doc = {};
  doc._attachments = {};
  doc._id = "testing::" + new Date().getTime();
  doc.type = "testing attachment";
  var blob = null;
  var url = fileName;
  url._rawValue.forEach(function (item) {
    console.log("item: ", item);
    fetch(item)
      .then((r) => r.blob())
      .then((b) => {
        blob = b;
        console.log("blob: ", blob);
        return getBase64(blob);
      })
      .then((blob2) => {
        console.log(blob2);
        let name = item.name;
        doc._attachments[name] = {};
        doc._attachments[name].content_type = item.type;
        doc._attachments[name].data = blob2.split(",")[1];
      });
    console.log("doc1: ", doc);
  });
  let doc2 = doc;
  console.log("doc2: ", doc2);
  api({
    method: "POST",
    url: "/webdata",
    data: doc,
  })
    .then((response) => {
      console.log("result: ", response);
      alert("Test has been submitted!");
    })
    .catch((e) => {
      console.log("e: ", e);
      alert(e);
    });
  console.log("finished send test");
}
CodePudding user response:
There are a couple of problems. First, .forEach() is not promise-aware, so it will just run to completion without waiting for any of your asynchronous operations to finish. Second, you have multiple asynchronous operations that are not chained together, so even a single iteration of your loop does not run sequentially.
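To illustrate the first point, here is a minimal, self-contained sketch (not your code) showing that .forEach() fires off all of its callbacks and returns immediately, so anything after it runs before the asynchronous work completes:
// Minimal illustration: forEach does not wait for async callbacks
[1, 2, 3].forEach(async (n) => {
  await new Promise((resolve) => setTimeout(resolve, 10));
  console.log("inside:", n);
});
console.log("after forEach"); // logs first, before any "inside:" line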
The simplest thing here would be to use async/await so you can sequence your asynchronous operations:
async function sendFile() {
  try {
    const url = fileName;
    const doc = {};
    doc._attachments = {};
    doc._id = "testing::" + new Date().getTime();
    doc.type = "testing attachment";
    for (let item of url._rawValue) {
      console.log("item: ", item);
      const r = await fetch(item);
      const blob = await r.blob();
      console.log("blob: ", blob);
      // getBase64() returns a promise, so it must be awaited too
      const blob2 = await getBase64(blob);
      console.log(blob2);
      let name = item.name;
      doc._attachments[name] = {};
      doc._attachments[name].content_type = item.type;
      doc._attachments[name].data = blob2.split(",")[1];
      console.log("doc1: ", doc);
    }
    const response = await api({
      method: "POST",
      url: "/webdata",
      data: doc,
    });
    console.log("result: ", response);
  } catch (e) {
    console.log(e);
    throw e; // throw error so caller can see the error
  }
  console.log("finished send test");
}
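This assumes getBase64() returns a promise that resolves with a data URL (e.g. "data:image/png;base64,...."), which is why the result is split on ",". If you don't already have such a helper, a minimal sketch wrapping FileReader in a promise could look like the following; only the name matches your call, the implementation is an assumption:
// Assumed helper: wraps FileReader in a promise and resolves with a data URL
function getBase64(blob) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => resolve(reader.result);
    reader.onerror = () => reject(reader.error);
    reader.readAsDataURL(blob);
  });
}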
You could also run the items in parallel, as shown below, since none of them seem to depend upon previous items. The one danger here is that if url._rawValue is a large array, you might overwhelm the target server with too many requests at once. That obviously depends upon your data size and the target server. Running them serially (the code block above) is the safer option if you don't know the limits of the target server or if the array could be large:
async function sendFile() {
  try {
    const url = fileName;
    const doc = {};
    doc._attachments = {};
    doc._id = "testing::" + new Date().getTime();
    doc.type = "testing attachment";
    await Promise.all(url._rawValue.map(async (item) => {
      console.log("item: ", item);
      const r = await fetch(item);
      const blob = await r.blob();
      console.log("blob: ", blob);
      const blob2 = await getBase64(blob);
      console.log(blob2);
      let name = item.name;
      doc._attachments[name] = {};
      doc._attachments[name].content_type = item.type;
      doc._attachments[name].data = blob2.split(",")[1];
      console.log("doc1: ", doc);
    }));
    const response = await api({
      method: "POST",
      url: "/webdata",
      data: doc,
    });
    console.log("result: ", response);
  } catch (e) {
    console.log(e);
    throw e; // throw error so caller can see the error
  }
  console.log("finished send test");
}
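If you want some parallelism but are worried about firing too many requests at once, a middle ground is to process the files in small batches. This is only a sketch of the idea; the helper name and batch size are arbitrary choices, not part of your code:
// Sketch: build up doc._attachments in batches to cap concurrent fetches
async function addAttachmentsInBatches(items, doc, batchSize = 3) {
  for (let i = 0; i < items.length; i += batchSize) {
    const batch = items.slice(i, i + batchSize);
    await Promise.all(batch.map(async (item) => {
      const r = await fetch(item);
      const blob = await r.blob();
      const dataUrl = await getBase64(blob);
      doc._attachments[item.name] = {
        content_type: item.type,
        data: dataUrl.split(",")[1],
      };
    }));
  }
}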
Notes/Issues:
- Your original code assigned doc2 as the same object as doc, but never used it, so I removed it.
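Since sendFile() now rethrows, the caller decides how to report the error. A small usage example (mirroring the alerts from your original code) could be:
// Example caller: handle success/failure here because sendFile() rethrows
sendFile()
  .then(() => alert("Test has been submitted!"))
  .catch((e) => alert(e));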