I want to read the values of key, access, path, bucket and bucketPath and use them in the JSON file test.json.
I have a function that reads the content of configuration.json and attempts to write to test.json.
Currently, I am able to write the value of bucket, but I get the changed/new value plus null for the rest of the JSON.
I want to always return the new values along with the other objects in the file. Also, in cases where bucket already has a value, I want it replaced by whatever is read from configuration.json.
How can I fix this, and how can I change the values for the rest: access, key, path and bucketPath?
index.js
const fs = require("fs").promises;

// Read and parse a JSON file; logs and returns undefined if parsing fails.
async function readJSON(filePath) {
  const data = await fs.readFile(filePath);
  try {
    return JSON.parse(data);
  } catch (err) {
    console.log(err);
  }
}
(async () => {
  // Values read from configuration.json
  const credentials = await readJSON("./configuration.json");
  const path = credentials.path;
  const bucket = credentials.bucket;
  const access = credentials.access;
  const key = credentials.key;
  const bucketPath = credentials.bucketPath;

  const data = await readJSON("./test.json");
  const finalJSON = data.data?.map((x) => {
    if (x.type == "s3 credentials") return { ...x, bucket };
  });

  await fs.writeFile(
    "./test.json",
    JSON.stringify({
      data: finalJSON
    })
  );
})();
test.json
{
  "label": "storage record",
  "data": [{
      "id": "8902uw",
      "type": "config",
      "values": {
        "access": "$access",
        "key": "$key"
      }
    },
    {
      "id": "893wh002jei",
      "type": "s3 credentials",
      "bucket": ""
    },
    {
      "id": "90yueps",
      "type": "upload",
      "input": "localhost: `$path`"
    },
    {
      "id": "9028901",
      "type": "change",
      "name": "Adjust data",
      "measure": [{
          "t": "setter"
        },
        {
          "p": "filename",
          "to": "$join([\"$bucketPath\", data])"
        }
      ],
      "fixed": ""
    }
  ]
}
configuration.json
{
  "key": "880082",
  "access": "793082",
  "path": "/store",
  "bucket": "testBucket",
  "bucketPath": "/record"
}
Currently, when I run this, I get:
{
null,
"data": [{
null,
null,
null,
null
{
"id": "893wh002jei",
"type": "s3 credentials",
"bucket": ""
},
{
null,
null,
null
]
}
CodePudding user response:
Might this be a solution?
const fs = require('fs');
const fileName = './file.json';
const file = require(fileName);

file.key = "new value";

fs.writeFile(fileName, JSON.stringify(file), function writeJSON(err) {
  if (err) return console.log(err);
  console.log(JSON.stringify(file));
  console.log('writing to ' + fileName);
});
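Note that require() caches the loaded file for the lifetime of the process, so re-reading the same file later with require will return the old data. Here is a minimal sketch of the same read-modify-write idea using fs.promises instead (updateKey and the file name are just placeholders):

const fs = require('fs').promises;

// Read a JSON file, change one field, and write it back.
async function updateKey(fileName, newValue) {
  const file = JSON.parse(await fs.readFile(fileName, 'utf8'));
  file.key = newValue;
  await fs.writeFile(fileName, JSON.stringify(file, null, 2));
}

updateKey('./file.json', 'new value').catch(console.log);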
CodePudding user response:
[Updated answer]
From your comment:
it's the same question. So when I run what I have, I get null for the other objects. I want test.json to remain the same just with updated values.
const testObj = await readJSON("./test.json");

const finalJSON = {
  ...testObj,
  data: testObj.data?.map((x) => {
    if (x.type === 's3 credentials') {
      return { ...x, bucket };
    } else {
      return x;
    }
  })
};

// With the code above, finalJSON is now a clone of the original test.json
// content, plus the updated bucket value.

await fs.writeFile(
  "./test.json",
  JSON.stringify(finalJSON)
);
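The remaining values (access, key, path and bucketPath) sit in differently shaped objects in test.json, so each type needs its own handling inside the same map. This is only a sketch of one possible approach, assuming the placeholder strings ($access, $key, $path, $bucketPath) appear literally as in the test.json you posted:

const testObj = await readJSON("./test.json");

const finalJSON = {
  ...testObj,
  data: testObj.data?.map((x) => {
    if (x.type === 's3 credentials') return { ...x, bucket };
    if (x.type === 'config') return { ...x, values: { access, key } };
    if (x.type === 'upload') return { ...x, input: x.input.replace('$path', path) };
    if (x.type === 'change') {
      return {
        ...x,
        measure: x.measure.map((m) =>
          m.to ? { ...m, to: m.to.replace('$bucketPath', bucketPath) } : m
        )
      };
    }
    return x; // keep anything else unchanged
  })
};

await fs.writeFile("./test.json", JSON.stringify(finalJSON, null, 2));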
[Original answer]
There is a problem with the function you pass to map: the if has no else, so every non-matching element becomes undefined. I think you need else { return x; } to return the original data when x.type is not what you expected.
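For illustration, JSON.stringify serializes undefined array elements as null, which is why the other objects show up as null in your output. A small self-contained example of the same pattern:

const items = [{ type: 'config' }, { type: 's3 credentials' }];

const mapped = items.map((x) => {
  if (x.type === 's3 credentials') return { ...x, bucket: 'testBucket' };
  // no else: every other element becomes undefined
});

console.log(JSON.stringify(mapped));
// [null,{"type":"s3 credentials","bucket":"testBucket"}]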