I tried to encode strings as BigUint64 values and write them to a file for later use. BigInt cannot be stringified with JSON, so I write the data directly as a BigUint64Array.
However, when I read the file back to recover the BigUint64Array, I receive a Buffer, and the problem is that I don't know how to convert that Buffer into a BigUint64Array.
// Repro: write a BigUint64Array's bytes to a file, then try to rebuild the
// array from what readFileSync returns.
const { writeFileSync, readFileSync } = require('node:fs')
// Set deduplicates, so only 1n, 2n, 3n survive.
const st = new Set([1n, 2n, 3n, 1n, 1n, 1n])
const A = BigUint64Array.from(st)
console.log(A)
// writeFileSync accepts a typed array and writes its underlying bytes.
writeFileSync('./abc', A)
// readFileSync returns a Buffer (a Uint8Array subclass of raw bytes).
let x = readFileSync('./abc')
// BUG (the question): BigUint64Array.from iterates the Buffer element by
// element; each element is a Number (one byte), not a BigInt, so this line
// throws "TypeError: Cannot convert 1 to a BigInt".
x = BigUint64Array.from(x)
console.log(x)
when I try to run this code, I receive
node .\test.js
BigUint64Array(3) [ 1n, 2n, 3n ]
R:\mahjong\FYP22070\test.js:9
x = BigUint64Array.from(x)
^
TypeError: Cannot convert 1 to a BigInt
at Function.from (<anonymous>)
at Object.<anonymous> (R:\mahjong\FYP22070\test.js:9:20)
at Module._compile (node:internal/modules/cjs/loader:1159:14)
at Module._extensions..js (node:internal/modules/cjs/loader:1213:10)
at Module.load (node:internal/modules/cjs/loader:1037:32)
at Module._load (node:internal/modules/cjs/loader:878:12)
at Function.executeUserEntryPoint [as runMain] (node:internal/modules/run_main:81:12)
at node:internal/main/run_main_module:23:47
CodePudding user response:
You can convert your BigInts to strings before passing them to JSON.stringify,
something like:
const st = new Set([1n, 2n, 3n, 1n, 1n, 1n])
// BigInt values cannot be JSON-serialized, so convert each one to a string first.
const converted = Array.from(st.values()).map(current => current.toString());
// Serialize the string array `converted` — not the original BigInt collection,
// on which JSON.stringify would throw a TypeError.
writeFileSync('./abc', JSON.stringify(converted));
And to convert in the other direction, you can simply call BigInt(yourString) to turn each string back into a BigInt.
CodePudding user response:
writeFileSync allows you to pass any typed array and writes out its buffer (the bytes of the array) to the file, but what you get back from readFileSync is a Buffer, and as far as I can tell, there's no direct way to convert that Buffer's bytes into a BigUint64Array. But there's a very nearly direct route: through a Uint8Array, using that array's buffer to build the BigUint64Array (so we don't make an extra copy of the data):
function readBigUint64FromFileSync(fileName) {
    // Copy the Buffer's bytes into a standalone Uint8Array (one copy), then
    // view that array's backing ArrayBuffer as 64-bit unsigned BigInts
    // (no second copy).
    const raw = readFileSync(fileName);
    const copied = new Uint8Array(raw);
    return new BigUint64Array(copied.buffer);
}
That doesn't make an extra copy for the conversion from Uint8Array to BigUint64Array, but it does make one copy (the Buffer => Uint8Array). We can avoid that by seeing how large the file is, allocating a Uint8Array of that size, and reading the file into it:
function readBigUint64FromFileSync(fileName) {
    // Reads the whole file into a freshly allocated Uint8Array, then views its
    // buffer as a BigUint64Array — a zero-copy reinterpretation of the bytes.
    const { size } = statSync(fileName);
    const bytes = new Uint8Array(size);
    const handle = openSync(fileName);
    try {
        // readSync may return fewer bytes than requested, so loop until the
        // whole file has been consumed.
        let offset = 0;
        while (offset < size) {
            const read = readSync(handle, bytes, { offset, length: size - offset, position: offset });
            if (read === 0) {
                throw new Error(`Unexpected end of file in ${fileName}: got ${offset} of ${size} bytes`);
            }
            offset += read;
        }
        return new BigUint64Array(bytes.buffer);
    } finally {
        closeSync(handle);
    }
}
If you're doing this in an environment where the file may be actively written by another process, you'll want to implement some kind of guard around that so that we don't see the size of one version of the file but the data of another version of the file. But if not, the above is sufficient.
Complete example:
const { writeFileSync, openSync, statSync, readSync, closeSync } = require('node:fs');
/**
 * Persists the raw bytes of a BigUint64Array to a file.
 * @param {BigUint64Array} array - the values to write out
 * @param {string} fileName - destination path
 */
function writeBigUint64ToFileSync(array, fileName) {
    writeFileSync(fileName, array);
}
function readBigUint64FromFileSync(fileName) {
    // Reads the whole file into a freshly allocated Uint8Array, then views its
    // buffer as a BigUint64Array — a zero-copy reinterpretation of the bytes.
    const { size } = statSync(fileName);
    const bytes = new Uint8Array(size);
    const handle = openSync(fileName);
    try {
        // readSync may return fewer bytes than requested, so loop until the
        // whole file has been consumed.
        let offset = 0;
        while (offset < size) {
            const read = readSync(handle, bytes, { offset, length: size - offset, position: offset });
            if (read === 0) {
                throw new Error(`Unexpected end of file in ${fileName}: got ${offset} of ${size} bytes`);
            }
            offset += read;
        }
        return new BigUint64Array(bytes.buffer);
    } finally {
        closeSync(handle);
    }
}
// Round-trip demo: persist a deduplicated set of BigInts, read it back,
// and verify the two arrays match element for element.
const uniqueValues = new Set([1n, 2n, 3n, 1n, 1n, 1n]);
const original = BigUint64Array.from(uniqueValues);
writeBigUint64ToFileSync(original, "./abc");
console.log(original);
const fromFile = readBigUint64FromFileSync("./abc");
console.log(fromFile);
const identical = original.length === fromFile.length && original.every((n, i) => fromFile[i] === n);
console.log(`Same? ${identical}`);
(Or swap in the simpler version of readBigUint64FromFileSync if making one copy of the data is okay.)