I'm processing CSV file data coming from a request to create a JSON object, using multer
and csv-parser
. But I haven't been able to get it working for a long time — please help, and thanks. The following is an example of the CSV file processing.
const multer = require('multer');
const fs = require('fs')
const csv = require('csv-parser')

// BUG FIX: the original passed destination/filename options to
// multer.memoryStorage(), which takes no options and keeps uploads in
// memory only — so no file ever existed at ./csv and the
// fs.createReadStream below failed. diskStorage is the engine that
// honors these callbacks.
const fileStorageEngine = multer.diskStorage({
  // Directory the uploaded file is written to (must already exist).
  destination: (req, file, cb) => {
    cb(null, './csv');
  },
  // Keep the client's original file name on disk.
  filename: (req, file, cb) => {
    cb(null, file.originalname);
  },
});

const upload = multer({ storage: fileStorageEngine });

// POST /uploadcsv — accepts one file in form field "upfile", streams it
// from disk through csv-parser, and responds once parsing finishes.
app.post('/uploadcsv', upload.single("upfile"), async (req, res) => {
  const file = req.file;
  // multer leaves req.file undefined when no file was sent.
  if (!file) {
    return res.status(400).send('no file uploaded');
  }
  fs.createReadStream(`./csv/${file.originalname}`)
    .pipe(csv())
    .on("data", (data) => console.log(data))
    // Respond only after the whole file has been parsed — the original
    // sent the response before the stream completed.
    .on("end", () => res.send("file uploaded"))
    .on("error", (err) => res.status(500).send(err.message));
})
Here I'm using the fs
module, which works for me, but I don't want to store the data in a file — instead I want to process the buffer data coming from req.file.buffer
in chunks.
I'm stuck here, please help: storing the file and then reading that same file back makes processing slow, because the CSV file has thousands of rows.
CodePudding user response:
The idea is to create a readable stream from the buffer instead of writing it to a file, as follows:
const multer = require('multer');
const { Readable } = require('stream');
const fs = require('fs') // kept from the original; not used by this route
const csv = require('csv-parser')

// memoryStorage keeps the upload in req.file.buffer and takes NO
// options — the destination/filename callbacks the original passed
// belong to diskStorage and were silently ignored here.
const fileStorageEngine = multer.memoryStorage();

const upload = multer({ storage: fileStorageEngine });

// POST /uploadcsv — accepts one file in form field "upfile" and parses
// it straight from the in-memory buffer, with no disk round-trip.
app.post('/uploadcsv', upload.single("upfile"), async (req, res) => {
  const file = req.file;
  // multer leaves req.file undefined when no file was sent.
  if (!file) {
    return res.status(400).send('no file uploaded');
  }
  // Wrap the buffer in a readable stream so csv-parser can consume it
  // chunk by chunk.
  Readable.from(file.buffer)
    .pipe(csv())
    .on("data", (data) => console.log(data))
    // Respond only after parsing completes — the original sent the
    // response while the stream was still running.
    .on("end", () => res.send("file uploaded"))
    .on("error", (err) => res.status(500).send(err.message));
})