I have some huge files which are difficult to read into memory. I need to read each line, replace double quotes if found, and write the result back to the same file. Right now, I am reading the file line by line, storing the lines in an array, and overwriting the same file. But that's causing memory issues for big files. Any pointers? Here is my present implementation:
const fs = require('fs');

// Read the whole file into memory, double every double quote,
// and overwrite the original file. This is the approach that
// runs out of memory on big files.
var allData = fs.readFileSync(fileName, { encoding: 'utf8' }).toString().split("\n");
var finalString = "";
for (i in allData) {
    allData[i] = allData[i].replace(/"/g, '""');
    finalString = finalString.concat(allData[i]);
    finalString = finalString.concat("\n");
}
fs.writeFileSync(fileName, finalString);
Is there a way to edit by reading one line at a time and changing that in the file?
I have seen a similar question using scramjet, but it throws an error and is not compatible with all Node.js versions: node.js modify file data stream?
Answer
After going through a lot of answers, the following worked for me. It takes care of the required synchronous and asynchronous behaviour, handles large files, and keeps the file name the same.
const fs = require('fs');

function format_file(fileName) {
    return new Promise((resolve, reject) => {
        if (fs.existsSync(fileName)) {
            // Build a temp file path in the same directory,
            // e.g. /a/b/data.csv -> /a/b/tempFiledata.csv
            var fields = fileName.split('/');
            var tempFile = "";
            var arrayLength = fields.length;
            for (var i = 0; i < arrayLength - 1; i++) {
                tempFile = tempFile + fields[i] + "/";
            }
            tempFile = tempFile + "tempFile" + fields[arrayLength - 1];
            console.log("temp file name is: " + tempFile + ", actual file name is: " + fileName);

            var processStream = new ProcessStream();
            fs.createReadStream(fileName, { highWaterMark: 128 * 4096 }) // read in large chunks
                .pipe(processStream)                  // rewrite each chunk as it streams through
                .pipe(fs.createWriteStream(tempFile))
                .on('finish', function() {
                    // finished: swap the temp file in under the original name
                    fs.renameSync(tempFile, fileName);
                    console.log('done formatting');
                    resolve('done');
                });
        } else {
            reject('path not found');
        }
    });
}
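The code above relies on a ProcessStream transform that is not shown. A minimal sketch of what it could look like, assuming it applies the same quote doubling as in the question (the class name comes from the snippet above; the implementation here is an illustration, not library code):

const { Transform } = require('stream');

// Doubles every double quote in each chunk as it streams through.
// Assumption: the input is ASCII-safe; for multi-byte UTF-8 input,
// a StringDecoder would be needed so characters split across chunk
// boundaries are not mangled by toString().
class ProcessStream extends Transform {
    _transform(chunk, encoding, callback) {
        var data = chunk.toString().replace(/"/g, '""');
        this.push(data);
        callback();
    }
}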
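With that in place, the function can be called like this (the path is just an illustrative example):

format_file('./data/huge.csv')
    .then(function(result) { console.log(result); })  // logs 'done'
    .catch(function(err) { console.error(err); });    // logs 'path not found'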