I tried to read a file line by line and output it to another file using Node.js. My problem is that the sequence of lines sometimes gets messed up due to the async nature of Node.js.
E.g. my input file is like:
line 1
line 2
line 3
but the output file could be like:
line 1
line 3
line 2
Below is my code.
var fs = require("fs");
var index = 1;

fs.readFileSync('./input.txt').toString().split('\n').forEach(
    function (line) {
        console.log(line);
        fs.open("./output.txt", 'a', 0666, function (err, fd) {
            fs.writeSync(fd, line.toString() + "\n", null, undefined, function (err, written) {
            });
        });
    }
);
Any thoughts would be appreciated, thanks.
Method 1: Using the Readline Module
Readline is a native module of Node.js; it was developed specifically for reading content line by line from any readable stream. It can also be used to read data from the command line.
var fs = require('fs');
var readline = require('readline');
var stream = require('stream');

var instream = fs.createReadStream('your/file');
var outstream = new stream;
var rl = readline.createInterface(instream, outstream);

rl.on('line', function (line) {
    // process line here
});
You can use the fs module to read and write files in Node.js. The fs module is a built-in module that provides both asynchronous and synchronous functions to read, write, and watch files.
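As a minimal sketch of that difference (the file name input.txt is just a placeholder):

var fs = require('fs');

// Synchronous: blocks until the whole file has been read.
var data = fs.readFileSync('./input.txt', 'utf8');
console.log(data);

// Asynchronous: the callback runs once the read completes.
fs.readFile('./input.txt', 'utf8', function (err, contents) {
    if (err) throw err;
    console.log(contents);
});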
In all current versions of Node.js, readline.createInterface can be used as an async iterable, to read a file line by line, or just for the first line.
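A sketch of that pattern, assuming an input.txt in the working directory:

const fs = require('fs');
const readline = require('readline');

async function processLineByLine() {
    const rl = readline.createInterface({
        input: fs.createReadStream('./input.txt'),
        crlfDelay: Infinity // treat \r\n as a single line break
    });

    // Lines are yielded in order, so the output sequence is preserved.
    for await (const line of rl) {
        console.log(line);
    }
}

processLineByLine();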
If you're writing synchronous code, use only the synchronous functions:
var fs = require("fs"); fs.readFileSync('./input.txt').toString().split('\n').forEach(function (line) { console.log(line); fs.appendFileSync("./output.txt", line.toString() + "\n"); });
For an asynchronous approach you could write something like:
var fs = require('fs'),
    carrier = require('carrier');

var input = fs.createReadStream('./input.txt'),
    output = fs.createWriteStream('./output.txt', { flags: 'a' });

carrier.carry(input)
    .on('line', function (line) {
        output.write(line + '\n');
    })
    .on('end', function () {
        output.end();
        console.log("Done");
    });
I suppose you want to perform some calculations and/or transformations on every line. If not, a simple copy is a one-liner (take a look at the createReadStream documentation):
fs.createReadStream('./input.txt').pipe(fs.createWriteStream('./output.txt'));
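If you do need per-line processing, here is a minimal sketch of that idea using the readline module, with an uppercase transform standing in as a hypothetical example; 'line' events fire in order, so the output sequence is preserved:

var fs = require('fs'),
    readline = require('readline');

var rl = readline.createInterface({ input: fs.createReadStream('./input.txt') });
var out = fs.createWriteStream('./output.txt');

rl.on('line', function (line) {
    out.write(line.toUpperCase() + '\n'); // hypothetical per-line transformation
});

rl.on('close', function () {
    out.end();
});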
Right now you are trying to open the file each time you want to write a line, and yes, the order is unpredictable here. A more correct version of your code:
var fs = require('fs');

var lines = fs.readFileSync('./input.txt').toString().split('\n');

function writeLineFromArray(lines) {
    if (lines.length === 0) {
        return;
    }
    var line = lines.shift();
    fs.open("./output.txt", 'a', 0666, function (err, fd) {
        fs.write(fd, line + '\n', null, 'utf8', function (err, written) {
            fs.close(fd, function () {
                // write the next line only after this one has finished
                writeLineFromArray(lines);
            });
        });
    });
}

writeLineFromArray(lines);
I'd probably use one of the "given an input stream, notify me on each line" modules, for example node-lazy or byline:
var fs = require('fs'),
    byline = require('byline');

var stream = byline(fs.createReadStream('sample.txt'));

stream.on('line', function (line) {
    // do stuff with line
});

stream.pipe(fs.createWriteStream('./output.txt'));
Why not use a Node.js module for this: https://github.com/pvorb/node-read-files
Installation: npm install read-files
Usage:
var fs = require("fs");
require("read-files");
fs.readFiles([ "file1", "file2" ], "utf8", function (err, data) {
    if (err) throw err;
    console.log(data);
});