Logo Questions Linux Laravel Mysql Ubuntu Git Menu
 

NodeJS concatenate all files in a directory

Is there a faster or more succinct way to concatenate all of the files located in a directory using NodeJS?

In bash I could do something like this:

# Concatenate every file named by the first argument into the file named
# by the second.  $1 is intentionally unquoted so a glob pattern passed in
# (e.g. 'dir/*') is expanded here into one word per file.
for file in $1
do
  cat "$file"   # emit this file's contents
  echo          # newline separator between files
done > $2;

Here is what I'm doing now:

var fs = require('fs');
var Promise = require('bluebird');

module.exports = function(directory, destination) {
    return new Promise((resolve, reject) => {
        fs.readdir(directory, (err, files) => {
            if (err) {
                return reject(err);
            }

            (function next() {
                var file = files.shift();
                if (!file) {
                    return resolve();
                }

                fs.readFile(directory + '/' + file, (err, content) => {
                    if (err) {
                        return reject(err);
                    }

                    fs.appendFile(destination, '\n' + content, (err) => {
                        if (err) {
                            return reject(err);
                        }

                        return next();
                    });
                });
            })();
        });
    });
};
like image 591
d-_-b Avatar asked May 02 '17 00:05

d-_-b


3 Answers

How about this?

require('child_process').execSync('cat *').toString('UTF-8')

:D

like image 160
JLM Avatar answered Oct 16 '22 14:10

JLM


If you're going to use bluebird then you get the benefit of promisification. You can use promisifyAll() to convert every error-first-callback async function in the fs module into a promise-returning version. You can read more about it in the promisification link above.

The code below reads all of the files as strings, joins their contents into a single string, and writes that string to the destination.

It's probably best not to catch() any returned errors here. Rather, the caller should attach a catch() to handle any returned errors as needed.

const Promise = require('bluebird')
const fs = Promise.promisifyAll(require('fs'))
const path = require('path')

module.exports = (directory, destination) => {
  return fs.readdirAsync(directory)
    .map(file => fs.readFileAsync(path.join(directory, file), 'utf8'))
    .then(contents => fs.writeFileAsync(destination, contents.join('\n')))
}
like image 40
peteb Avatar answered Oct 16 '22 13:10

peteb


Using the async library you can easily read files in parallel and then join the results.

const fs = require("fs");
const async = require("async");
const path = require("path");

module.exports = function(directory, destination) {

  return new Promise((resolve, reject) => {

    fs.readdir(directory, (err, files) => {
        if (err)
            return reject(err);

        files = files.map(file => path.join(directory,file));

        //Read all files in parallel
        async.map(files, fs.readFile, (err, results) => {
            if (err)
                return reject(err);

           //results[0] contents of file #1
           //results[1] contents of file #2
           //results[n] ...

            //Write the joined results to destination
            fs.writeFile(destination, results.join("\n"), (err) => {
                if (err)
                    return reject(err);

                resolve();
            });
        });

    });
  });
}
like image 4
Marcos Casagrande Avatar answered Oct 16 '22 13:10

Marcos Casagrande