I'm using node-webkit, and am trying to have a user select a folder, and I'll return the directory structure of that folder and recursively get its children.
I've got this working fairly simply with this code (in an Angular Controller).
var fs = require('fs');

$scope.explorer = [];

// Entry point: rebuild the tree for the currently selected folder.
$scope.openFile = function () {
  $scope.explorer = [tree_entry($scope.path)];
  get_folder($scope.path, $scope.explorer[0].children);
};

/**
 * Recursively populate `tree` with one entry per item under `path`.
 *
 * Passing { withFileTypes: true } makes readdir return fs.Dirent objects,
 * so we already know which entries are directories. This removes the
 * separate fs.lstat call per file that the original version made — one
 * extra async round-trip for every single file is what made a moderately
 * sized tree take minutes. (Dirent type info, like lstat, does not follow
 * symlinks, so symlinked directories are still not recursed into.)
 *
 * @param {string} path  directory to read
 * @param {Array}  tree  children array to push entries onto (mutated)
 */
function get_folder(path, tree) {
  fs.readdir(path, { withFileTypes: true }, function (err, entries) {
    if (err) return console.log(err);
    entries.forEach(function (entry) {
      var node = tree_entry(entry.name);
      tree.push(node);
      if (entry.isDirectory()) {
        get_folder(path + '/' + entry.name, node.children);
      }
    });
    // NOTE: the original logged $scope.explorer right after calling
    // fs.readdir, i.e. before any results had arrived — removed, since
    // it always printed an incomplete tree.
  });
}

// A single tree node: display label plus a (possibly empty) children list.
function tree_entry(entry) {
  return { label: entry, children: [] };
}
Taking a moderate sized folder with 22 child folders and about 4 levels deep, it is taking a few minutes to get the entire directory structure.
Is there something that I'm obviously doing wrong here? I can't believe it takes that long, seeing as I'm using the built in Node fs methods. Or is there a way to get the entire contents of a directory without touching each and every file?
I'm going to want to be able to use an Angular filter on the file names all the way down the tree, and possibly on the contents too, so delaying processing the entire tree isn't likely a solution that would work.
To get a list of the names of all files present in a directory in Node.js, we can call the readdir method: const testFolder = './folder/path'; const fs = require('fs'); fs.readdir(testFolder, (err, files) => { files.forEach(file => console.log(file)); });
In my project I use this function for getting a huge number of files. It's pretty fast (move the require("fs") call outside the function to make it even faster):
/**
 * Synchronously collect every file path found beneath `dir`, recursing
 * into sub-directories. Only file paths are returned; directory paths
 * themselves are not included in the result.
 *
 * @param {string} dir  root directory to walk
 * @returns {string[]}  flat list of "<dir>/<...>/<file>" paths
 */
var _getAllFilesFromFolder = function (dir) {
  var fs = require("fs");
  var found = [];

  for (var name of fs.readdirSync(dir)) {
    var full = dir + '/' + name;
    var info = fs.statSync(full);
    if (info && info.isDirectory()) {
      // Descend and splice the subtree's files into our result.
      found = found.concat(_getAllFilesFromFolder(full));
    } else {
      found.push(full);
    }
  }

  return found;
};
usage is clear:
_getAllFilesFromFolder(__dirname + "folder");
fs/promises and fs.Dirent
Here's an efficient, non-blocking ls
program using Node's fast fs.Dirent objects and the fs/promises module. This approach lets you skip wasteful fs.exists or fs.stat calls on every path —
// main.js
// NOTE: as previously pasted, this whole program sat on one line behind the
// leading "// main.js" comment, which commented out every token of it.
// Restored to its working multi-line form below.
import { readdir } from "fs/promises"
import { join } from "path"

// Lazily walk `path` depth-first, yielding each directory itself first and
// then every entry beneath it. { withFileTypes: true } gives fs.Dirent
// objects that already know whether they are directories, so no extra
// fs.stat call is needed per path.
async function* ls (path = ".") {
  yield path
  const entries = await readdir(path, { withFileTypes: true })
  for (const entry of entries) {
    const child = join(path, entry.name)
    if (entry.isDirectory()) {
      yield* ls(child)
    } else {
      yield child
    }
  }
}

// A zero-element async iterator, used as toArray's default argument.
async function* empty () {}

// Drain any async iterator into an ordinary array.
async function toArray (iter = empty()) {
  const collected = []
  for await (const item of iter) {
    collected.push(item)
  }
  return collected
}

toArray(ls(".")).then(console.log, console.error)
Let's get some sample files so we can see ls
working -
$ yarn add immutable # (just some example package) $ node main.js
[ '.', 'main.js', 'node_modules', 'node_modules/.yarn-integrity', 'node_modules/immutable', 'node_modules/immutable/LICENSE', 'node_modules/immutable/README.md', 'node_modules/immutable/contrib', 'node_modules/immutable/contrib/cursor', 'node_modules/immutable/contrib/cursor/README.md', 'node_modules/immutable/contrib/cursor/__tests__', 'node_modules/immutable/contrib/cursor/__tests__/Cursor.ts.skip', 'node_modules/immutable/contrib/cursor/index.d.ts', 'node_modules/immutable/contrib/cursor/index.js', 'node_modules/immutable/dist', 'node_modules/immutable/dist/immutable-nonambient.d.ts', 'node_modules/immutable/dist/immutable.d.ts', 'node_modules/immutable/dist/immutable.es.js', 'node_modules/immutable/dist/immutable.js', 'node_modules/immutable/dist/immutable.js.flow', 'node_modules/immutable/dist/immutable.min.js', 'node_modules/immutable/package.json', 'package.json', 'yarn.lock' ]
For added explanation and other ways to leverage async generators, see this Q&A.
If you found this helpful, you can donate to us via PayPal or buy us a coffee so we can maintain and grow. Thank you!
Donate to us with: