P粉696146205 2023-09-05 18:06:04
Unfortunately, the async version is slower, so we need to optimize your code instead. You can do this with the { withFileTypes: true } option, which is about 2x faster.
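The gain comes mostly from skipping the per-entry fs.statSync call: with { withFileTypes: true }, readdirSync returns fs.Dirent objects that already know their type. A minimal sketch of that difference (the /bytex path is just the test directory from below):

```javascript
import fs from 'fs';

// Each entry is a fs.Dirent, so the type check is a method call on data
// readdirSync already returned; no extra fs.statSync per entry.
const entries = fs.readdirSync('/bytex', { withFileTypes: true });
for (const entry of entries) {
  console.log(entry.name, entry.isDirectory() ? 'dir' : 'file');
}
```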
I also tried the { recursive: true } option in Node v20, but it was even slower than your solution, and it does not work together with withFileTypes.
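For reference, a minimal sketch of that attempt, assuming Node v20.1 or later (where readdirSync accepts recursive) and the same /bytex test directory as below:

```javascript
import fs from 'fs';

const DIR = '/bytex';

// recursive: true returns every entry below DIR (files and directories)
// as a path relative to DIR, so no manual recursion is needed.
const entries = fs.readdirSync(DIR, { recursive: true });

// Keep only files, mirroring what getFiles()/getFiles2() collect below.
const files = entries
  .map(name => `${DIR}/${name}`)
  .filter(name => !fs.statSync(name).isDirectory());

console.log(files.length);
```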
Maybe a better SSD with high read speeds would help, though I'm guessing the file entries are read from the file system index, so I'm not sure how much the hardware affects that.
```javascript
import fs from 'fs';

const DIR = '/bytex';

function getFiles(dir, files = []) {
  // Get an array of all files and directories in the passed directory using fs.readdirSync
  const fileList = fs.readdirSync(dir);
  for (const file of fileList) {
    // Create the full path of the file/directory by concatenating the passed directory and file/directory name
    const name = `${dir}/${file}`;
    // Check if the current file/directory is a directory using fs.statSync
    if (fs.statSync(name).isDirectory()) {
      // If it is a directory, recursively call getFiles with the directory path and the files array
      getFiles(name, files);
    } else {
      // If it is a file, push the full path to the files array
      files.push(name);
    }
  }
  return files;
}

function getFiles2(dir, files = []) {
  const fileList = fs.readdirSync(dir, { withFileTypes: true });
  fileList.forEach(file =>
    file.isDirectory()
      ? getFiles2(`${dir}/${file.name}`, files)
      : files.push(`${dir}/${file.name}`)
  );
  return files;
}

let start = performance.now();
let files = getFiles(DIR);
console.log(performance.now() - start);
console.log(files.length);

start = performance.now();
files = getFiles2(DIR);
console.log(performance.now() - start);
console.log(files.length);
```
Output:
171.66947209835052
64508
68.24071204662323
64508