Performance and speed of moving 100,000+ files synchronously vs asynchronously in Node.js [closed]


I want to find the fastest way to move 100,000+ files with Node.js, synchronously or asynchronously, without hurting performance.

I ran both synchronous and asynchronous tests with three different loop styles: forEach, async.each, and a plain for loop. The choice of loop has little effect on the timing results; what dominates performance and speed is whether the files are moved synchronously or asynchronously.

The test cases show that synchronous renameSync() is about 20% slower than asynchronous rename(). At the same time, asynchronous rename() uses more than 3x the CPU compared to synchronous renameSync().


Can I conclude that for this kind of "big data" workload it is wiser to use the synchronous renameSync(), since the speedup from asynchronous rename() is not that significant (around 20%), while the CPU overhead of the async version seems huge (around 300%)?

Or am I missing something? Maybe there is a better workaround in terms of speed and performance.

Timings:

eachAsyncRenameAsync()

node rename.js  1.60s user 11.20s system 280% cpu 4.559 total
node rename.js  1.65s user 11.82s system 284% cpu 4.732 total
node rename.js  1.64s user 11.84s system 292% cpu 4.606 total


eachSyncRenameSync()

node rename.js  0.69s user 5.01s system 97% cpu 5.851 total
node rename.js  0.67s user 4.88s system 97% cpu 5.687 total
node rename.js  0.68s user 5.01s system 99% cpu 5.734 total


eachAsyncRenameSync()

node rename.js  0.67s user 4.99s system 97% cpu 5.797 total
node rename.js  0.68s user 4.95s system 97% cpu 5.754 total
node rename.js  0.66s user 4.89s system 97% cpu 5.690 total


eachSyncRenameAsync()

node rename.js  1.63s user 11.12s system 274% cpu 4.638 total
node rename.js  1.66s user 12.29s system 286% cpu 4.874 total
node rename.js  1.66s user 12.23s system 289% cpu 4.795 total


forSyncRenameAsync()

node rename.js  1.72s user 12.04s system 283% cpu 4.862 total
node rename.js  1.69s user 11.88s system 276% cpu 4.904 total
node rename.js  1.64s user 11.89s system 287% cpu 4.712 total


forSyncRenameSync()

node rename.js  0.64s user 4.94s system 97% cpu 5.715 total
node rename.js  0.66s user 5.01s system 97% cpu 5.807 total
node rename.js  0.65s user 4.93s system 99% cpu 5.616 total

Code:

const fs = require('fs')
const path = require('path')
const { each } = require('async')

let dir = '/opt/bin/test/Random 100000'
let dir2 = '/opt/bin/test/Random 100000 2'

function eachAsyncRenameAsync(files) {
    // async.each hands the iteratee a completion callback; forwarding it to
    // fs.rename lets each() know when every file has been moved
    each(files, (file, callback) => {
        fs.rename(path.join(dir, file), path.join(dir2, file), callback)
    }, err => {
        if (err) { console.log(err) }
    })
}

function eachSyncRenameSync(files) {
    files.forEach(file => {
        fs.renameSync(path.join(dir, file), path.join(dir2, file))
    })
}

function eachAsyncRenameSync(files) {
    // renameSync blocks, so the iteratee callback is called right after it
    // returns, letting async.each schedule the next file
    each(files, (file, callback) => {
        fs.renameSync(path.join(dir, file), path.join(dir2, file))
        callback()
    })
}

function eachSyncRenameAsync(files) {
    files.forEach(file => {
        fs.rename(path.join(dir, file), path.join(dir2, file), err => {
            if (err) { console.log(err) }
        })
    })
}

function forSyncRenameAsync(files) {
    for (let i = 0; i < files.length; i++) {
        fs.rename(path.join(dir, files[i]), path.join(dir2, files[i]), err => {
            if (err) { console.log(err) }
        })
    }
}

function forSyncRenameSync(files) {
    for (let i = 0; i < files.length; i++) {
        fs.renameSync(path.join(dir, files[i]), path.join(dir2, files[i]))
    }
}

// Reading dir asynchronously and moving files

fs.readdir(dir, (err, files) => {
    if (err) { console.log(err) }
    else {
        console.log('eachAsyncRenameAsync()')
        eachAsyncRenameAsync(files)
    }
})
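For reference, a single variant can also be timed in-process instead of with the shell's time command. The following is only a sketch (not one of the timed runs above), and it only works as-is for the synchronous variants; for the asynchronous ones the end time would need to be taken in a completion callback.

// Hypothetical harness: time one synchronous variant with console.time
fs.readdir(dir, (err, files) => {
    if (err) { console.log(err); return }
    console.time('forSyncRenameSync')
    forSyncRenameSync(files)
    console.timeEnd('forSyncRenameSync')
})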
Tags: javascript, node.js, performance, asynchronous, node-async
1 Answer

To compare and optimize the asynchronous approach, create a pool that limits the number of concurrent operations; that will let you find the optimal result.

The benchmarks you ran suggest the best result lies somewhere between 1 and n concurrent asynchronous operations.
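As a minimal sketch of that idea, assuming the same async library already used in the question: eachLimit caps how many rename calls are in flight, and CONCURRENCY is a hypothetical knob to sweep between 1 and files.length.

const fs = require('fs')
const path = require('path')
const { eachLimit } = require('async')

const dir = '/opt/bin/test/Random 100000'
const dir2 = '/opt/bin/test/Random 100000 2'
const CONCURRENCY = 64 // hypothetical starting point; benchmark different limits

function poolRenameAsync(files, done) {
    // eachLimit runs at most CONCURRENCY iteratees at once and calls `done`
    // once every file has been processed (or on the first error)
    eachLimit(files, CONCURRENCY, (file, callback) => {
        fs.rename(path.join(dir, file), path.join(dir2, file), callback)
    }, done)
}

fs.readdir(dir, (err, files) => {
    if (err) { console.log(err); return }
    console.time('poolRenameAsync')
    poolRenameAsync(files, moveErr => {
        if (moveErr) { console.log(moveErr) }
        console.timeEnd('poolRenameAsync')
    })
})

Sweeping CONCURRENCY from 1 (effectively sequential, but without blocking the event loop like renameSync) up to files.length (the unbounded behaviour of the original async tests) should show where throughput peaks before the ~300% CPU overhead appears.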
