Node.js (libuv) kqueue(): Too many open files in system when trying to write files in Node.js


When I use fs.appendFile to save a large number of records from the database, I get an error. The code is as follows:

var query = 'SELECT * FROM Messages LIMIT 10000';
connection.query(query, function(err, rows, fields) {
    if (err) throw err;
    connection.end();

    for (var i = 0; i < rows.length; i++) {
        fs.appendFile('somefile.json', '{"index" : {"_index" : "mymessages" , "_type" : "data" , "_id" : "'+rows[i].message_id+'"}} \n' + JSON.stringify(rows[i]) + '\n', function (err) {
            if (err) return console.log(err);
            //console.log(err);
        });
    }

    console.log('Added: ' + rows.length);
});
The error:

node es.js
(libuv) kqueue(): Too many open files in system
(libuv) kqueue(): Too many open files in system
/private/var/www/mail-listener/node_modules/mysql/lib/protocol/Parser.js:77
        throw err; // Rethrow non-MySQL errors
        ^
Error: write EBADF
    at exports._errnoException (util.js:746:11)
    at WriteStream.Socket._writeGeneric (net.js:681:26)
    at WriteStream.Socket._write (net.js:700:8)
    at doWrite (_stream_writable.js:301:12)
    at writeOrBuffer (_stream_writable.js:288:5)
    at WriteStream.Writable.write (_stream_writable.js:217:11)
    at WriteStream.Socket.write (net.js:625:40)
    at Console.log (console.js:55:16)
    at Query._callback (/private/var/www/mail-listener/es.js:114:11)
    at Query.Sequence.end (/private/var/www/mail-listener/node_modules/mysql/lib/protocol/sequences/Sequence.js:96:24)

fs.appendFile opens the file again on every call. Because a synchronous for loop is firing off these asynchronous file operations, you queue up thousands of attempts to open the same file before any of them can complete, which exhausts the system's open-file limit.

Open the file once, outside the for loop, and reuse that write stream instead of calling appendFile.

Here is an example:

var query = 'SELECT * FROM Messages LIMIT 10000';
connection.query(query, function(err, rows, fields) {
    if (err)
        throw err; // may want to look into making this async as well.

    connection.end();

    // may need to use a flag to open this as append, if that is your intent
    var stream = fs.createWriteStream('somefile.json');

    for (var i = 0; i < rows.length; i++) {
        var msg = '{"index" : {"_index" : "mymessages" , "_type" : "data" , "_id" : "'+rows[i].message_id+'"}} \n' + JSON.stringify(rows[i]) + '\n';
        stream.write(msg); // not back-pressure sensitive, see link below
    }

    // all writes have been buffered at this point
    // note: they have not necessarily been *written* yet.
    // if necessary, you may want to look into properly
    // handling backpressure as well, since this won't do it.
    // see: https://nodejs.org/api/stream.html#stream_class_stream_writable

    stream.end();

    stream.on('finish', function() {
        // writes are all *actually* finished at this point.
        console.log('added: ' + rows.length);
    });
});
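As the comments above note, stream.write() is called without checking its return value, so backpressure is ignored. Below is a minimal sketch of a backpressure-aware write loop, following the pattern from the Node.js writable-stream documentation linked above; the writeRows helper is only an illustration, not part of the original answer.

// Hypothetical helper: writes each row, pausing whenever the stream's
// internal buffer is full and resuming on 'drain'.
function writeRows(stream, rows, done) {
    var i = 0;
    (function writeNext() {
        var ok = true;
        while (i < rows.length && ok) {
            var row = rows[i++];
            var msg = '{"index" : {"_index" : "mymessages" , "_type" : "data" , "_id" : "' + row.message_id + '"}} \n' +
                      JSON.stringify(row) + '\n';
            if (i === rows.length) {
                stream.write(msg, done); // last chunk: fire the callback once it is flushed
            } else {
                ok = stream.write(msg);  // false means the internal buffer is full
            }
        }
        if (i < rows.length) {
            stream.once('drain', writeNext); // continue once the buffer has drained
        }
    })();
}

// Usage inside the query callback, replacing the for loop above:
// var stream = fs.createWriteStream('somefile.json');
// writeRows(stream, rows, function() {
//     stream.end();
//     console.log('added: ' + rows.length);
// });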

Follow-up from the asker: one more question, if you don't mind. Is this a good way to dump n records, say 10 million?

Absolutely. My hunch is that with datasets that large, streams will be the best solution. You should definitely look into backpressure, though, and also into whatever mysql library you are using: it may already support streaming the result set, which is far better than loading 10,000 records into memory at once. You can pull the rows out, transform them, and write them as they arrive, as in the sketch below.
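For instance, assuming you are using the mysql package (node-mysql), a query emits a 'result' event per row and the connection can be paused while the file stream catches up. This is a rough sketch under that assumption; check your own driver's documentation, since its streaming API may differ.

var fs = require('fs');
var mysql = require('mysql');

var connection = mysql.createConnection({ /* your connection settings */ });
var out = fs.createWriteStream('somefile.json');

connection.query('SELECT * FROM Messages')
    .on('error', function(err) {
        console.log(err);
    })
    .on('result', function(row) {
        // transform and write each row as it arrives, instead of
        // buffering the whole result set in memory first
        var msg = '{"index" : {"_index" : "mymessages" , "_type" : "data" , "_id" : "' + row.message_id + '"}} \n' +
                  JSON.stringify(row) + '\n';
        if (!out.write(msg)) {
            connection.pause();          // file stream is backed up
            out.once('drain', function() {
                connection.resume();     // resume once it has drained
            });
        }
    })
    .on('end', function() {
        out.end();
        connection.end();
    });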