elasticsearch,Node.js,Express,elasticsearch" /> elasticsearch,Node.js,Express,elasticsearch" />

与elasticsearch连接的node.js客户端中出现ECONNRESET错误和JavaScript堆内存不足(heap out of memory)

与elasticsearch连接的node.js客户端中出现ECONNRESET错误和JavaScript堆内存不足,node.js,express,elasticsearch,Node.js,Express,elasticsearch,我有一个代码可以解析70个日志文件(每个文件大约15MB)。我从日志中提取特定字段,并向elasticsearch发送批量请求。下面是代码 //looks for all the files with .log extension glob(__dirname + "/../logs/*.log", function (er, files) { for (var i = 0; i < files.length; i++) { lr = new LineByLineReader(

我有一个代码可以解析70个日志文件(每个文件大约15MB)。我从日志中提取特定字段,并向elasticsearch发送批量请求。下面是代码

//looks for all the files with .log extension

glob(__dirname + "/../logs/*.log", function (er, files) {
for (var i = 0; i < files.length; i++) {

    lr = new LineByLineReader(files[i]);
    lr.on('error', function (err) {
        console.error(err.stack);
    });

//each line of the file is taken and certain fields are taken from it and pushed into an array
    lr.on('line', function (line) {
        arr = line.replace(/['"]+/g, '').split(" ");
        var jsonArg1 = new Object();
        var temparr = arr[13].split("?");
        jsonArg1.url = temparr[0];
        jsonArg1.method = arr[12];
        pluginArrayArg.push(jsonArg1);
    });
//once the entire file is parsed, the array is pushed into elasticsearch bulk api

    lr.on('end', function () {
        // All lines are read, file is closed now.
        data = (function () {
            var x = '';
            for (var i = 0; i < pluginArrayArg.length; i++) {
                x = x + '{ "index" :  { "_index" : "nodeclient" , "_type": "logs"} }\n' + '{"method" : "' + pluginArrayArg[i].method + '", "url" : "' + pluginArrayArg[i].url + '"}\n';
            }
            return x;
        })();

        client.bulk({
            body: [
                    data
                    ]
        }, function (err, resp) {
            if (err) {
                console.log(err);
            }
            console.log(resp);
            //console.log(JSON.stringify(resp));
        });
    });
    }
});
我尝试过在命令行中增加堆空间,如下所示,但仍然不起作用

node --max_old_space_size=4096 server.js

分别搜索这些错误,人们谈论增加maxSockets和重用池连接。我不知道如何对我的代码进行这些更改


我应该如何着手解决这些错误?这两个错误是否相关?

我刚刚使用自定义批量插入脚本批处理了许多25-40MB的文件,也收到了ECONNRESET错误

我将VirtualBox虚拟机的内存从大约4GB增加到8GB,之后就不再遇到这个问题了

Elasticsearch ERROR: 2016-10-28T21:09:21Z
  Error: Request error, retrying
  POST http:/localhost:9200/_bulk => read ECONNRESET
      at Log.error (/home/apitestcoverage/apicoverage_node/node_modules/elasticsearch/src/lib/log.js:225:56)
      at checkRespForFailure (/home/apitestcoverage/apicoverage_node/node_modules/elasticsearch/src/lib/transport.js:240:18)
      at HttpConnector.<anonymous> (/home/apitestcoverage/apicoverage_node/node_modules/elasticsearch/src/lib/connectors/http.js:162:7)
      at ClientRequest.wrapper (/home/apitestcoverage/apicoverage_node/node_modules/lodash/index.js:3095:19)
      at emitOne (events.js:96:13)
      at ClientRequest.emit (events.js:188:7)
      at Socket.socketErrorListener (_http_client.js:308:9)
      at emitOne (events.js:96:13)
      at Socket.emit (events.js:188:7)
      at emitErrorNT (net.js:1271:8)
FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - JavaScript heap out of memory