Node.js freezing up MySQL while feeding large results to Redis HMSET

I have a script that runs two SELECT queries against MySQL, one producing a result set of 226393 rows and the other 529976 rows. When feeding Redis, each row is added four times, once for each unique piece of data to be stored, which works out to roughly (226393 + 529976) × 4 ≈ 3 million HMSET calls.

The first query seems to complete without issue, but the second appears to freeze partway through and just stays there. I'm new to both Node.js and Redis, coming from a traditional MySQL background.

Here is the code:

// Start the server
http.createServer(function(req, res){
    console.log('Request received');

    for(var n = 0; n < qryArray.length; n++) {
        var qry = qryArray[n];

        // Send the query
        //console.log( n + ' :: ' + qry);
        connection.query(qry, function(err, rows, fields){
            if(err){
                console.log(err);
            }else{
                console.log('Query response ' + rows.length + ' rows');
                //console.log(util.inspect(process.memoryUsage()));

                errorCount = 0;
                goodCount  = 0;
                for(var i = 0; i < rows.length; i++){
                    var row = rows[i];

                    j = 0;
                    //var fields = result.fields.map(function(f) { return f.name; })
                    fields.forEach(function(f){
                        if(f.name != 'latlng' && f.name != 'market' && f.name != 'technology'){
                            j++;

                            //console.log(('' + row[f.name]).toUpperCase() + '~' + i + '-' + j + '~' + row.latlng);
                            redisClient.hmset(('' + row[f.name]).toUpperCase() + '~' + i + '-' + j + '~' + row.latlng, row, function(error, result){
                                if(error){
                                    errorCount++;
                                    console.log(errorCount + ' ' + error + ' ' + f.name.toUpperCase() + ' : ' + ('' + row[f.name]).toUpperCase() + '~' + i + '-' + j + '~' + row.latlng + ' :: ' + JSON.stringify(row));
                                }else{
                                    goodCount++;
                                    console.log(goodCount);
                                    //console.log(goodCount + ' Redis stored: ' + f.name.toUpperCase() + ' : ' + ('' + row[f.name]).toUpperCase() + ' ' + result);
                                }
                            });
                        }

                    });
                    //goodCount++;
                    //console.log(goodCount);
                }
                console.log('Finished indexing ' + rows.length + ' rows ' + goodCount);
            }
        });
    }
    console.log('Queries processed');

}).listen(port);
console.log('Server running on port ' + port);
Can anyone tell me what is going wrong here, or how this could be improved for large data sets? Thanks.
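One way to keep memory bounded here, sketched under the assumption that the mysql package's streaming interface is in use (makeKey() is a hypothetical stand-in for the key-building logic above):

// Sketch: consume rows one at a time instead of buffering the whole result
// set, pausing MySQL until each row has been written to Redis.
var query = connection.query(qry);
query
    .on('error', function(err){
        console.log(err);
    })
    .on('result', function(row){
        connection.pause();                       // stop the flow of rows
        redisClient.hmset(makeKey(row), row, function(error, result){
            if(error) console.log(error);
            connection.resume();                  // request the next row
        });
    })
    .on('end', function(){
        console.log('Query finished');
    });

Pausing per row trades throughput for a hard cap on buffered rows; pausing only once a pending-write count passes some threshold is a common middle ground.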

Following a pointer to the async library, I modified the code to use a queue, but it returned a fatal error:

{ [Error: Connection lost: The server closed the connection.] fatal: true, code: 'PROTOCOL_CONNECTION_LOST' }

Here is the new code with that added; please help:

// Initialize the queue
var q = async.queue(function(task){

    //console.log(('' + row[f.name]).toUpperCase() + '~' + i + '-' + j + '~' + row.latlng);
    redisClient.hmset(task.hk, task.r, function(error, result){
        if(error){
            errorCount++;
            console.log('Error: ' + errorCount);
            //console.log(errorCount + ' ' + error + ' ' + f.name.toUpperCase() + ' : ' + ('' + row[f.name]).toUpperCase() + '~' + i + '-' + j + '~' + row.latlng + ' :: ' + JSON.stringify(row));
        }else{
            goodCount++;
            console.log('Good: ' + goodCount);
            //console.log(goodCount + ' Redis stored: ' + f.name.toUpperCase() + ' : ' + ('' + row[f.name]).toUpperCase() + ' ' + result);
        }
    });

}, 50000);
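// NOTE: async.queue passes the worker a (task, callback) pair; because this
// worker never invokes that callback, tasks are never marked complete, so
// once 50000 tasks (the concurrency limit) are in flight the queue stalls
// and drain never fires.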

// Assign callback for when all items in queue have been processed
q.drain = function(){
    console.log('All queue items have been processed ' + goodCount);
}

// Start the server
http.createServer( function( req, res ){
    console.log('Request received');

    for(var n = 0; n < qryArray.length; n++) {
        var qry = qryArray[n];

        // Send the query
        //console.log( n + ' :: ' + qry);
        connection.query(qry, function(err, rows, fields){
            if(err){
                console.log(err);
            }else{
                console.log('Query response ' + rows.length + ' rows');
                //console.log(util.inspect(process.memoryUsage()));

                errorCount = 0;
                goodCount  = 0;
                for(var i = 0; i < rows.length; i++){
                    var row = rows[i];

                    var j = 0;
                    //var fields = result.fields.map(function(f) { return f.name; })
                    fields.forEach(function(f){
                        if(f.name != 'latlng' && f.name != 'market' && f.name != 'technology'){
                            j++;

                            var hkey = ('' + row[f.name]).toUpperCase() + '~' + i + '-' + j + '~' + row.latlng;
                            var task = {r: row, hk: hkey};
                            q.push(task, function(err){
                                if(err) console.log(err);
                            });
                        }
                    });

                }
                console.log('Finished indexing ' + rows.length + ' rows');
            }
        });
    }
    console.log('Queries processed');

}).listen(port);
console.log('Server running on port ' + port);
The error occurs right after 50K queue items have completed successfully (the queue was initialized with a concurrency of 50K).
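One likely contributor: the worker above never invokes its completion callback, so processing halts at exactly the concurrency limit of 50000, which may also explain why the idle MySQL connection is eventually closed by the server. For reference, a sketch of a worker that acknowledges each task only after Redis responds (the concurrency of 100 is an arbitrary choice; Redis executes commands serially anyway):

// Sketch: complete the task only once HMSET has actually returned, so the
// queue's concurrency genuinely bounds the number of outstanding writes.
var q = async.queue(function(task, callback){
    redisClient.hmset(task.hk, task.r, function(error, result){
        if(error){
            errorCount++;
            console.log('Error: ' + errorCount);
        }else{
            goodCount++;
        }
        callback(error);    // frees a queue slot; drain fires after the last task
    });
}, 100);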

To address the disconnects, I switched to MySQL's pooling mechanism, but I am still getting disconnects:
// Start the server
http.createServer(function(req, res){
    res.writeHead(200);
    res.end();

    console.log('Request received');
    pool.getConnection(function(err, connection){
        if(err){
            connection.release();
            return console.log('Database connection error ' + err);
        }
        console.log('Database is connected ' + connection.threadId + ' ...');

        for(var n = 0; n < qryArray.length; n++){
            var qry = qryArray[n];

            // Send the query
            connection.query(qry, function(err, rows, fields){
                connection.release();
                if(err){
                    return console.log('Query error: ' + err);
                }
                console.log('Query response ' + rows.length + ' rows');

                errorCount = 0;
                goodCount  = 0;
                for(var i = 0; i < rows.length; i++){
                    var row = rows[i];

                    var j = 0;
                    fields.forEach(function(f){
                        if(f.name != 'latlng' && f.name != 'market' && f.name != 'technology'){
                            j++;

                            var hkey = ('' + row[f.name]).toUpperCase() + '~' + i + '-' + j + '~' + row.latlng;
                            var task = {r: row, hk: hkey};
                            q.push(task, function(err){
                                if(err) console.log(err);
                            });
                        }
                    });
                }
                console.log('Finished indexing ' + rows.length + ' rows');
            });

            connection.on('error', function(err){
                return console.log('Database connection error ' + err);
            });
        }
        console.log('Queries processed');
    });

}).listen(port);
console.log('Server running on port ' + port);
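Worth noting in this version: connection.release() runs inside each query's callback, so the connection is returned to the pool as soon as the first query finishes, while the second is still using it, which may well contribute to the disconnect errors. A sketch that releases only after every query has come back:

// Sketch: count outstanding queries and release the pooled connection
// only after the last callback has fired.
var pending = qryArray.length;
qryArray.forEach(function(qry){
    connection.query(qry, function(err, rows, fields){
        if(err){
            console.log('Query error: ' + err);
        }else{
            // ... push rows onto the queue exactly as above ...
        }
        if(--pending === 0) connection.release();   // last query is done
    });
});

The latest revision, below, defers the queries until the first HTTP request arrives and adds a couple of dispatcher routes: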
// MySQL initialization
var pool = mysql.createPool({
    connectionLimit : 10,
    host     : 'localhost',
    user     : 'gta_ro',
    password : 'glacier',
    database : 'tower'
});

// Async Queue initialization
var insertCount = 0;
var q = async.queue(function(task, callback){

    redisClient.hmset(task.hk, task.r, function(error, result){
        if(error){
            errorCount++;
            console.log('Error: ' + errorCount);
            //console.log(errorCount + ' ' + error + ' ' + f.name.toUpperCase() + ' : ' + ('' + row[f.name]).toUpperCase() + '~' + i + '-' + j + '~' +     row.latlng + ' :: ' + JSON.stringify(row));
        }else{
            goodCount++;
            console.log('Good: ' + goodCount);
            //console.log(goodCount + ' Redis stored: ' + f.name.toUpperCase() + ' : ' + ('' + row[f.name]).toUpperCase() + ' ' + result);
        }
    });
    return setImmediate(function() { callback() });
}, 1000);
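// NOTE: callback() is scheduled via setImmediate before HMSET has completed,
// so every task finishes immediately and the concurrency of 1000 no longer
// limits how many Redis writes are actually outstanding at once.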

// Async Queue Empty callback (triggered after last item has been consumed)
q.drain = function(){
    console.log('All queue items have been processed ' + goodCount);
}

// Start the server
httpStarted = false;
http.createServer(function(req, res){
    dispatcher.dispatch(req, res);

    if(!httpStarted){
        httpStarted = true;

        console.log('Server running on port ' + port);
        pool.getConnection(function(err, connection){
            if(err){
                connection.release();
                return console.log('Database connection error ' + err);
            }
            console.log('Database is connected ' + connection.threadId + ' ...');

            for(var n = 0; n < qryArray.length; n++){
                var qry = qryArray[n];

                // Send the query
                connection.query(qry, function(err, rows, fields){
                    if(err){
                        return console.log('Query error: ' + err);
                    }

                    //connection.release();
                    console.log('Query response ' + rows.length + ' rows');

                    errorCount = 0;
                    goodCount  = 0;
                    for(var i = 0; i < rows.length; i++){
                        var row = rows[i];

                        var j = 0;
                        fields.forEach(function(f){
                            if(f.name != 'latlng' && f.name != 'market' && f.name != 'technology'){
                                j++;

                                q.push({r: row, hk: ('' + row[f.name]).toUpperCase() + '~' + i + '-' + j + '~' + row.latlng}, function(err){
                                    if(err) console.log(err);
                                });
                            }
                        });
                    }
                    console.log('Finished indexing');
                });

                connection.on('error', function(err){
                    return console.log('Database connection error ' + err);
                });
            }
            console.log('Queries processed');
        });
    }
}).listen(port);

dispatcher.onGet('/', function(req, res){
    if(httpStarted){
        res.writeHead(200, {'Content-Type': 'text/html'});
        res.end('MySQL Indexer running' + '\n');

        console.log('MySQL Indexer running');
    }
});

dispatcher.onGet('/status', function(req, res){
    res.writeHead(200, {'Content-Type': 'text/html'});
    res.end('Status ' + q.length() + '\n');

    console.log('Status ' + q.length());
});