Tornado performance issue with asynchronous MySQL and Redis


I have a Tornado server that uses MySQL as its database and Redis as its cache. I am using WebSockets to send and receive data. My code is as follows:

Server

import logging
import os.path
import uuid
import sys
import json

import tornadis
import tormysql
import tornado.escape
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.websocket
from tornado import gen
from tornado.concurrent import Future
from tornado.options import define, options

# Fetch rows from MySQL through the shared tormysql connection pool (dbPool).
@gen.coroutine
def getFromDB(query):
    with (yield dbPool.Connection()) as conn:
        with conn.cursor() as cursor:
            yield cursor.execute(query)
            datas = cursor.fetchall()
            return datas
    return None

# Run a Redis command through the shared tornadis client pool (cachePool).
@gen.coroutine
def getFromCache(cmd):
    pipeline = tornadis.Pipeline()
    pipeline.stack_call(cmd)
    with (yield cachePool.connected_client()) as singleClient:
        redisResult = yield singleClient.call(pipeline)
        if isinstance(redisResult, tornadis.TornadisException):
            print("Redis exception: %s"%(redisResult))
        else:
            return redisResult

# Query MySQL and Redis concurrently and merge the transformed results.
async def getData(dbQuery, cacheQuery):
    waitDict = {}
    if dbQuery:
        waitDict['db'] = getFromDB(dbQuery)
    if cacheQuery:
        waitDict['cache'] = getFromCache(cacheQuery)
    resultList = []
    if len(waitDict) > 0:
        # gen.multi waits for every future in the dict to resolve.
        await gen.multi(waitDict)
        if 'db' in waitDict:
            dbRes = waitDict['db'].result()
            if dbRes:
                for eachResult in dbRes:
                    changeRes = someFunct(eachResult) 
                    resultList.append(changeRes)
        if 'cache' in waitDict:
            cacheRes = waitDict['cache'].result()
            if cacheRes:
                for eachResult in cacheRes:
                    changeRes = someFunct(eachResult) 
                    resultList.append(changeRes)
    return resultList

class SocketHandler(tornado.websocket.WebSocketHandler):
    SUPPORTED_METHODS = ("GET",)  # one-element tuple; without the comma this is just a string

    def open(self):
        print("Socket open:%s"%(self))

    def on_close(self):
        print("Socket closed:%s"%(self))

    async def on_message(self, inp):
        # Parsing of `inp` into requestForData / dbQuery / cacheQuery is omitted here.
        if requestForData:
            ret = await getData(dbQuery, cacheQuery)
            self.write_message(ret)

class Application(tornado.web.Application):
    def __init__(self):
        handlers = [
            (r"/sock", SocketHandler),
        ]
        super().__init__(handlers)  # register the WebSocket route with the application

define("port", default=8000, help="run on the given port", type=int)
tornado.options.parse_command_line()
app = Application()
app.listen(options.port)
print("PORT:%s"%(options.port))
tornado.ioloop.IOLoop.current().start()
I am using tornadis for Redis and tormysql for MySQL.
I am running this setup on an Amazon Linux m5.large instance with 2 vCPUs and 8 GiB of memory.
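The posted code references dbPool and cachePool but never shows how they are created. A minimal sketch of how such pools are typically built with tormysql and tornadis might look like the following; the hosts, credentials, and pool sizes are placeholders, not values from the original setup:

import tormysql
import tornadis

# Hypothetical pool configuration; adjust the parameters to the real environment.
dbPool = tormysql.ConnectionPool(
    max_connections=20,         # at most 20 open MySQL connections
    idle_seconds=7200,          # recycle connections idle longer than this
    wait_connection_timeout=3,  # seconds to wait for a free connection
    host="127.0.0.1",
    user="app",
    passwd="secret",
    db="appdb",
    charset="utf8")

cachePool = tornadis.ClientPool(
    max_size=20,                # cap the number of pooled Redis clients
    host="127.0.0.1",
    port=6379,
    autoconnect=True)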

Client

I am trying to simulate traffic over the WebSocket. The code is as follows:

import sys
import json
import asyncio
import websockets

# SOCKET_URL and extraHeaders are defined elsewhere in the real script.
async def getData():
    for _ in range(100):          # open 100 connections, one after the other
        async with websockets.connect(SOCKET_URL, extra_headers=extraHeaders) as websocket:
            for _ in range(100):  # send 100 requests per connection
                await websocket.send("get data")
                reply = await websocket.recv()
                print(reply)

asyncio.get_event_loop().run_until_complete(getData())
I am running multiple instances of this client.
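Note that the loop above opens its connections one after another, so a single client process never exercises the server concurrently. One way to generate genuinely concurrent load from one process is to run each connection as a separate task; a rough sketch, reusing the same SOCKET_URL and extraHeaders, with arbitrary connection and message counts:

import asyncio
import websockets

async def oneClient(messages=100):
    # A single WebSocket connection sending a fixed number of requests.
    async with websockets.connect(SOCKET_URL, extra_headers=extraHeaders) as websocket:
        for _ in range(messages):
            await websocket.send("get data")
            await websocket.recv()

async def simulate(connections=50):
    # Open all connections concurrently instead of serially.
    await asyncio.gather(*(oneClient() for _ in range(connections)))

asyncio.get_event_loop().run_until_complete(simulate())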

The server works, but it can only handle about 25 connections. Beyond 25 connections, the latency of the server's replies increases. I want the replies to stay fast. How can I reduce the response latency? Is there something wrong with the code?

First, you have to find out where the bottleneck is. The simplest way is to test the functions separately, that is, to write unit tests. A longer way is to put a print statement after every yield and await; that will also tell you which yield/await is taking too long.

@xyers I have futures returned by the functions and I am awaiting them. Do I still need to test the individual functions for latency? Are the async functions blocking?

If you are awaiting a future, then some delay in the response is to be expected. Now you seem to be contradicting your question.

@xyers That makes sense.
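A minimal sketch of that timing approach, applied to the getFromDB coroutine from the server code above (the log format is arbitrary):

import time

@gen.coroutine
def getFromDB(query):
    start = time.monotonic()
    with (yield dbPool.Connection()) as conn:
        with conn.cursor() as cursor:
            yield cursor.execute(query)
            datas = cursor.fetchall()
            # Report how long the whole MySQL round trip took for this query.
            print("getFromDB took %.3fs: %s" % (time.monotonic() - start, query))
            return datas

The same pattern can be repeated inside getFromCache and around the gen.multi call in getData to see which await dominates the latency.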