Python 烧瓶+;芹菜+;Redis:使用者:无法连接到amqp://guest:**@127.0.0.1:5672/:超时

Python 烧瓶+;芹菜+;Redis:使用者:无法连接到amqp://guest:**@127.0.0.1:5672/:超时,python,flask,celery,celerybeat,Python,Flask,Celery,Celerybeat,我有一个简单的芹菜任务设置。为了运行它,我首先启动了redis服务器,然后激活虚拟环境并输入“芹菜节拍”,打开一个新的终端窗口进入虚拟环境并输入“芹菜工人” 这是随后的错误消息: 消费者:无法连接到amqp://guest:**@127.0.0.1:5672/:定时 出去 这是执行“芹菜节拍”后显示的配置详细信息: 配置-> . 经纪人->amqp://guest:**@本地主机:5672// . 加载器->芹菜.loaders.default.loader . 调度器->芹菜.beat.Per

我有一个简单的芹菜任务设置。为了运行它,我首先启动了redis服务器,然后激活虚拟环境并输入“芹菜节拍”,打开一个新的终端窗口进入虚拟环境并输入“芹菜工人”

这是随后的错误消息:

消费者:无法连接到amqp://guest:**@127.0.0.1:5672/:定时 出去

这是执行“芹菜节拍”后显示的配置详细信息:

配置-> . 经纪人->amqp://guest:**@本地主机:5672// . 加载器->芹菜.loaders.default.loader . 调度器->芹菜.beat.PersistentScheduler . db->celerybeat时间表 . 日志文件->[stderr]@%警告 . 最大间隔->5.00分钟(300秒)

烧瓶项目/app/\uuuuuuuuuuuuuuuuuuuuuu.py

from flask import Flask, request, jsonify
from celery import Celery
import celeryconfig

app = Flask(__name__)
app.config.from_object('config')

def make_celery(app):
    # create context tasks in celery
    celery = Celery(
        app.import_name,
        broker=app.config['BROKER_URL']
    )
    celery.conf.update(app.config)
    celery.config_from_object(celeryconfig)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery

celery = make_celery(app)

@app.route("/")
def hello():
    return "Hello World!"
import celery

@celery.task()
def print_hello():
    logger = print_hello.get_logger()
    logger.info("Hello")
import os

REDIS_HOST = "127.0.0.1" REDIS_PORT = 6379 BROKER_URL = environ.get('REDIS_URL', "redis://{host}:{port}/0".format(
    host=REDIS_HOST, port=str(REDIS_PORT))) CELERY_RESULT_BACKEND = BROKER_URL
from celery.schedules import crontab

CELERY_IMPORTS = ('app.tasks.test')
CELERY_TASK_RESULT_EXPIRES = 30
CELERY_TIMEZONE = 'UTC'

CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

CELERYBEAT_SCHEDULE = {
    'test-celery': {
        'task': 'app.tasks.test.print_hello',
        # Every minute
        'schedule': crontab(minute="*"),
    }
}
烧瓶项目/任务/测试.py

from flask import Flask, request, jsonify
from celery import Celery
import celeryconfig

app = Flask(__name__)
app.config.from_object('config')

def make_celery(app):
    # create context tasks in celery
    celery = Celery(
        app.import_name,
        broker=app.config['BROKER_URL']
    )
    celery.conf.update(app.config)
    celery.config_from_object(celeryconfig)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery

celery = make_celery(app)

@app.route("/")
def hello():
    return "Hello World!"
import celery

@celery.task()
def print_hello():
    logger = print_hello.get_logger()
    logger.info("Hello")
import os

REDIS_HOST = "127.0.0.1" REDIS_PORT = 6379 BROKER_URL = environ.get('REDIS_URL', "redis://{host}:{port}/0".format(
    host=REDIS_HOST, port=str(REDIS_PORT))) CELERY_RESULT_BACKEND = BROKER_URL
from celery.schedules import crontab

CELERY_IMPORTS = ('app.tasks.test')
CELERY_TASK_RESULT_EXPIRES = 30
CELERY_TIMEZONE = 'UTC'

CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

CELERYBEAT_SCHEDULE = {
    'test-celery': {
        'task': 'app.tasks.test.print_hello',
        # Every minute
        'schedule': crontab(minute="*"),
    }
}
烧瓶项目/config.py

from flask import Flask, request, jsonify
from celery import Celery
import celeryconfig

app = Flask(__name__)
app.config.from_object('config')

def make_celery(app):
    # create context tasks in celery
    celery = Celery(
        app.import_name,
        broker=app.config['BROKER_URL']
    )
    celery.conf.update(app.config)
    celery.config_from_object(celeryconfig)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery

celery = make_celery(app)

@app.route("/")
def hello():
    return "Hello World!"
import celery

@celery.task()
def print_hello():
    logger = print_hello.get_logger()
    logger.info("Hello")
import os

REDIS_HOST = "127.0.0.1" REDIS_PORT = 6379 BROKER_URL = environ.get('REDIS_URL', "redis://{host}:{port}/0".format(
    host=REDIS_HOST, port=str(REDIS_PORT))) CELERY_RESULT_BACKEND = BROKER_URL
from celery.schedules import crontab

CELERY_IMPORTS = ('app.tasks.test')
CELERY_TASK_RESULT_EXPIRES = 30
CELERY_TIMEZONE = 'UTC'

CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

CELERYBEAT_SCHEDULE = {
    'test-celery': {
        'task': 'app.tasks.test.print_hello',
        # Every minute
        'schedule': crontab(minute="*"),
    }
}
烧瓶项目/celeryconfig.py

from flask import Flask, request, jsonify
from celery import Celery
import celeryconfig

app = Flask(__name__)
app.config.from_object('config')

def make_celery(app):
    # create context tasks in celery
    celery = Celery(
        app.import_name,
        broker=app.config['BROKER_URL']
    )
    celery.conf.update(app.config)
    celery.config_from_object(celeryconfig)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery

celery = make_celery(app)

@app.route("/")
def hello():
    return "Hello World!"
import celery

@celery.task()
def print_hello():
    logger = print_hello.get_logger()
    logger.info("Hello")
import os

REDIS_HOST = "127.0.0.1" REDIS_PORT = 6379 BROKER_URL = environ.get('REDIS_URL', "redis://{host}:{port}/0".format(
    host=REDIS_HOST, port=str(REDIS_PORT))) CELERY_RESULT_BACKEND = BROKER_URL
from celery.schedules import crontab

CELERY_IMPORTS = ('app.tasks.test')
CELERY_TASK_RESULT_EXPIRES = 30
CELERY_TIMEZONE = 'UTC'

CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

CELERYBEAT_SCHEDULE = {
    'test-celery': {
        'task': 'app.tasks.test.print_hello',
        # Every minute
        'schedule': crontab(minute="*"),
    }
}

如果我需要提供其他详细信息,请告诉我。

amqp是rabbitmq而不是redis

Redis通常是

redis://:password@hostname:端口/db\U编号

我会手动修改配置,看看它是否有效

flask_app.config.update(
    CELERY_BROKER_URL='redis://localhost:6379',
    CELERY_RESULT_BACKEND='redis://localhost:6379'
)

amqp是rabbitmq而不是redis

Redis通常是

redis://:password@hostname:端口/db\U编号

我会手动修改配置,看看它是否有效

flask_app.config.update(
    CELERY_BROKER_URL='redis://localhost:6379',
    CELERY_RESULT_BACKEND='redis://localhost:6379'
)

make_scelly()
函数中删除
cellery.conf.update(app.config)
行,因此它就像

def make_celery(app):
    # create context tasks in celery
    celery = Celery(
        app.import_name,
        broker=app.config['BROKER_URL']
    )
    celery.conf.update(app.config) # remove this line.
    celery.config_from_object(celeryconfig)
    TaskBase = celery.Task
from celery.schedules import crontab

import os

REDIS_HOST = "127.0.0.1"
REDIS_PORT = 6379
BROKER_URL = os.environ.get(
    'REDIS_URL', "redis://{host}:{port}/0".format(
        host=REDIS_HOST, port=str(REDIS_PORT)))
CELERY_RESULT_BACKEND = BROKER_URL

CELERY_IMPORTS = ('app.tasks.test')
CELERY_TASK_RESULT_EXPIRES = 30
CELERY_TIMEZONE = 'UTC'

CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

CELERYBEAT_SCHEDULE = {
    'test-celery': {
        'task': 'app.tasks.test.print_hello',
        # Every minute
        'schedule': crontab(minute="*"),
    }
}

make_scelly()
函数中删除
cellery.conf.update(app.config)
行,因此它就像

def make_celery(app):
    # create context tasks in celery
    celery = Celery(
        app.import_name,
        broker=app.config['BROKER_URL']
    )
    celery.conf.update(app.config) # remove this line.
    celery.config_from_object(celeryconfig)
    TaskBase = celery.Task
from celery.schedules import crontab

import os

REDIS_HOST = "127.0.0.1"
REDIS_PORT = 6379
BROKER_URL = os.environ.get(
    'REDIS_URL', "redis://{host}:{port}/0".format(
        host=REDIS_HOST, port=str(REDIS_PORT)))
CELERY_RESULT_BACKEND = BROKER_URL

CELERY_IMPORTS = ('app.tasks.test')
CELERY_TASK_RESULT_EXPIRES = 30
CELERY_TIMEZONE = 'UTC'

CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

CELERYBEAT_SCHEDULE = {
    'test-celery': {
        'task': 'app.tasks.test.print_hello',
        # Every minute
        'schedule': crontab(minute="*"),
    }
}

Django也有同样的问题,但我的问题是在settings.py中使用了“BROKER\u URL”而不是“芹菜\u BROKER\u URL”。Cellery找不到URL,默认为rabbitmq端口而不是redis端口。

在Django中也有同样的问题,但我的问题是在settings.py中使用了“BROKER\u URL”而不是“Cellery\u BROKER\u URL”。芹菜找不到URL,默认为rabbitmq端口,而不是redis端口。

您可以尝试将
Celery.conf.update(app.config)Celery.config\u行的顺序从\u对象(celeryconfig)
更改为
Celery.config\u从\u对象(celeryconfig)芹菜.conf.update(app.config)更改为
Celery.config\u行吗
在_init_u2; py中,您可以尝试将_init_2;object(celeryconfig)
中的
芹菜.conf.update(app.config)
行的顺序从_init_2;对象(celeryconfig)芹菜.conf.update(app.config)更改为芹菜.config\u,从_init_2;对象(celeryceryceryceryceryconfig)芹菜.conf!我认为,因为我没有指定应用程序名称,它以某种方式默认为rabbitmq。当我运行这个:“芹菜节拍-一个应用程序。芹菜”,它的工作。谢谢你的提示!我认为,因为我没有指定应用程序名称,它以某种方式默认为rabbitmq。当我运行这个:“芹菜节拍-一个应用程序。芹菜”,它的工作。谢谢是的,我不得不删除这一行。不仅如此,我没有指定应用程序名,它以某种方式默认为rabbitmq。当我运行这个:“芹菜节拍-一个应用程序。芹菜”,它的工作。谢谢是的,我不得不删除这一行。不仅如此,我没有指定应用程序名,它以某种方式默认为rabbitmq。当我运行这个:“芹菜节拍-一个应用程序。芹菜”,它的工作。