
Go Sarama Kafka consumergroup function return


I am very new to Go and trying to make some adjustments to an open source library that consumes Kafka messages using the Sarama library. The original code can be found here.

The original package implements a PartitionConsumer, which works fine if you don't need read consistency across multiple consumers of the same topic; however, that does not work for me.
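For context, here is a minimal sketch of the PartitionConsumer pattern (my own illustration, not the library's actual code; the broker address, topic, and partition are placeholders). Each process running this reads the partition independently, so there is no offset sharing or group coordination between consumers:

package main

import (
    "log"

    "github.com/Shopify/sarama"
)

func main() {
    consumer, err := sarama.NewConsumer([]string{"localhost:9092"}, sarama.NewConfig())
    if err != nil {
        log.Panicf("Error creating consumer: %v", err)
    }
    defer consumer.Close()

    // Read a single partition directly; no group coordination happens here.
    pc, err := consumer.ConsumePartition("some-topic", 0, sarama.OffsetNewest)
    if err != nil {
        log.Panicf("Error consuming partition: %v", err)
    }
    defer pc.Close()

    for message := range pc.Messages() {
        log.Printf("Message: value = %s", string(message.Value))
    }
}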

I have done some work in the same application implementing the Sarama NewConsumerGroup package, using some examples I found online.

Here is the code I currently have running:

package main

import (
    "context"
    // "flag"
    "os"
    "os/signal"
    "sync"
    "syscall"

    "encoding/json"
    "log"
    "strings"

    "github.com/Shopify/sarama"
    // "github.com/Shopify/sarama/mocks"
)

// KafkaInput is used for receiving Kafka messages and
// transforming them into HTTP payloads.
type KafkaInput struct {
    config    *KafkaConfig
    // consumers []sarama.PartitionConsumer
    messages  chan *sarama.ConsumerMessage
}

var (
    brokers  = ""
    version  = ""
    group    = ""
    topics   = ""
    assignor = ""
    oldest   = true
    verbose  = false
)

// Consumer represents a Sarama consumer group consumer
type Consumer struct {
    ready chan bool
}

// NewKafkaInput creates an instance of the Kafka consumer client.
func NewKafkaInput(address string, config *KafkaConfig) *KafkaInput {
    /**
     * Construct a new Sarama configuration.
     * The Kafka cluster version has to be defined before the consumer/producer is initialized.
     */
    c := sarama.NewConfig()
    // Configuration options go here

    log.Println("Starting a new Sarama consumer")

    if verbose {
        sarama.Logger = log.New(os.Stdout, "[sarama] ", log.LstdFlags)
    }

    version, err := sarama.ParseKafkaVersion("2.1.1")
    if err != nil {
        log.Panicf("Error parsing Kafka version: %v", err)
    }

    c.Version = version

    if oldest {
        c.Consumer.Offsets.Initial = sarama.OffsetOldest
    }

    /**
     * Setup a new Sarama consumer group
     */
    consumer := Consumer{ready: make(chan bool)}

    ctx, cancel := context.WithCancel(context.Background())
    client, err := sarama.NewConsumerGroup(strings.Split(config.host, ","), config.group, c)

    if err != nil {
        log.Panicf("Error creating consumer group client: %v", err)
    }

    wg := &sync.WaitGroup{}
    wg.Add(1)
    go func() {
        defer wg.Done()
        for {
            if err := client.Consume(ctx, []string{config.topic}, &consumer); err != nil {
                log.Panicf("Error from consumer: %v", err)
            }

            // check if context was cancelled, signaling that the consumer should stop
            if ctx.Err() != nil {
                return
            }

            consumer.ready = make(chan bool)
        }

    }()

    <-consumer.ready // Await till the consumer has been set up
    log.Println("Sarama consumer up and running!...")

    sigterm := make(chan os.Signal, 1)
    signal.Notify(sigterm, syscall.SIGINT, syscall.SIGTERM)
    select {
    case <-ctx.Done():
        log.Println("terminating: context cancelled")
    case <-sigterm:
        log.Println("terminating: via signal")
    }
    cancel()
    wg.Wait()
    if err = client.Close(); err != nil {
        log.Panicf("Error closing client: %v", err)
    }

    i := &KafkaInput{
        config: config,
        // consumers: make([]sarama.PartitionConsumer, len(partitions)),
        messages: make(chan *sarama.ConsumerMessage, 256),
    }


    return i
}

// ConsumeClaim must start a consumer loop of ConsumerGroupClaim's Messages().
func (consumer *Consumer) ConsumeClaim(session sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error {
    // NOTE:
    // Do not move the code below to a goroutine.
    // The `ConsumeClaim` itself is called within a goroutine, see:
    // https://github.com/Shopify/sarama/blob/master/consumer_group.go#L27-L29
    for message := range claim.Messages() {

        log.Printf("Message claimed: value = %s, timestamp = %v, topic = %s", string(message.Value), message.Timestamp, message.Topic)
        session.MarkMessage(message, "")
    }

    return nil
}

// ErrorHandler should receive errors
func (i *KafkaInput) ErrorHandler(consumer sarama.PartitionConsumer) {
    for err := range consumer.Errors() {
        log.Println("Failed to read access log entry:", err)
    }
}

// Read blocks until a message arrives and copies its payload into data.
func (i *KafkaInput) Read(data []byte) (int, error) {
    message := <-i.messages

    if !i.config.useJSON {
        copy(data, message.Value)
        return len(message.Value), nil
    }

    var kafkaMessage KafkaMessage
    json.Unmarshal(message.Value, &kafkaMessage)

    buf, err := kafkaMessage.Dump()
    if err != nil {
        log.Println("Failed to decode access log entry:", err)
        return 0, err
    }

    copy(data, buf)

    return len(buf), nil

}

func (i *KafkaInput) String() string {
    return "Kafka Input: " + i.config.host + "/" + i.config.topic
}

// Setup is run at the beginning of a new session, before ConsumeClaim
func (consumer *Consumer) Setup(sarama.ConsumerGroupSession) error {
    // Mark the consumer as ready
    close(consumer.ready)
    return nil
}

// Cleanup is run at the end of a session, once all ConsumeClaim goroutines have exited
func (consumer *Consumer) Cleanup(sarama.ConsumerGroupSession) error {
    return nil
}
However, what I believe I need is for the NewKafkaInput function to return a *KafkaInput with the messages from the claim added to the struct (forgive me if I'm using the wrong terminology here, this is my first rodeo).

In the original example, this is done here:

func NewKafkaInput(address string, config *KafkaConfig) *KafkaInput {

    ...

    go func(consumer sarama.PartitionConsumer) {
        defer consumer.Close()

        for message := range consumer.Messages() {
            i.messages <- message
        }
    }(consumer)

    ...

}
It's entirely possible that I'm making a complete mess of this, but any help and input is greatly appreciated.


Thanks

Here is the solution to my problem. I had goroutines blocking the main function and needed to break them out. If the code below doesn't make any sense, here is a link to the program I was modifying: . If I can get a response from the owner, I plan on cleaning up the code and submitting a pull request, or possibly publishing a fork.

package main

import (
    "context"
    "encoding/json"
    "strings"

    "os"

    "log"

    "github.com/Shopify/sarama"
)

// KafkaInput is used for receiving Kafka messages and
// transforming them into HTTP payloads.
type KafkaInput struct {
    sarama.ConsumerGroup
    config   *KafkaConfig
    consumer Consumer
    messages chan *sarama.ConsumerMessage
}

// Consumer represents a Sarama consumer group consumer
type Consumer struct {
    ready    chan bool
    messages chan *sarama.ConsumerMessage
}

var (
    brokers  = ""
    version  = ""
    group    = ""
    topics   = ""
    assignor = ""
    oldest   = true
    verbose  = false
)

// NewKafkaInput creates an instance of the Kafka consumer client.
func NewKafkaInput(address string, config *KafkaConfig) *KafkaInput {
    /**
     * Construct a new Sarama configuration.
     * The Kafka cluster version has to be defined before the consumer/producer is initialized.
     */
    c := sarama.NewConfig()
    // Configuration options go here

    log.Printf("KafkaConfig: %s", config.host)
    log.Printf("KafkaConfig: %s", config.group)
    log.Printf("KafkaConfig: %s", config.topic)

    log.Println("Starting a new Sarama consumer")

    if verbose {
        sarama.Logger = log.New(os.Stdout, "[sarama] ", log.LstdFlags)
    }

    version, err := sarama.ParseKafkaVersion("2.1.1")
    if err != nil {
        log.Panicf("Error parsing Kafka version: %v", err)
    }

    c.Version = version

    if oldest {
        c.Consumer.Offsets.Initial = sarama.OffsetOldest
    }

    group, err := sarama.NewConsumerGroup(strings.Split(config.host, ","), config.group, c)
    if err != nil {
        log.Panicf("Error creating consumer group client: %v", err)
    }

    /**
     * Setup a new Sarama consumer group
     */
    consumer := Consumer{
        ready:    make(chan bool),
        messages: make(chan *sarama.ConsumerMessage, 256),
    }

    i := &KafkaInput{
        ConsumerGroup: group,
        config:        config,
        // Read pulls from the same channel that ConsumeClaim pushes to.
        messages:      consumer.messages,
        consumer:      consumer,
    }

    go i.loop([]string{config.topic})
    return i
}

// ConsumeClaim marks each claimed message and pushes it onto the consumer's message channel.
func (i *KafkaInput) ConsumeClaim(s sarama.ConsumerGroupSession, c sarama.ConsumerGroupClaim) error {
    for msg := range c.Messages() {
        s.MarkMessage(msg, "")
        i.Push(msg)
    }
    return nil
}

func (i *KafkaInput) loop(topic []string) {
    ctx := context.Background()
    for {
        // Consume blocks for the duration of a session; after a rebalance
        // it returns and must be called again, hence the loop.
        if err := i.Consume(ctx, topic, i); err != nil {
            return
        }
    }
}

// Push Messages
func (i *KafkaInput) Push(m *sarama.ConsumerMessage) {
    if i.consumer.messages != nil {
        log.Printf("MSGPUSH: %s", m)
        i.consumer.messages <- m
    }
}

func (i *KafkaInput) Read(data []byte) (int, error) {

    message := <-i.messages
    log.Printf("Msg: %s", string(message.Value))
    if !i.config.useJSON {
        copy(data, message.Value)
        return len(message.Value), nil
    }

    var kafkaMessage KafkaMessage
    if err := json.Unmarshal(message.Value, &kafkaMessage); err != nil {
        log.Println("Failed to parse access log entry:", err)
        return 0, err
    }

    buf, err := kafkaMessage.Dump()
    if err != nil {
        log.Println("Failed to decode access log entry:", err)
        return 0, err
    }

    copy(data, buf)

    return len(buf), nil

}

func (i *KafkaInput) String() string {
    return "Kafka Input: " + i.config.host + "/" + i.config.topic
}

// Setup is run at the beginning of a new session, before ConsumeClaim
func (i *KafkaInput) Setup(s sarama.ConsumerGroupSession) error {
    return nil
}

// Cleanup is run at the end of a session, once all ConsumeClaim goroutines have exited
func (i *KafkaInput) Cleanup(s sarama.ConsumerGroupSession) error {
    return nil
}