JMS: if my producer is producing, why can't the consumer consume? It's stuck at poll()


I'm publishing to a remote Kafka server and trying to consume messages from that remote server. (Kafka v 0.90.1) Publishing works fine, but consuming does not.

Publisher

package org.test;

import java.io.IOException;
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;


public class Producer {

    private void generateMessgaes() throws IOException {
        String topic = "MY_TOPIC";

        Properties props = new Properties();

        props.put("bootstrap.servers", "kafka.xx.com:9092");
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("serializer.class", "org.apache.kafka.common.serialization.StringSerializer");


        KafkaProducer<String, String> producer = null;
        try {
             producer = new KafkaProducer<>(props);
            for (int i = 0; i < 10; i++) {
                producer.send(new ProducerRecord<String, String>(topic, "test msg"));
                System.out.println("producing---");
            }

        } catch (Throwable e) {
            e.printStackTrace();
            System.out.println("Error in publishing messages to the topic : " + topic);

        } finally {
            producer.close();
        }
    }

    public static void main(String[] args) throws IOException {
        Producer producer = new Producer();
        producer.generateMessgaes();
        System.out.println("$$$$$");
    }
}

I can see "producing---" and "$$$$$" getting printed. But when I try to consume, I don't see the "polling msges" output. It gets stuck at poll(timeout).

Any clue?

Consumer

package org.test;

import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;


public class Listener {

    public void start() throws Exception {

        String topic = "MY_TOPIC";

        List<String> topics = Arrays.asList(topic);

        Properties props = new Properties();
        props.put("bootstrap.servers", "kafka.xx.com:9092");
        props.put("enable.auto.commit", true);
        props.put("receive.buffer.bytes", 262144);
        props.put("consumer.timeout.ms", 10000);
        props.put("session.timeout.ms", 7000);
        props.put("heartbeat.interval.ms", 1000);
        props.put("auto.offset.reset", "earliest");
        props.put("group.id", "test");
        props.put("fetch.min.bytes", 1);
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("serializer.class", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);
        consumer.subscribe(topics);

        try {
            while (true) {

                ConsumerRecords<String, String> records = consumer.poll(100);
                System.out.println("polling msges : " + records.count());
                for (ConsumerRecord<String, String> record : records) {
    System.out.println("kafka record : " + record.value());
                }
            }
        } catch (Throwable e) {
            e.printStackTrace();
            System.out.println("eror in polling");
        } finally {
            consumer.close();
        }
    }

    public static void main(String args[]) throws Exception {

        Listener listener = new Listener();
        listener.start();

    }
}

Disclaimer: I don't know Kafka, but I do know messaging.

First thing, about topics: by default, subscriptions are not durable, so if the producer sends messages to a topic and nobody is listening, the messages get dropped.

Second thing, about polling: you're polling for 100 ms, and then if nothing shows up an exception gets thrown, which kicks you out of your loop.

If the consumer starts when there are no messages (because, as I described, the producer's messages went into the bit bucket), then the consumer fails since there is nothing to consume. So I'd say everything is working as expected.

Two options (see the sketch after this list):
- Add a much larger initial poll to give the consumer a chance to see the messages (assuming you know the producer will produce within that time frame).
- Change your logic so that exceptions keep you in the while loop and you continue consuming, and find a different way to stop the consumer.
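
A minimal sketch of what those two suggestions could look like when applied to the consumer loop above; the 30000 ms timeout and the running flag are illustrative assumptions, not values taken from the question:

    boolean running = true;  // illustrative stop flag, not part of the original code
    consumer.subscribe(topics);
    while (running) {
        try {
            // Option 1: a much larger poll timeout gives the producer time to publish
            ConsumerRecords<String, String> records = consumer.poll(30000);
            System.out.println("polling msges : " + records.count());
            for (ConsumerRecord<String, String> record : records) {
                System.out.println("kafka record : " + record.value());
            }
        } catch (Exception e) {
            // Option 2: log the error but stay in the loop and keep consuming;
            // stop the consumer by flipping the running flag (or another signal) instead
            e.printStackTrace();
        }
    }
    consumer.close();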

first thing: > I started the listener first and then started the producer.. so I think that's not the issue. second thing: > no exceptions.. I'll try further and see. The thing is, the same consumer and producer work fine with a local Kafka server, just not with the remote Kafka server. The most frustrating part is that no exception is ever thrown to the consumer.

You didn't originally say it works locally; that changes my thinking. Also, starting the listener first runs into the issue I described, where the poll times out before the producer gets going. The only other thing I can think of is that there is a time lag when running remotely that aggravates the problem, or that something about using a local server changes the behavior.

The network connection is stable (the producer sends its messages quickly). No luck so far.. this doesn't work.. clueless.

This statement is wrong: "First thing, about topics: by default, subscriptions are not durable, so if the producer sends messages…"