Java - No pending reply: ConsumerRecord

I am trying to use the ReplyingKafkaTemplate, and occasionally I see the message below:

No pending reply: ConsumerRecord(topic = request-reply-topic, partition = 8, offset = 1, CreateTime = 1544653843269, serialized key size = -1, serialized value size = 1609, headers = RecordHeaders(headers = [RecordHeader(key = kafka_correlationId, value = [-14, 65, 21, -118, 70, -94, 72, 87, -113, -91, 92, 72, -124, -110, -64, -94])], isReadOnly = false), key = null, ...) with correlationId: [-18271255759235816475365319231847350110], perhaps timed out, or using a shared reply topic

It originates from the code below:

RequestReplyFuture<K, V, R> future = this.futures.remove(correlationId);
if (future == null) {
  if (this.sharedReplyTopic) {
    if (this.logger.isDebugEnabled()) {
      this.logger.debug(missingCorrelationLogMessage(record, correlationId));
    }
  }
  else if (this.logger.isErrorEnabled()) {
    this.logger.error(missingCorrelationLogMessage(record, correlationId));
  }
}

But this only happens intermittently.

I have also set sharedReplyTopic to false, as shown below, and tried forcing a longer reply timeout:

ReplyingKafkaTemplate<String, Object, Object> replyKafkaTemplate = new ReplyingKafkaTemplate<>(pf, container);
replyKafkaTemplate.setSharedReplyTopic(false);
replyKafkaTemplate.setReplyTimeout(10000);
return replyKafkaTemplate;
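
For reference, this is roughly how the pf (producer factory) and container (reply container) used above can be wired together; a minimal sketch, where the bean method, the injected consumer factory, and the reply topic/group names are assumptions rather than the question's actual code:

@Bean
public ReplyingKafkaTemplate<String, Object, Object> replyKafkaTemplate(
        ProducerFactory<String, Object> pf,
        ConsumerFactory<String, Object> replyConsumerFactory) {
    // Dedicated reply container; each client instance should listen on its own
    // reply topic (or dedicated partition). Topic and group id are placeholders.
    ContainerProperties containerProperties = new ContainerProperties("request-reply-topic.reply");
    containerProperties.setGroupId("reply-group");
    KafkaMessageListenerContainer<String, Object> container =
            new KafkaMessageListenerContainer<>(replyConsumerFactory, containerProperties);

    ReplyingKafkaTemplate<String, Object, Object> replyKafkaTemplate =
            new ReplyingKafkaTemplate<>(pf, container);
    replyKafkaTemplate.setSharedReplyTopic(false);
    replyKafkaTemplate.setReplyTimeout(10000);
    return replyKafkaTemplate;
}

Declared as a bean, the template starts and stops its reply container with the application context.
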
My container factory is as follows:

@Bean
public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, Object>> kafkaListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(consumerFactory());

    factory.setBatchListener(false);
    factory.getContainerProperties().setPollTimeout(1000);
    factory.getContainerProperties().setIdleEventInterval(10000L);
    factory.setConcurrency(3);
    factory.setReplyTemplate(kafkaTemplate());
    return factory;
}

If it is intermittent, then most likely the reply is simply taking too long to arrive. The message seems clear:

perhaps timed out, or using a shared reply topic

Each client instance must use its own reply topic, or a dedicated partition.
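
To make the timing concrete, this is roughly where the timeout surfaces on the requesting side; a sketch rather than the question's code, with the method, payload type and the explicit 10-second wait being assumptions (the topic name is taken from the log above):

public Object sendAndWait(ReplyingKafkaTemplate<String, Object, Object> template, Object payload) throws Exception {
    ProducerRecord<String, Object> record = new ProducerRecord<>("request-reply-topic", payload);
    RequestReplyFuture<String, Object, Object> future = template.sendAndReceive(record);
    // Blocks until a reply with a matching correlation id arrives, or fails once the
    // template's reply timeout expires. A timed-out entry is removed from the pending
    // futures, so a reply that arrives later produces the "No pending reply" log.
    ConsumerRecord<String, Object> reply = future.get(10, TimeUnit.SECONDS);
    return reply.value();
}
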

EDIT

You will get that log if the correlation id of a received record does not match an entry currently in this.futures (the pending replies). That can only happen if:

  • the request timed out (in which case there will be a corresponding WARN log)
  • the template was stop()ped (in which case this.futures is cleared)
  • an already-processed reply is redelivered for some reason (should not happen)
  • a reply is received before the key is added to this.futures (can't happen, since the entry is inserted before the send())
  • the server side sends two or more replies for the same request
  • some other application is sending data to the same reply topic. It would help if you can reproduce this with DEBUG logging enabled, since the correlation key is then also logged on the sending side

  • Fixed the issue by adding the @Header(KafkaHeaders.CORRELATION_ID) parameter to the consumer:

    @KafkaListener(topics = "${kafka.topic.model}")
    @SendTo("replymodeltopic")
    @Override
    public Model receive(ConsumerRecord<String, Model> record, @Header(KafkaHeaders.CORRELATION_ID) byte[] correlation) {
        record.headers().add(KafkaHeaders.CORRELATION_ID, correlation);
        return record.value();
    }
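
    An alternative sketch (not the question's code) is to carry the correlation id on the reply message itself instead of mutating the consumed record's headers; the topic names mirror the listener above, while the generics and the method name are assumptions:

    @KafkaListener(topics = "${kafka.topic.model}")
    @SendTo("replymodeltopic")
    public Message<Model> receiveAndReply(ConsumerRecord<String, Model> record,
            @Header(KafkaHeaders.CORRELATION_ID) byte[] correlation) {
        // Echo the request's correlation id as a header on the reply message
        // (org.springframework.messaging.support.MessageBuilder).
        return MessageBuilder.withPayload(record.value())
                .setHeader(KafkaHeaders.CORRELATION_ID, correlation)
                .build();
    }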
    
    In my configuration I have:

    @Configuration
    @EnableKafka
    public class ReceiverConfig {
    
        @Value("${kafka.bootstrap-servers}")
        private String bootstrapServers;
    
        @Value("${kafka.tunnel.group}")
        private String tunnelGroup;
    
        @Value("${kafka.topic.json.reply}")
        private String jsonTopicReply;
    
        @Bean
        public Map<String, Object> consumerConfigs() {
            Map<String, Object> props = new HashMap<>();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
            props.put(ConsumerConfig.GROUP_ID_CONFIG, tunnelGroup);
    
            return props;
        }
    
        @Bean
        public Map<String, Object> producerConfigs() {
            Map<String, Object> props = new HashMap<>();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
            return props;
        }
    
        @Bean
        public ConsumerFactory<String, Model> consumerFactory() {
            return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(), new JsonDeserializer<>(Model.class));
        }
    
        @Bean
        public ConcurrentKafkaListenerContainerFactory<String, Model> kafkaListenerContainerFactory() {
            ConcurrentKafkaListenerContainerFactory<String, Model> factory = new ConcurrentKafkaListenerContainerFactory<>();
            factory.setConsumerFactory(consumerFactory());
            factory.setReplyTemplate(replyTemplate());
            return factory;
        }
    
        @Bean
        public ProducerFactory<String, Model> producerFactory() {
            return new DefaultKafkaProducerFactory<>(producerConfigs());
        }
    
        @Bean
        public KafkaTemplate<String, Model> replyTemplate() {
            KafkaTemplate<String, Model> kafkaTemplate = new KafkaTemplate<>(producerFactory());
            kafkaTemplate.setDefaultTopic(jsonTopicReply);
            return kafkaTemplate;
        }
    
    }
    
    Thanks for the response. From my logs the timeout is happening far too quickly: I see the consumer receive the request at 2018-12-13 02:37:46,713 | INFO | org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1, while at 2018-12-13 02:37:46,839 | ERROR | replyContainer-C-1 | IPC | ReplyingKafkaTemplate-309 | No pending reply: ConsumerRecord ...

    If you have more than one client instance, sharedReplyTopic should be true.
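
    Per the check quoted at the top of the question, that flag only changes how an unmatched reply is logged (DEBUG instead of ERROR), which is the expected situation when several instances share one reply topic, e.g. in the template bean shown earlier:

    // Unmatched correlation ids (another instance's replies) are then logged at DEBUG.
    replyKafkaTemplate.setSharedReplyTopic(true);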