Java 如何配置spring kafka以启动kafka生成器的其他实例

Java 如何配置spring kafka以启动kafka生成器的其他实例,java,spring-boot,apache-kafka,spring-kafka,Java,Spring Boot,Apache Kafka,Spring Kafka,我在请求/应答配置中使用了一个卡夫卡生产者。当一个producer实例启动时,它工作得非常完美。但是,在启动producer的第二个实例时,seconds实例将不起作用。它将正确地将消息写入主题,消费者将处理消息并发送回复,但是生产者将找不到正在等待的回复消息,并且该回复消息超时。消息似乎已由生产者的第一个实例拾取。因为第一个实例不需要此回复消息。请求/答复消息失败。是否缺少使第二个实例工作的任何配置?该POC将在Openshift POD中使用,因此它应该能够扩展到多个生产者和多个消费者实例。

我在请求/应答配置中使用了一个卡夫卡生产者。当一个producer实例启动时,它工作得非常完美。但是,在启动producer的第二个实例时,seconds实例将不起作用。它将正确地将消息写入主题,消费者将处理消息并发送回复,但是生产者将找不到正在等待的回复消息,并且该回复消息超时。消息似乎已由生产者的第一个实例拾取。因为第一个实例不需要此回复消息。请求/答复消息失败。是否缺少使第二个实例工作的任何配置?该POC将在Openshift POD中使用,因此它应该能够扩展到多个生产者和多个消费者实例。以下是我对消费者和生产者的配置。谢谢

卡夫卡制作人配置

public class KafkaConfig {

    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;

    @Value("${spring.kafka.consumer.group-id}")
    private String groupId;

    @Value("${kafka.topic.request-reply-topic}")
    String requestReplyTopic;

    @Value("${kafka.request-reply.timeout-ms}")
    private Long replyTimeout;

    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);

        return props;
    }

    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
       // props.put(ProducerConfig.RETRIES_CONFIG, 0);
       // props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        //props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        //props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);

        return props;
    }

    @Bean
    public ReplyingKafkaTemplate<String, InGetAccountInfo, AccountInquiryDto> replyKafkaTemplate(ProducerFactory<String, InGetAccountInfo> pf, KafkaMessageListenerContainer<String, AccountInquiryDto> container){
        return new ReplyingKafkaTemplate(pf, container);

    }

    @Bean
    public ProducerFactory<String, InGetAccountInfo> requestProducerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    @Bean
    public ConsumerFactory<String, AccountInquiryDto> replyConsumerFactory() {
        JsonDeserializer<AccountInquiryDto> jsonDeserializer = new JsonDeserializer<>();
        jsonDeserializer.addTrustedPackages(InGetAccountInfo.class.getPackage().getName());
        jsonDeserializer.addTrustedPackages(AccountInquiryDto.class.getPackage().getName());
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),jsonDeserializer);
    }

    @Bean
    public KafkaMessageListenerContainer<String, AccountInquiryDto> replyContainer(ConsumerFactory<String, AccountInquiryDto> cf) {
        ContainerProperties containerProperties = new ContainerProperties(requestReplyTopic);
        return new KafkaMessageListenerContainer<>(cf, containerProperties);
    }



    @Bean
    public KafkaAdmin admin() {
        Map<String, Object> configs = new HashMap<>();
        configs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        return new KafkaAdmin(configs);
    }



    @Bean
    public KafkaAsyncService kafkaAsyncService(){
        return new KafkaAsyncService();
    }


}


 
public class KafkaConfig {

  @Value("${spring.kafka.bootstrap-servers}")
  private String bootstrapServers;

  @Value("${spring.kafka.consumer.group-id}")
  private String groupId;

  @Value("${kafka.topic.acct-info.request}")
  private String requestTopic;

  @Value("${kafka.topic.request-reply.timeout-ms}")
  private Long replyTimeout;

  @Bean
  public Map<String, Object> consumerConfigs() {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    return props;
  }

  @Bean
  public Map<String, Object> producerConfigs() {
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    return props;
  }

  @Bean
  public ConsumerFactory<String, InGetAccountInfo> requestConsumerFactory() {
    JsonDeserializer<InGetAccountInfo> jsonDeserializer = new JsonDeserializer<>();
    jsonDeserializer.addTrustedPackages(InGetAccountInfo.class.getPackage().getName());
    return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),jsonDeserializer);
  }

  @Bean
  public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, InGetAccountInfo>> requestReplyListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, InGetAccountInfo> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(requestConsumerFactory());
    factory.setConcurrency(3);
    factory.setReplyTemplate(replyTemplate());
    return factory;
  }

  @Bean
  public ProducerFactory<String, AccountInquiryDto> replyProducerFactory() {
    return new DefaultKafkaProducerFactory<>(producerConfigs());
  }

  @Bean
  public KafkaTemplate<String, AccountInquiryDto> replyTemplate() {
    return new KafkaTemplate<>(replyProducerFactory());
  }

  @Bean
  public DepAcctInqConsumerController Controller() {
    return new DepAcctInqConsumerController();
  }
  @Bean

  public KafkaAdmin admin() {
    Map<String, Object> configs = new HashMap<>();
    configs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    return new KafkaAdmin(configs);
  }

  @Bean
  public NewTopic requestTopic() {
    Map<String, String> configs = new HashMap<>();
    configs.put("retention.ms", replyTimeout.toString());
    return new NewTopic(requestTopic, 2, (short) 2).configs(configs);
  }


}
公共类卡夫卡图{
@值(${spring.kafka.bootstrap servers}”)
私有字符串引导服务器;
@值(${spring.kafka.consumer.groupid})
私有字符串groupId;
@值(${kafka.topic.request-reply-topic}”)
字符串requestReplyTopic;
@值(${kafka.request-reply.timeout ms}”)
私人长时间回复;
@豆子
公共地图使用者配置(){
Map props=newhashmap();
put(ConsumerConfig.BOOTSTRAP\u server\u CONFIG,bootstrapserver);
put(ConsumerConfig.KEY\u反序列化程序\u类\u配置,StringDeserializer.CLASS);
put(ConsumerConfig.VALUE\u反序列化程序\u类\u配置,JsonDeserializer.CLASS);
props.put(ConsumerConfig.GROUP\u ID\u CONFIG,groupId);
返回道具;
}
@豆子
公共地图产品配置(){
Map props=newhashmap();
put(ProducerConfig.BOOTSTRAP\u server\u CONFIG,bootstrapserver);
//props.put(ProducerConfig.RETRIES\u CONFIG,0);
//props.put(ProducerConfig.BATCH\u SIZE\u CONFIG,16384);
//props.put(ProducerConfig.LINGER\u MS\u CONFIG,1);
//props.put(ProducerConfig.BUFFER\u MEMORY\u CONFIG,33554432);
put(ProducerConfig.KEY\u SERIALIZER\u CLASS\u CONFIG,StringSerializer.CLASS);
put(ProducerConfig.VALUE\u SERIALIZER\u CLASS\u CONFIG,JsonSerializer.CLASS);
返回道具;
}
@豆子
公共回复KafkatTemplate回复KafkatTemplate(生产厂家pf,KafkamMessageListenerContainer容器){
返回新的回复KafkatTemplate(pf,容器);
}
@豆子
公共生产工厂请求生产工厂(){
返回新的DefaultKafkaProducerFactory(producerConfigs());
}
@豆子
公共消费者工厂回复ConsumerFactory(){
JsonDeserializer JsonDeserializer=新的JsonDeserializer();
jsonDeserializer.addTrustedPackages(InGetAccountInfo.class.getPackage().getName());
jsonDeserializer.addTrustedPackages(AccountInquiryDto.class.getPackage().getName());
返回新的DefaultKafkanConsumerFactory(consumerConfigs()、新的StringDeserializer()、jsonDeserializer);
}
@豆子
public KafkCamessageListenerContainer replyContainer(ConsumerFactory cf){
ContainerProperties ContainerProperties=新的ContainerProperties(requestReplyTopic);
返回新的KafkamessageliesStenerContainer(cf,containerProperties);
}
@豆子
公共卡夫卡行政管理(){
Map configs=new HashMap();
configs.put(ConsumerConfig.BOOTSTRAP\u SERVERS\u CONFIG,bootstrapserver);
返回新的卡夫卡管理员(配置);
}
@豆子
公共KafkaAsyncService KafkaAsyncService(){
返回新的KafkaAsyncService();
}
}
卡夫卡制作人阶层

public AccountInquiryDto getModelResponse(InGetAccountInfo accountInfo) throws Exception{

        LOGGER.info("Received request for request  for account " + accountInfo);

        // create producer record
        ProducerRecord<String, InGetAccountInfo> record = new ProducerRecord<String, InGetAccountInfo>(requestTopic,accountInfo);
        // set reply topic in header
        record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, requestReplyTopic.getBytes()));

        // post in kafka topic
        RequestReplyFuture<String, InGetAccountInfo, AccountInquiryDto> sendAndReceive = kafkaTemplate.sendAndReceive(record);

        // confirm if producer produced successfully
        SendResult<String, InGetAccountInfo> sendResult = sendAndReceive.getSendFuture().get();

       // //print all headers
        sendResult.getProducerRecord().headers().forEach(header -> System.out.println(header.key() + ":" + header.value().toString()));

        // get consumer record
        ConsumerRecord<String, AccountInquiryDto> consumerRecord = sendAndReceive.get();

        ObjectMapper mapper = new ObjectMapper();

        AccountInquiryDto modelResponse = mapper.convertValue(
                consumerRecord.value(),
                new TypeReference<AccountInquiryDto>() { });


        LOGGER.info("Returning record for " + modelResponse);

        return modelResponse;

    }
  @KafkaListener(topics = "${kafka.topic.acct-info.request}", containerFactory = "requestReplyListenerContainerFactory")
  @SendTo
  public Message<?> listenPartition0(InGetAccountInfo accountInfo,
                                     @Header(KafkaHeaders.REPLY_TOPIC) byte[] replyTo,
                                     @Header(KafkaHeaders.RECEIVED_PARTITION_ID) int id) {

    try {

      LOGGER.info("Received request for partition id = " +  id);
      LOGGER.info("Received request for accountInfo = " +  accountInfo.getAccountNumber());

      AccountInquiryDto accountInfoDto  = getAccountInquiryDto(accountInfo);

      LOGGER.info("Returning accountInfoDto = " +  accountInfoDto.toString());

      return MessageBuilder.withPayload(accountInfoDto)
              .setHeader(KafkaHeaders.TOPIC, replyTo)
              .setHeader(KafkaHeaders.RECEIVED_PARTITION_ID, id)
              .build();


    } catch (Exception e) {
      LOGGER.error(e.toString(),e);
    }

    return null;
  }
public AccountInquiryDto getModelResponse(InGetAccountInfo accountInfo)引发异常{
LOGGER.info(“收到账户请求请求”+账户信息);
//创建制作人记录
ProducerRecord记录=新的ProducerRecord(请求主题,accountInfo);
//在标题中设置回复主题
record.headers().add(新的RecordHeader(KafkaHeaders.REPLY_主题,requestReplyTopic.getBytes());
//卡夫卡主题中的帖子
RequestReplyFuture sendAndReceive=kafkaTemplate.sendAndReceive(记录);
//确认生产商是否成功生产
SendResult SendResult=SendReceive.getSendFuture().get();
////打印所有标题
sendResult.getProducerRecord().headers().forEach(header->System.out.println(header.key()+”:“+header.value().toString());
//获取消费者记录
ConsumerRecord ConsumerRecord=sendAndReceive.get();
ObjectMapper mapper=新的ObjectMapper();
AccountInquirryDTO modelResponse=mapper.convertValue(
consumerRecord.value(),
新类型引用(){});
LOGGER.info(“返回“+modelResponse”的记录);
返回模式响应;
}
卡夫卡消费者配置

public class KafkaConfig {

    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;

    @Value("${spring.kafka.consumer.group-id}")
    private String groupId;

    @Value("${kafka.topic.request-reply-topic}")
    String requestReplyTopic;

    @Value("${kafka.request-reply.timeout-ms}")
    private Long replyTimeout;

    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);

        return props;
    }

    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
       // props.put(ProducerConfig.RETRIES_CONFIG, 0);
       // props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        //props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        //props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);

        return props;
    }

    @Bean
    public ReplyingKafkaTemplate<String, InGetAccountInfo, AccountInquiryDto> replyKafkaTemplate(ProducerFactory<String, InGetAccountInfo> pf, KafkaMessageListenerContainer<String, AccountInquiryDto> container){
        return new ReplyingKafkaTemplate(pf, container);

    }

    @Bean
    public ProducerFactory<String, InGetAccountInfo> requestProducerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    @Bean
    public ConsumerFactory<String, AccountInquiryDto> replyConsumerFactory() {
        JsonDeserializer<AccountInquiryDto> jsonDeserializer = new JsonDeserializer<>();
        jsonDeserializer.addTrustedPackages(InGetAccountInfo.class.getPackage().getName());
        jsonDeserializer.addTrustedPackages(AccountInquiryDto.class.getPackage().getName());
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),jsonDeserializer);
    }

    @Bean
    public KafkaMessageListenerContainer<String, AccountInquiryDto> replyContainer(ConsumerFactory<String, AccountInquiryDto> cf) {
        ContainerProperties containerProperties = new ContainerProperties(requestReplyTopic);
        return new KafkaMessageListenerContainer<>(cf, containerProperties);
    }



    @Bean
    public KafkaAdmin admin() {
        Map<String, Object> configs = new HashMap<>();
        configs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        return new KafkaAdmin(configs);
    }



    @Bean
    public KafkaAsyncService kafkaAsyncService(){
        return new KafkaAsyncService();
    }


}


 
public class KafkaConfig {

  @Value("${spring.kafka.bootstrap-servers}")
  private String bootstrapServers;

  @Value("${spring.kafka.consumer.group-id}")
  private String groupId;

  @Value("${kafka.topic.acct-info.request}")
  private String requestTopic;

  @Value("${kafka.topic.request-reply.timeout-ms}")
  private Long replyTimeout;

  @Bean
  public Map<String, Object> consumerConfigs() {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    return props;
  }

  @Bean
  public Map<String, Object> producerConfigs() {
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    return props;
  }

  @Bean
  public ConsumerFactory<String, InGetAccountInfo> requestConsumerFactory() {
    JsonDeserializer<InGetAccountInfo> jsonDeserializer = new JsonDeserializer<>();
    jsonDeserializer.addTrustedPackages(InGetAccountInfo.class.getPackage().getName());
    return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),jsonDeserializer);
  }

  @Bean
  public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, InGetAccountInfo>> requestReplyListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, InGetAccountInfo> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(requestConsumerFactory());
    factory.setConcurrency(3);
    factory.setReplyTemplate(replyTemplate());
    return factory;
  }

  @Bean
  public ProducerFactory<String, AccountInquiryDto> replyProducerFactory() {
    return new DefaultKafkaProducerFactory<>(producerConfigs());
  }

  @Bean
  public KafkaTemplate<String, AccountInquiryDto> replyTemplate() {
    return new KafkaTemplate<>(replyProducerFactory());
  }

  @Bean
  public DepAcctInqConsumerController Controller() {
    return new DepAcctInqConsumerController();
  }
  @Bean

  public KafkaAdmin admin() {
    Map<String, Object> configs = new HashMap<>();
    configs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    return new KafkaAdmin(configs);
  }

  @Bean
  public NewTopic requestTopic() {
    Map<String, String> configs = new HashMap<>();
    configs.put("retention.ms", replyTimeout.toString());
    return new NewTopic(requestTopic, 2, (short) 2).configs(configs);
  }


}
公共类卡夫卡图{
@值(${spring.kafka.bootstrap servers}”)
私有字符串引导服务器;
@值(${spring.kafka.consumer.groupid})
私有字符串groupId;
@值(${kafka.topic.acct info.request})
私有字符串请求主题;
@值(${kafka.topic.request-reply.timeout ms}”)
私人长时间回复;
@豆子
公共地图使用者配置(){
Map props=newhashmap();
put(ConsumerConfig.BOOTSTRAP\u server\u CONFIG,bootstrapserver);
put(ConsumerConfig.KEY\u反序列化程序\u类\u配置,StringDeserializer.CLASS);
put(ConsumerConfig.VALUE\u反序列化程序\u类\u配置,JsonDeserializer.CLASS);
props.put(ConsumerConfig.GROUP\u ID\u CONFIG,groupId);
返回道具;
}
@豆子
公共地图产品配置(){
Map props=newhashmap();
put(ProducerConfig.BOOTSTRAP\u server\u CONFIG,bootstrapserver);
put(ProducerConfig.KEY\u SERIALIZER\u CLASS\u CONFIG,StringSerializer.cl