Spring integration 如何在SpringXD中从Kafka接收器模块获得确认时手动提交Kafka源模块上的偏移量?

Spring integration 如何在SpringXD中从Kafka接收器模块获得确认时手动提交Kafka源模块上的偏移量?,spring-integration,spring-xd,spring-kafka,Spring Integration,Spring Xd,Spring Kafka,在XD流中,消息通过源模块从卡夫卡主题消费,然后发送到接收器卡夫卡模块。开发自定义源和接收卡夫卡模块的原因是,我只想在成功发送消息时,从接收模块下游获得确认时,才更新源模块的偏移量 我使用的是Spring集成Kafka 2.0.1.RELEASE和Spring Kafka 1.0.3.RELEASE,主题在Kafka 0.10.0.0环境中。我尝试了以下方法: 源模块配置: @Configuration public class ModuleConfiguration { @Value

在XD流中,消息通过源模块从卡夫卡主题消费,然后发送到接收器卡夫卡模块。开发自定义源和接收卡夫卡模块的原因是,我只想在成功发送消息时,从接收模块下游获得确认时,才更新源模块的偏移量

我使用的是Spring集成Kafka 2.0.1.RELEASE和Spring Kafka 1.0.3.RELEASE,主题在Kafka 0.10.0.0环境中。我尝试了以下方法:

源模块配置:

@Configuration
public class ModuleConfiguration {

    @Value("${topic}")
    private String topic;

    @Value("${brokerList}")
    private String brokerAddress;

    /**
     * Output channel onto which the message-driven adapter publishes consumed records.
     */
    @Bean
    public SubscribableChannel output() {
        return new DirectChannel();
    }

    /**
     * Partition 0 of the configured topic, consumed from initial offset 0.
     */
    @Bean
    public TopicPartitionInitialOffset topicPartition() {
        return new TopicPartitionInitialOffset(this.topic, 0, 0L);
    }

    /**
     * Listener container with MANUAL ack mode so the consumer offset is only
     * committed when the downstream sink explicitly acknowledges the message.
     *
     * Calls the sibling {@code topicPartition()} bean method directly instead of
     * self-@Autowired field injection of a bean declared in this same class,
     * which was redundant and fragile during context startup.
     */
    @Bean
    public KafkaMessageListenerContainer<String, String> container() {
        ContainerProperties containerProps = new ContainerProperties(topicPartition());
        containerProps.setAckMode(AckMode.MANUAL);
        return new KafkaMessageListenerContainer<>(consumerFactory(), containerProps);
    }

    /**
     * String/String consumer factory for the configured broker list.
     */
    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokerAddress);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer-group");
        // Auto-commit must stay off: offsets are committed manually via Acknowledgment.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }
}
@配置
公共类模块配置{
@值(“${topic}”)
私有字符串主题;
@值(“${brokerList}”)
私服;
@豆子
公共订阅通道输出(){
DirectChannel输出=新的DirectChannel();
返回输出;
}
@自动连线
主题划分初始偏移主题划分;
@豆子
公共主题分区初始偏移主题分区(){
返回新的TopicPartitionInitialOffset(this.topic,0,(long)0);
}
@豆子
公共KafkCamessageListenerContainer容器()引发异常{
ContainerProperties containerProps=新的ContainerProperty(主题分区);
containerProps.setAckMode(AckMode.MANUAL);
KafkaMessageListenerContainer KafkaMessageListenerContainer=新的KafkaMessageListenerContainer(consumerFactory(),containerProps);
返回KafkamessageliesStenerContainer;
}
@豆子
公共消费者工厂消费者工厂(){
Map props=newhashmap();
put(ConsumerConfig.BOOTSTRAP\u SERVERS\u CONFIG,this.brokerAddress);
props.put(ConsumerConfig.GROUP_ID_CONFIG,“测试消费者组”);
put(ConsumerConfig.ENABLE\u AUTO\u COMMIT\u CONFIG,false);
props.put(ConsumerConfig.SESSION\u TIMEOUT\u MS\u CONFIG,15000);
put(ConsumerConfig.KEY\u反序列化程序\u类\u配置,StringDeserializer.CLASS);
put(ConsumerConfig.VALUE\u反序列化程序\u类\u配置,StringDeserializer.CLASS);
DefaultKafkaConsumerFactory consumerFactory=新的DefaultKafkaConsumerFactory(道具);
还消费者工厂;
}
}
源模块:内置KafkamessageDriveNadapter

@MessageEndpoint
@Import(ModuleConfiguration.class)
public class InboundKafkaMessageDrivenAdapter {

    @Autowired
    SubscribableChannel output;

    /**
     * Bridges records from the listener container onto the module's output channel.
     *
     * The container is injected as a method parameter by Spring; the previous
     * @Autowired field of the same container duplicated that injection and was
     * never read, so it has been removed.
     */
    @Bean
    public KafkaMessageDrivenChannelAdapter<String, String> adapter(KafkaMessageListenerContainer<String, String> container) {
        KafkaMessageDrivenChannelAdapter<String, String> kafkaMessageDrivenChannelAdapter = new KafkaMessageDrivenChannelAdapter<>(container);
        kafkaMessageDrivenChannelAdapter.setOutputChannel(output);
        return kafkaMessageDrivenChannelAdapter;
    }
}
@Import(ModuleConfiguration.class)
@MessageEndpoint
public class SinkActivator {

    @Autowired
    KafkaProducerMessageHandler<String,String> handler;

    @Autowired
    SubscribableChannel input;

    /**
     * Sends the message to Kafka, then commits the source offset by acknowledging.
     *
     * The header is fetched untyped and checked with instanceof: the typed
     * getHeaders().get(key, Acknowledgment.class) throws IllegalArgumentException
     * when the stored value's class does not match the requested type (e.g. the
     * container's internal ConsumerAcknowledgment loaded by a different module
     * classloader, as seen in XD). The null/instanceof guard also protects
     * messages that did not originate from a MANUAL-ack container.
     *
     * NOTE(review): across a non-local XD transport the Acknowledgment is a live
     * object and will not survive serialization — this only works in-process.
     */
    @ServiceActivator(inputChannel = "input")
    public void sendMessage(Message<?> msg) throws Exception {
        Object ackHeader = msg.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT);
        handler.handleMessage(msg);
        // Acknowledge only after the send above completed without throwing.
        if (ackHeader instanceof Acknowledgment) {
            ((Acknowledgment) ackHeader).acknowledge();
        }
    }
}
@MessageEndpoint
@导入(ModuleConfiguration.class)
公共类InboundKafkaMessageDrivenAdapter{
@自动连线
卡夫卡信使集装箱;
@自动连线
下标通道输出;
@豆子
公共KafCamessageDrivenChannelAdapter适配器(KafCamessageListenerContainer容器){
KafkaMessageDrivenChannelAdapter KafkaMessageDrivenChannelAdapter=新的KafkaMessageDrivenChannelAdapter(容器);
kafkaMessageDrivenChannelAdapter.setOutputChannel(输出);
返回kafkaMessageDrivenChannelAdapter;
}
}
接收器模块:配置

@Configuration
@EnableIntegration
public class ModuleConfiguration {

    @Value("${topic}")
    private String topic;

    @Value("${brokerList}")
    private String brokerAddress;

    /**
     * Outbound handler that writes every incoming message to the configured topic.
     */
    @Bean
    public KafkaProducerMessageHandler<String,String> handler() throws Exception {
        KafkaProducerMessageHandler<String, String> producerHandler =
                new KafkaProducerMessageHandler<>(kafkaTemplate());
        producerHandler.setTopicExpression(new LiteralExpression(this.topic));
        return producerHandler;
    }

    /**
     * Channel the sink's service activator subscribes to.
     */
    @Bean
    public SubscribableChannel input() {
        return new DirectChannel();
    }

    /**
     * Template wrapping the String/String producer factory.
     */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    /**
     * Producer factory pointed at the configured broker list with String serializers.
     */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokerAddress);
        config.put(ProducerConfig.RETRIES_CONFIG, 0);
        config.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        config.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        config.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(config);
    }
}
@配置
@使能集成
公共类模块配置{
@值(“${topic}”)
私有字符串主题;
@值(“${brokerList}”)
私服;
@豆子
公共KafkaProducerMessageHandler()引发异常{
KafkaProducerMessageHandler=新的KafkaProducerMessageHandler(kafkaTemplate());
setTopicExpression(新的LiteralExpression(this.topic));
返回处理程序;
}
@豆子
公共订阅频道输入(){
返回新的DirectChannel();
}
@豆子
公共卡夫卡模板卡夫卡模板(){
返回新的卡夫卡模板(producerFactory());
}
@豆子
公共生产工厂生产工厂(){
Map props=newhashmap();
put(ProducerConfig.BOOTSTRAP\u SERVERS\u CONFIG,this.brokerAddress);
props.put(ProducerConfig.RETRIES\u CONFIG,0);
props.put(ProducerConfig.BATCH\u SIZE\u CONFIG,16384);
props.put(ProducerConfig.LINGER\u MS\u CONFIG,1);
props.put(ProducerConfig.BUFFER\u MEMORY\u CONFIG,33554432);
put(ProducerConfig.KEY\u SERIALIZER\u CLASS\u CONFIG,StringSerializer.CLASS);
put(ProducerConfig.VALUE\u SERIALIZER\u CLASS\u CONFIG,StringSerializer.CLASS);
返回新的DefaultKafkaProducerFactory(道具);
}
}
接收器模块:SinkActivator

@MessageEndpoint
@Import(ModuleConfiguration.class)
public class InboundKafkaMessageDrivenAdapter {

    @Autowired
    SubscribableChannel output;

    /**
     * Bridges records from the listener container onto the module's output channel.
     *
     * The container is injected as a method parameter by Spring; the previous
     * @Autowired field of the same container duplicated that injection and was
     * never read, so it has been removed.
     */
    @Bean
    public KafkaMessageDrivenChannelAdapter<String, String> adapter(KafkaMessageListenerContainer<String, String> container) {
        KafkaMessageDrivenChannelAdapter<String, String> kafkaMessageDrivenChannelAdapter = new KafkaMessageDrivenChannelAdapter<>(container);
        kafkaMessageDrivenChannelAdapter.setOutputChannel(output);
        return kafkaMessageDrivenChannelAdapter;
    }
}
@Import(ModuleConfiguration.class)
@MessageEndpoint
public class SinkActivator {

    @Autowired
    KafkaProducerMessageHandler<String,String> handler;

    @Autowired
    SubscribableChannel input;

    /**
     * Sends the message to Kafka, then commits the source offset by acknowledging.
     *
     * The header is fetched untyped and checked with instanceof: the typed
     * getHeaders().get(key, Acknowledgment.class) throws IllegalArgumentException
     * when the stored value's class does not match the requested type (e.g. the
     * container's internal ConsumerAcknowledgment loaded by a different module
     * classloader, as seen in XD). The null/instanceof guard also protects
     * messages that did not originate from a MANUAL-ack container.
     *
     * NOTE(review): across a non-local XD transport the Acknowledgment is a live
     * object and will not survive serialization — this only works in-process.
     */
    @ServiceActivator(inputChannel = "input")
    public void sendMessage(Message<?> msg) throws Exception {
        Object ackHeader = msg.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT);
        handler.handleMessage(msg);
        // Acknowledge only after the send above completed without throwing.
        if (ackHeader instanceof Acknowledgment) {
            ((Acknowledgment) ackHeader).acknowledge();
        }
    }
}
@Import(ModuleConfiguration.class)
@消息端点
公共类激活器{
@自动连线
KafkaProducerMessageHandler;
@自动连线
下标通道输入;
@ServiceActivator(inputChannel=“输入”)
public void sendMessage(Message msg)引发异常{
Acknowledge Acknowledge=msg.getHeaders().get(KafkaHeaders.Acknowledge,Acknowledge.class);
handleMessage(msg);
确认。确认();
}
}
源成功接收消息并将其发送到接收器,但是当我尝试在接收器中获取确认时:

Acknowledgment acknowledgment = msg.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment.class);

将引发以下异常:

原因:java.lang.IllegalArgumentException: Incorrect type specified for header 'kafka_acknowledgment'. Expected [interface org.springframework.kafka.support.Acknowledgment] but actual type is [class org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer$ConsumerAcknowledgment]

在 spring-integration-kafka-2.0.1.RELEASE 的 KafkaMessageListenerContainer 源代码中,当 AckMode=MANUAL 时,消息中会添加一个 kafka_acknowledgment 头,但该头的实际类型是内部静态类 ConsumerAcknowledgment


那么,如何从接收模块获得对从源发送的消息的确认呢?

除非您使用本地(local)传输,否则您不能这样做;
Acknowledgment 是一个"活动"(live)对象,无法被序列化后通过传输发送到另一个模块