Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/java/392.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181

Warning: file_get_contents(/data/phpspider/zhask/data//catemap/2/spring/12.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Java 使用@KafkaListener处理错误_Java_Spring_Spring Kafka - Fatal编程技术网

Java 使用@KafkaListener处理错误

Java 使用@KafkaListener处理错误,java,spring,spring-kafka,Java,Spring,Spring Kafka,我正在使用带有以下配置的spring kafka: package com.danigu.fancypants.infrastructure; import com.fasterxml.jackson.databind.ObjectMapper; import lombok.Data; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.common.serialization.Str

我正在使用带有以下配置的
spring kafka

package com.danigu.fancypants.infrastructure;

import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.Data;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.converter.StringJsonMessageConverter;
import org.springframework.retry.RetryPolicy;
import org.springframework.retry.backoff.BackOffPolicy;
import org.springframework.retry.backoff.ExponentialBackOffPolicy;
import org.springframework.retry.policy.SimpleRetryPolicy;
import org.springframework.retry.support.RetryTemplate;

import javax.inject.Inject;
import java.util.HashMap;
import java.util.Map;

/**
 * Kafka consumer wiring: a listener container factory with JSON message
 * conversion, manual acknowledgment (auto-commit disabled) and a retry
 * template (3 attempts, exponential back-off starting at 1 second).
 *
 * @author dani
 */
@Data
@EnableKafka
@Configuration
@Import({KafkaConfigurationProperties.class})
public class KafkaConfiguration {
    @Inject KafkaConfigurationProperties kcp;

    /**
     * Base consumer properties for containers built from this configuration.
     * Auto-commit is turned off because listeners acknowledge offsets manually.
     */
    protected Map<String, Object> consumerProperties() {
        // Diamond operator instead of the original raw HashMap (avoids unchecked warnings).
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kcp.getBrokerAddress());
        props.put(ConsumerConfig.GROUP_ID_CONFIG, kcp.getGroupId());
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return props;
    }

    /** Consumer factory built over {@link #consumerProperties()}. */
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerProperties());
    }

    /** Converts String records into listener payload objects via Jackson. */
    @Bean
    public StringJsonMessageConverter stringJsonMessageConverter(ObjectMapper mapper) {
        return new StringJsonMessageConverter(mapper);
    }

    /**
     * Listener container factory used by {@code @KafkaListener} endpoints.
     *
     * @param messageConverter the JSON payload converter bean defined above
     * @return a single-threaded container factory with retry support
     */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(
            StringJsonMessageConverter messageConverter) {
        // Fully typed with the diamond operator; the original used a raw type.
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();

        factory.setMessageConverter(messageConverter);
        factory.setConsumerFactory(consumerFactory());
        factory.setConcurrency(1);
        factory.setRetryTemplate(retryTemplate());

        return factory;
    }

    /*
     * Retry template.
     */

    /** Retry each failed delivery at most 3 times before giving up. */
    protected RetryPolicy retryPolicy() {
        SimpleRetryPolicy policy = new SimpleRetryPolicy();
        policy.setMaxAttempts(3);
        return policy;
    }

    /** Exponential back-off between attempts, first delay 1000 ms. */
    protected BackOffPolicy backOffPolicy() {
        ExponentialBackOffPolicy policy = new ExponentialBackOffPolicy();
        policy.setInitialInterval(1000);
        return policy;
    }

    /** Combines {@link #retryPolicy()} and {@link #backOffPolicy()} into one template. */
    protected RetryTemplate retryTemplate() {
        RetryTemplate template = new RetryTemplate();

        template.setRetryPolicy(retryPolicy());
        template.setBackOffPolicy(backOffPolicy());

        return template;
    }
}
package com.danigu.fancypants.infrastructure;
导入com.fasterxml.jackson.databind.ObjectMapper;
导入龙目数据;
导入org.apache.kafka.clients.consumer.ConsumerConfig;
导入org.apache.kafka.common.serialization.StringDeserializer;
导入org.springframework.context.annotation.Bean;
导入org.springframework.context.annotation.Configuration;
导入org.springframework.context.annotation.import;
导入org.springframework.kafka.annotation.EnableKafka;
导入org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
导入org.springframework.kafka.core.ConsumerFactory;
导入org.springframework.kafka.core.DefaultKafkaConsumerFactory;
导入org.springframework.kafka.support.converter.StringJsonMessageConverter;
导入org.springframework.retry.RetryPolicy;
导入org.springframework.retry.backoff.BackOffPolicy;
导入org.springframework.retry.backoff.ExponentialBackOffPolicy;
导入org.springframework.retry.policy.SimpleRetryPolicy;
导入org.springframework.retry.support.RetryTemplate;
导入javax.inject.inject;
导入java.util.HashMap;
导入java.util.Map;
/**
*@作者丹尼
*/
@资料
@使能卡夫卡
@配置
@导入({kafkanconfigurationproperties.class})
公共类卡夫卡配置{
@注射卡夫卡配置属性kcp;
受保护的映射使用者属性(){
Map props=newhashmap();
put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,kcp.getBrokerAddress());
put(ConsumerConfig.GROUP_ID_CONFIG,kcp.getGroupId());
put(ConsumerConfig.ENABLE\u AUTO\u COMMIT\u CONFIG,false);
props.put(ConsumerConfig.SESSION\u TIMEOUT\u MS\u CONFIG,15000);
put(ConsumerConfig.KEY\u反序列化程序\u类\u配置,StringDeserializer.CLASS);
put(ConsumerConfig.VALUE\u反序列化程序\u类\u配置,StringDeserializer.CLASS);
返回道具;
}
公共消费者工厂消费者工厂(){
返回新的DefaultKafkanConsumerFactory(consumerProperties());
}
@豆子
公共StringJsonMessageConverter StringJsonMessageConverter(ObjectMapper映射器){
返回新的StringJsonMessageConverter(映射器);
}
@豆子
公共并发kafkaListenerContainerFactory kafkaListenerContainerFactory(
StringJsonMessageConverter(消息转换器){
ConcurrentKafkListenerContainerFactory=新ConcurrentKafkListenerContainerFactory();
factory.setMessageConverter(messageConverter);
setConsumerFactory(consumerFactory());
工厂设置并发(1);
setRetryTemplate(retryTemplate());
返回工厂;
}
/*
*重试模板。
*/
受保护的RetryPolicy RetryPolicy(){
SimpleRetryPolicy策略=新的SimpleRetryPolicy();
策略。setMaxAttempts(3);
退货政策;
}
受保护的BackOffPolicy BackOffPolicy(){
ExponentialBackOffPolicy policy=新的ExponentialBackOffPolicy();
policy.setInitialInterval(1000);
退货政策;
}
受保护的RetryTemplate RetryTemplate(){
RetryTemplate=新RetryTemplate();
setRetryPolicy(retryPolicy());
template.setBackOffPolicy(backOffPolicy());
返回模板;
}
}
我的听众是这样的:

package com.danigu.fancypants.integration.inbound.dress;

import com.danigu.fancypants.integration.inbound.InvalidRequestException;
import com.danigu.fancypants.integration.inbound.dress.payload.DressRequest;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.listener.AcknowledgingMessageListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.messaging.handler.annotation.Payload;
import org.springframework.stereotype.Component;

import javax.inject.Inject;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;
import java.util.Set;

/**
 * Consumes dress requests from Kafka, validates the payload with Bean
 * Validation, prints it and manually acknowledges the offset.
 *
 * @author dani
 */
@Component
public class DressListener {

    @Inject protected Validator validator;

    @KafkaListener(topics = {"${kafka.dressesTopic}"})
    public void onMessage(@Payload DressRequest request, Acknowledgment acknowledgment) {
        // Reject invalid payloads before doing any work with them.
        assertValidRequest(request);

        System.out.println(request);

        // Manual commit; auto-commit is disabled in the consumer configuration.
        acknowledgment.acknowledge();
    }

    /** Throws {@link InvalidRequestException} when bean validation reports violations. */
    protected void assertValidRequest(DressRequest request) {
        final Set<ConstraintViolation<DressRequest>> problems = validator.validate(request);

        if (problems.isEmpty()) {
            return;
        }
        throw new InvalidRequestException(problems, request);
    }
}
package com.danigu.fancypants.integration.inbound.dress;
导入com.danigu.fancypants.integration.inbound.InvalidRequestException;
导入com.danigu.fancypants.integration.inbound.dress.payload.DressRequest;
导入org.apache.kafka.clients.consumer.ConsumerRecord;
导入org.springframework.kafka.annotation.KafkaListener;
导入org.springframework.kafka.listener.AcknowledgeingMessageListener;
导入org.springframework.kafka.support.acknowledge;
导入org.springframework.messaging.handler.annotation.Payload;
导入org.springframework.stereotype.Component;
导入javax.inject.inject;
导入javax.validation.ConstraintViolation;
导入javax.validation.Validator;
导入java.util.Set;
/**
*@作者丹尼
*/
@组成部分
公共类{
@注入受保护的验证器;
@KafkaListener(主题={“${kafka.dresstopic}}”)
消息上的公共无效(@Payload-DressRequest-request,Acknowledge-Acknowledge){
资产有效性请求(请求);
系统输出打印项次(请求);
确认。确认();
}
受保护的无效资产ValidRequest(请求){
最终设置冲突=validator.validate(请求);
如果(!inflictions.isEmpty()){
抛出新的InvalidRequestException(违规、请求);
}
}
}
到目前为止,我一直在查看
springkafka
的测试和参考文档,文档中说应该配置相应类型的
ErrorHandler
,这意味着我应该在
containerproperty
上配置它,尽管在我的用例中,这只是一个错误处理程序,我想定义多个(针对不同的有效负载类型),如果是的话,是否可能,如何定义

另外,是否有一种方法可以描述在带注释的侦听器上使用哪个错误处理程序

另外,是否有一种方法可以描述每个
@KafkaListener
RecoveryCallback
,或者可能是每个不同的主题,或者必须有不同的
ListenerContainerFactory
来描述


我可能完全弄错了,有人能给我指出正确的方向吗?我怎样才能以正确的方式为不同的负载类型配置多个
错误处理程序?

我不确定“不同的负载类型”是什么意思,因为您只有一个
@KafkaListener
。对于不同的负载类型,类级别的
@KafkaListener
可以在方法级别具有
@KafkaHandler

在任何情况下,每个容器只有一个错误处理程序。
/**
 * Set an {@link RabbitListenerErrorHandler} to invoke if the listener method throws
 * an exception.
 * NOTE(review): this snippet is quoted from Spring AMQP's {@code @RabbitListener};
 * the Spring Kafka equivalent attribute resolves a {@code KafkaListenerErrorHandler}
 * bean by name — confirm against the spring-kafka version in use.
 * @return the error handler.
 * @since 2.0
 */
String errorHandler() default "";