Java 春季卡夫卡消费者/生产者测试
我目前正在研究卡夫卡模块,我正在使用卡夫卡通信的Java 春季卡夫卡消费者/生产者测试,java,spring,apache-kafka,spring-test,spring-kafka,Java,Spring,Apache Kafka,Spring Test,Spring Kafka,我目前正在研究卡夫卡模块,我正在使用卡夫卡通信的spring-Kafka抽象。我能够从实际实现的角度集成生产者和消费者,但是,我不确定如何使用@KafkaListener测试(特别是集成测试)消费者的业务逻辑。我试着跟随springkafk文档和关于这个主题的各种博客,但这些都没有回答我想要的问题 弹簧启动测试类 //imports not mentioned due to brevity @RunWith(SpringRunner.class) @SpringBootTest(classes
spring-Kafka
抽象。我能够从实际实现的角度集成生产者和消费者,但是,我不确定如何使用@KafkaListener
测试(特别是集成测试)消费者的业务逻辑。我试着跟随springkafk
文档和关于这个主题的各种博客,但这些都没有回答我想要的问题
Spring Boot 测试类
//imports not mentioned due to brevity
@RunWith(SpringRunner.class)
@SpringBootTest(classes = PaymentAccountUpdaterApplication.class,
        webEnvironment = SpringBootTest.WebEnvironment.NONE)
public class CardUpdaterMessagingIntegrationTest {

    private static final String cardUpdateTopic = "TP.PRF.CARDEVENTS";

    @Autowired
    private ObjectMapper objectMapper;

    // The @ClassRule starts the embedded broker once for the whole class and
    // shuts it down automatically afterwards -- no manual after() call needed.
    @ClassRule
    public static KafkaEmbedded kafkaEmbedded =
            new KafkaEmbedded(1, false, cardUpdateTopic);

    /**
     * Round-trip test: produce a JSON payload to the embedded broker and
     * verify that a test-side consumer container receives it on partition 0.
     */
    @Test
    public void sampleTest() throws Exception {
        // Consumer side: plain String (de)serialization so the raw JSON can be inspected.
        Map<String, Object> consumerConfig =
                KafkaTestUtils.consumerProps("test", "false", kafkaEmbedded);
        consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        ConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerConfig);

        ContainerProperties containerProperties = new ContainerProperties(cardUpdateTopic);
        // FIX: the original called
        //   containerProperties.setMessageListener(new SafeStringJsonMessageConverter());
        // A message *converter* is not a MessageListener, and that value was
        // immediately overwritten by setupMessageListener() below anyway.
        KafkaMessageListenerContainer<String, String> container =
                new KafkaMessageListenerContainer<>(cf, containerProperties);

        BlockingQueue<ConsumerRecord<String, String>> records = new LinkedBlockingQueue<>();
        container.setupMessageListener((MessageListener<String, String>) data -> {
            System.out.println("Added to Queue: " + data);
            records.add(data);
        });
        container.setBeanName("templateTests");
        container.start();
        try {
            // Wait until the test consumer owns all partitions before producing,
            // otherwise the send may race the initial rebalance.
            ContainerTestUtils.waitForAssignment(container, kafkaEmbedded.getPartitionsPerTopic());

            // Producer side: String key, JSON-serialized value.
            Map<String, Object> producerConfig = KafkaTestUtils.senderProps(kafkaEmbedded.getBrokersAsString());
            producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
            ProducerFactory<String, Object> pf = new DefaultKafkaProducerFactory<>(producerConfig);
            KafkaTemplate<String, Object> kafkaTemplate = new KafkaTemplate<>(pf);

            String payload = objectMapper.writeValueAsString(accountWrapper());
            kafkaTemplate.send(cardUpdateTopic, 0, payload);

            ConsumerRecord<String, String> received = records.poll(10, TimeUnit.SECONDS);
            assertThat(received).has(partition(0));
        } finally {
            // FIX: the original never stopped the container, leaking its
            // consumer thread into subsequent tests.
            container.stop();
        }
    }

    // FIX: removed the @After method that called kafkaEmbedded.after().
    // The @ClassRule already tears the broker down after the class, and
    // destroying it after *every* test would break any later test method.

    /** Builds the sample card-update event used as the test payload. */
    private AccountWrapper accountWrapper() {
        return AccountWrapper.builder()
                .eventSource("PROFILE")
                .eventName("INITIAL_LOAD_CARD")
                .eventTime(LocalDateTime.now().toString())
                .eventID("8730c547-02bd-45c0-857b-d90f859e886c")
                .details(AccountDetail.builder()
                        .customerId("idArZ_K2IgE86DcPhv-uZw")
                        .vaultId("912A60928AD04F69F3877D5B422327EE")
                        .expiryDate("122019")
                        .build())
                .build();
    }
}
@Service
public class ConsumerMessageListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(ConsumerMessageListener.class);

    // Holds the actual business logic; injected via constructor so it can be
    // mocked/proxied in tests.
    private final ConsumerMessageProcessorService consumerMessageProcessorService;

    public ConsumerMessageListener(ConsumerMessageProcessorService consumerMessageProcessorService) {
        this.consumerMessageProcessorService = consumerMessageProcessorService;
    }

    /**
     * Entry point for card-update events.
     *
     * Delegates to the processor service; acknowledges the offset only after
     * successful processing. On failure the exception is rethrown so the
     * container's error handling kicks in and the offset is NOT committed.
     */
    @KafkaListener(id = "cardUpdateEventListener",
            topics = "${kafka.consumer.cardupdates.topic}",
            containerFactory = "kafkaJsonListenerContainerFactory")
    public void processIncomingMessage(Payload<AccountWrapper, Object> payloadContainer,
                                       Acknowledgment acknowledgment,
                                       @Header(KafkaHeaders.RECEIVED_TOPIC) String topic,
                                       @Header(KafkaHeaders.RECEIVED_PARTITION_ID) String partitionId,
                                       @Header(KafkaHeaders.OFFSET) String offset) {
        try {
            // business logic to process the message
            consumerMessageProcessorService.processIncomingMessage(payloadContainer);
        } catch (Exception e) {
            // FIX: 'messageMetadataInfo' was an undefined symbol (compile error).
            // Build the metadata from the received headers, and pass the
            // exception as the final argument so SLF4J logs the full stack
            // trace instead of only e.getMessage().
            String messageMetadataInfo =
                    String.format("topic=%s, partition=%s, offset=%s", topic, partitionId, offset);
            LOGGER.error("Unhandled exception in card event message consumer. Discarding offset commit. "
                    + "details:: {}", messageMetadataInfo, e);
            throw e;
        }
        acknowledgment.acknowledge();
    }
}
//由于简洁,未提及导入
@RunWith(SpringRunner.class)
@SpringBootTest(类=PaymentAccountUpdaterApplication.class,
webEnvironment=SpringBootTest.webEnvironment.NONE)
公共类卡片更新消息集成测试{
私有最终静态字符串cardUpdateTopic=“TP.PRF.CARDEVENTS”;
@自动连线
私有对象映射器对象映射器;
@阶级规则
公共静态卡夫卡密德卡夫卡密德=
新卡夫卡迈德(1,错误,cardUpdateTopic);
@试验
public void sampleTest()引发异常{
映射消费者配置=
KafkaTestUtils.ConsumerOps(“测试”,“错误”,KafkameBedded);
consumerConfig.put(consumerConfig.KEY\u反序列化程序\u类\u配置,StringDeserializer.CLASS);
consumerConfig.put(consumerConfig.VALUE\u反序列化程序\u类\u配置,StringDeserializer.CLASS);
ConsumerFactory cf=新的默认KafkaconsumerFactory(consumerConfig);
ContainerProperties ContainerProperties=新的ContainerProperties(cardUpdateTopic);
setMessageListener(新的SafeStringJsonMessageConverter());
卡夫卡米萨格尔速递员
容器=新的KafkaMessageListenerContainer(cf,容器属性);
BlockingQueue记录=新建LinkedBlockingQueue();
container.setupMessageListener((MessageListener)数据->{
System.out.println(“添加到队列:+数据”);
记录。添加(数据);
});
容器名称(“模板测试”);
container.start();
ContainerTestUtils.waitForAssignment(容器,kafkameBedded.getPartitionsPerTopic());
Map producerConfig=KafkaTestUtils.senderProps(kafkamebedded.getBrokersAsString());
producerConfig.put(producerConfig.KEY\u SERIALIZER\u CLASS\u CONFIG,StringSerializer.CLASS);
producerConfig.put(producerConfig.VALUE\u SERIALIZER\u CLASS\u CONFIG,JsonSerializer.CLASS);
生产厂=
新的默认卡夫卡生产工厂(producerConfig);
KafkaTemplate KafkaTemplate=新的KafkaTemplate(pf);
字符串负载=objectMapper.writeValueAsString(accountWrapper());
发送(cardUpdateTopic,0,有效负载);
接收到的ConsumerRecord=记录。轮询(10,时间单位。秒);
资产(已接收).has(分区(0));
}
@之后
在()之后公共无效{
卡夫卡在()之后;
}
私有AccountWrapper(){
返回AccountWrapper.builder()
.eventSource(“概要文件”)
.eventName(“初始加载卡”)
.eventTime(LocalDateTime.now().toString())
.eventID(“8730c547-02bd-45c0-857b-d90f859e886c”)
.details(AccountDetail.builder()
.客户ID(“idArZ_K2IgE86DcPhv-uZw”)
.vaultId(“912A60928AD04F69F3877D5B422327EE”)
.到期日(“122019”)
.build())
.build();
}
}
监听器类
//imports not mentioned due to brevity
@RunWith(SpringRunner.class)
@SpringBootTest(classes = PaymentAccountUpdaterApplication.class,
        webEnvironment = SpringBootTest.WebEnvironment.NONE)
public class CardUpdaterMessagingIntegrationTest {

    private static final String cardUpdateTopic = "TP.PRF.CARDEVENTS";

    @Autowired
    private ObjectMapper objectMapper;

    // The @ClassRule starts the embedded broker once for the whole class and
    // shuts it down automatically afterwards -- no manual after() call needed.
    @ClassRule
    public static KafkaEmbedded kafkaEmbedded =
            new KafkaEmbedded(1, false, cardUpdateTopic);

    /**
     * Round-trip test: produce a JSON payload to the embedded broker and
     * verify that a test-side consumer container receives it on partition 0.
     */
    @Test
    public void sampleTest() throws Exception {
        // Consumer side: plain String (de)serialization so the raw JSON can be inspected.
        Map<String, Object> consumerConfig =
                KafkaTestUtils.consumerProps("test", "false", kafkaEmbedded);
        consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        ConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerConfig);

        ContainerProperties containerProperties = new ContainerProperties(cardUpdateTopic);
        // FIX: the original called
        //   containerProperties.setMessageListener(new SafeStringJsonMessageConverter());
        // A message *converter* is not a MessageListener, and that value was
        // immediately overwritten by setupMessageListener() below anyway.
        KafkaMessageListenerContainer<String, String> container =
                new KafkaMessageListenerContainer<>(cf, containerProperties);

        BlockingQueue<ConsumerRecord<String, String>> records = new LinkedBlockingQueue<>();
        container.setupMessageListener((MessageListener<String, String>) data -> {
            System.out.println("Added to Queue: " + data);
            records.add(data);
        });
        container.setBeanName("templateTests");
        container.start();
        try {
            // Wait until the test consumer owns all partitions before producing,
            // otherwise the send may race the initial rebalance.
            ContainerTestUtils.waitForAssignment(container, kafkaEmbedded.getPartitionsPerTopic());

            // Producer side: String key, JSON-serialized value.
            Map<String, Object> producerConfig = KafkaTestUtils.senderProps(kafkaEmbedded.getBrokersAsString());
            producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
            ProducerFactory<String, Object> pf = new DefaultKafkaProducerFactory<>(producerConfig);
            KafkaTemplate<String, Object> kafkaTemplate = new KafkaTemplate<>(pf);

            String payload = objectMapper.writeValueAsString(accountWrapper());
            kafkaTemplate.send(cardUpdateTopic, 0, payload);

            ConsumerRecord<String, String> received = records.poll(10, TimeUnit.SECONDS);
            assertThat(received).has(partition(0));
        } finally {
            // FIX: the original never stopped the container, leaking its
            // consumer thread into subsequent tests.
            container.stop();
        }
    }

    // FIX: removed the @After method that called kafkaEmbedded.after().
    // The @ClassRule already tears the broker down after the class, and
    // destroying it after *every* test would break any later test method.

    /** Builds the sample card-update event used as the test payload. */
    private AccountWrapper accountWrapper() {
        return AccountWrapper.builder()
                .eventSource("PROFILE")
                .eventName("INITIAL_LOAD_CARD")
                .eventTime(LocalDateTime.now().toString())
                .eventID("8730c547-02bd-45c0-857b-d90f859e886c")
                .details(AccountDetail.builder()
                        .customerId("idArZ_K2IgE86DcPhv-uZw")
                        .vaultId("912A60928AD04F69F3877D5B422327EE")
                        .expiryDate("122019")
                        .build())
                .build();
    }
}
@Service
public class ConsumerMessageListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(ConsumerMessageListener.class);

    // Holds the actual business logic; injected via constructor so it can be
    // mocked/proxied in tests.
    private final ConsumerMessageProcessorService consumerMessageProcessorService;

    public ConsumerMessageListener(ConsumerMessageProcessorService consumerMessageProcessorService) {
        this.consumerMessageProcessorService = consumerMessageProcessorService;
    }

    /**
     * Entry point for card-update events.
     *
     * Delegates to the processor service; acknowledges the offset only after
     * successful processing. On failure the exception is rethrown so the
     * container's error handling kicks in and the offset is NOT committed.
     */
    @KafkaListener(id = "cardUpdateEventListener",
            topics = "${kafka.consumer.cardupdates.topic}",
            containerFactory = "kafkaJsonListenerContainerFactory")
    public void processIncomingMessage(Payload<AccountWrapper, Object> payloadContainer,
                                       Acknowledgment acknowledgment,
                                       @Header(KafkaHeaders.RECEIVED_TOPIC) String topic,
                                       @Header(KafkaHeaders.RECEIVED_PARTITION_ID) String partitionId,
                                       @Header(KafkaHeaders.OFFSET) String offset) {
        try {
            // business logic to process the message
            consumerMessageProcessorService.processIncomingMessage(payloadContainer);
        } catch (Exception e) {
            // FIX: 'messageMetadataInfo' was an undefined symbol (compile error).
            // Build the metadata from the received headers, and pass the
            // exception as the final argument so SLF4J logs the full stack
            // trace instead of only e.getMessage().
            String messageMetadataInfo =
                    String.format("topic=%s, partition=%s, offset=%s", topic, partitionId, offset);
            LOGGER.error("Unhandled exception in card event message consumer. Discarding offset commit. "
                    + "details:: {}", messageMetadataInfo, e);
            throw e;
        }
        acknowledgment.acknowledge();
    }
}
@服务
公共类消费者消息发送器{
私有静态最终记录器Logger=LoggerFactory.getLogger(ConsumerMessageListener.class);
私有使用者消息处理器服务使用者消息处理器服务;
公共用户消息处理器(用户消息处理器服务用户消息处理器服务){
this.consumerMessageProcessorService=consumerMessageProcessorService;
}
@KafkaListener(id=“cardUpdateEventListener”,
topics=“${kafka.consumer.cardupdates.topic}”,
containerFactory=“kafkaJsonListenerContainerFactory”)
public void processIncomingMessage(有效负载payloadContainer,
致谢致谢致谢,
@标题(KafkaHeaders.RECEIVED_主题)字符串主题,
@标题(KafkaHeaders.RECEIVED_PARTITION_ID)字符串partitionId,
@标题(KafkaHeaders.OFFSET)字符串偏移量){
试一试{
//处理消息的业务逻辑
consumerMessageProcessorService.processIncomingMessage(payloadContainer);
}捕获(例外e){
LOGGER.error(“卡事件消息使用者中未处理的异常。丢弃偏移提交。”+
“消息::{},详细信息::{}”,e.getMessage(),messageMetadataInfo);
投掷e;
}
确认。确认();
}
}
我的问题是:在测试类中,我断言分区、有效负载等是从BlockingQueue
轮询的,我的问题是如何验证用@KafkaListener
注释的类中的业务逻辑是否得到正确执行,并根据错误处理和其他业务场景将消息路由到不同的主题。在一些示例中,我看到了要断言的CountDownLatch
,我不想将其放在业务逻辑中,以便在生产级代码中断言。另外,消息处理器是异步的,因此,如何断言执行,不确定
任何帮助,谢谢
正确执行,并根据错误处理和其他业务场景将消息路由到不同的主题
集成测试可以订阅该“不同”主题,以断言 listener 的处理逻辑是否被正确执行。
// Test strategy: instead of putting a CountDownLatch into production code,
// wrap the production @KafkaListener bean in an AOP proxy (installed by a
// BeanPostProcessor) so the test can observe the listener invocation -- the
// arguments it received and whether it acknowledged -- without modifying it.
@RunWith(SpringRunner.class)
@SpringBootTest(classes = { So53678801Application.class,
So53678801ApplicationTests.TestConfig.class})
public class So53678801ApplicationTests {
// Embedded Kafka broker with one node and the test topic pre-created.
@ClassRule
public static EmbeddedKafkaRule embededKafka = new EmbeddedKafkaRule(1, false, "so53678801");
@BeforeClass
public static void setup() {
// Point Spring Boot's auto-configured Kafka clients at the embedded broker
// before the application context starts.
System.setProperty("spring.kafka.bootstrap-servers",
embededKafka.getEmbeddedKafka().getBrokersAsString());
}
@Autowired
private KafkaTemplate<String, String> template;
@Autowired
private ListenerWrapper wrapper;
@Test
public void test() throws Exception {
// Send raw JSON; the application's configured converter turns it into a Foo.
this.template.send("so53678801", "{\"bar\":\"baz\"}");
// Wait for the proxied listener to be invoked, then assert on what it saw.
assertThat(this.wrapper.latch.await(10, TimeUnit.SECONDS)).isTrue();
assertThat(this.wrapper.argsReceived[0]).isInstanceOf(Foo.class);
assertThat(((Foo) this.wrapper.argsReceived[0]).getBar()).isEqualTo("baz");
assertThat(this.wrapper.ackCalled).isTrue();
}
@Configuration
public static class TestConfig {
@Bean
public static ListenerWrapper bpp() { // BPPs have to be static
return new ListenerWrapper();
}
}
// BeanPostProcessor that replaces the Listener bean with a proxy recording
// each call to processIncomingMessage.
public static class ListenerWrapper implements BeanPostProcessor, Ordered {
// Released once the listener method has completed (success or failure).
private final CountDownLatch latch = new CountDownLatch(1);
// Snapshot of the arguments the listener was invoked with.
private Object[] argsReceived;
// Set to true when the listener calls Acknowledgment.acknowledge().
private boolean ackCalled;
@Override
public int getOrder() {
// Run before other post-processors so the proxy wraps the raw bean.
return Ordered.HIGHEST_PRECEDENCE;
}
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
if (bean instanceof Listener) {
ProxyFactory pf = new ProxyFactory(bean);
pf.setProxyTargetClass(true); // unless the listener is on an interface
pf.addAdvice(interceptor());
return pf.getProxy();
}
return bean;
}
private MethodInterceptor interceptor() {
return invocation -> {
if (invocation.getMethod().getName().equals("processIncomingMessage")) {
Object[] args = invocation.getArguments();
this.argsReceived = Arrays.copyOf(args, args.length);
// Swap the Acknowledgment argument for a decorator that records
// the acknowledge() call before delegating to the real one.
Acknowledgment ack = (Acknowledgment) args[1];
args[1] = (Acknowledgment) () -> {
this.ackCalled = true;
ack.acknowledge();
};
try {
return invocation.proceed();
}
finally {
// Count down even if the listener threw, so the test fails on
// assertions rather than timing out.
this.latch.countDown();
}
}
else {
// Not the method under observation; pass straight through.
return invocation.proceed();
}
};
}
}
}