
Java Apache Kafka embedded Kafka JUnit test - the application starts up while running unit tests


I am developing an asynchronous mail server in Spring Boot using Kafka.

I have written a test with embedded Kafka, which starts its own Kafka topic on a random port and uses it for the test.

When I run the test, the application context is loaded and it expects a local Kafka cluster. I need to stop the application context from loading. I copied the code from a reference example, where it works completely fine. When I follow the same style in my project, I can see the actual application starting up.

package com.mailer.embeddedkafkatests;
import static org.junit.Assert.assertTrue;

import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.ContainerTestUtils;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringRunner;

import com.mailer.model.Mail;
import com.mailer.producer.KafkaMessageProducer;
import com.mailer.serializer.MailSerializer;

@RunWith(SpringRunner.class)
@SpringBootTest
@DirtiesContext
public class SpringKafkaSenderTest {

  private static final Logger LOGGER =
      LoggerFactory.getLogger(SpringKafkaSenderTest.class);

  private static String SENDER_TOPIC = "sender.t";

  @Autowired
  private KafkaMessageProducer sender;

  private KafkaMessageListenerContainer<String, Mail> container;

  private BlockingQueue<ConsumerRecord<String, Mail>> records;

  @ClassRule
  public static EmbeddedKafkaRule embeddedKafka =
      new EmbeddedKafkaRule(1, true, SENDER_TOPIC);

  @Before
  public void setUp() throws Exception {
    // set up the Kafka consumer properties
    Map<String, Object> consumerProperties =
        KafkaTestUtils.consumerProps("sender", "false",
            embeddedKafka.getEmbeddedKafka());
    consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, MailSerializer.class);

    // create a Kafka consumer factory
    DefaultKafkaConsumerFactory<String, Mail> consumerFactory =
        new DefaultKafkaConsumerFactory<String, Mail>(
            consumerProperties);//, new StringDeserializer(), new JsonDeserializer<>(Mail.class));

    // set the topic that needs to be consumed
    ContainerProperties containerProperties =
        new ContainerProperties(SENDER_TOPIC);

    // create a Kafka MessageListenerContainer
    container = new KafkaMessageListenerContainer<>(consumerFactory,
        containerProperties);

    // create a thread safe queue to store the received message
    records = new LinkedBlockingQueue<>();

    // setup a Kafka message listener
    container
        .setupMessageListener(new MessageListener<String, Mail>() {
          @Override
          public void onMessage(
              ConsumerRecord<String, Mail> record) {
            LOGGER.debug("test-listener received message='{}'",
                record.toString());
            records.add(record);
          }
        });

    // start the container and underlying message listener
    container.start();

    // wait until the container has the required number of assigned partitions
    ContainerTestUtils.waitForAssignment(container,
        embeddedKafka.getEmbeddedKafka().getPartitionsPerTopic());
  }

  @After
  public void tearDown() {
    // stop the container
    container.stop();
  }

  @Test
  public void testSend() throws InterruptedException {
    // send the message
    Mail mail = new Mail();
    mail.setFrom("vinoth@local.com");
    sender.sendMessage(mail);
    Thread.sleep(4000);
    // check that the message was received
    ConsumerRecord<String, Mail> received =
        records.poll(10, TimeUnit.SECONDS);
    // Hamcrest Matchers to check the value
    assertTrue(received.value().getFrom().equals(mail.getFrom()));
    System.out.println(received.value().getFrom());
//    assertThat(received, hasValue(mail));
    // AssertJ Condition to check the key
//    assertThat(received).has(key(null));
  }
}

Why do you want to stop the Spring context from loading? Isn't the purpose of this JUnit test to test your Spring application?

In any case, just remove the @SpringBootTest annotation and the Spring context will not be loaded.
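
Note that once @SpringBootTest is removed, the @Autowired KafkaMessageProducer will no longer be injected, so the message has to be sent some other way. Below is a minimal sketch of how the producing side of the test could look without any Spring context, assuming MailSerializer implements Kafka's Serializer<Mail> and that sending through a plain KafkaTemplate is an acceptable stand-in for KafkaMessageProducer; the consumer/container setup would stay exactly as in the original test.

package com.mailer.embeddedkafkatests;

import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.KafkaTestUtils;

import com.mailer.model.Mail;
import com.mailer.serializer.MailSerializer;

// No @RunWith(SpringRunner.class) and no @SpringBootTest: the plain JUnit 4 runner is enough
// because nothing in this test is taken from a Spring application context any more.
public class SpringKafkaSenderNoContextTest {

  private static final String SENDER_TOPIC = "sender.t";

  @ClassRule
  public static EmbeddedKafkaRule embeddedKafka =
      new EmbeddedKafkaRule(1, true, SENDER_TOPIC);

  private KafkaTemplate<String, Mail> template;

  @Before
  public void setUp() {
    // producer properties pointing at the embedded broker instead of a local cluster
    Map<String, Object> producerProps =
        KafkaTestUtils.producerProps(embeddedKafka.getEmbeddedKafka());
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    // assumption: MailSerializer implements org.apache.kafka.common.serialization.Serializer<Mail>
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, MailSerializer.class);
    template = new KafkaTemplate<>(new DefaultKafkaProducerFactory<>(producerProps));

    // ... consumer factory, listener container and records queue as in the original setUp()
  }

  @Test
  public void testSend() throws Exception {
    Mail mail = new Mail();
    mail.setFrom("vinoth@local.com");
    // send directly through the template instead of the autowired KafkaMessageProducer
    template.send(SENDER_TOPIC, mail);
    // ... poll the records queue and assert on the received value as in the original test
  }
}

If the goal is actually to test the application end to end against the embedded broker, rather than to avoid the context, the pattern documented for spring-kafka-test is the opposite: keep @SpringBootTest and point Spring Boot at the embedded broker, e.g. with spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers} in the test properties (this assumes the application takes its bootstrap servers from Spring Boot's standard spring.kafka.* configuration).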
