在一个Kafka主题下发送两个序列化Java对象

在一个Kafka主题下发送两个序列化Java对象,java,spring,spring-boot,apache-kafka,spring-kafka,Java,Spring,Spring Boot,Apache Kafka,Spring Kafka,我想实现Kafka Consumer and Producer,它发送和接收Java对象。我试过这个: 制作人: 自定义对象 序列化程序 响应类 import org.apache.kafka.common.serialization.Deserializer; 导入java.io.ByteArrayInputStream; 导入java.io.IOException; 导入java.io.ObjectInputStream; 导入java.io.Serializable; 公共类SaleRes

我想实现Kafka Consumer and Producer,它发送和接收Java对象。我试过这个:

制作人:

自定义对象

序列化程序

响应类

import org.apache.kafka.common.serialization.Deserializer;
导入java.io.ByteArrayInputStream;
导入java.io.IOException;
导入java.io.ObjectInputStream;
导入java.io.Serializable;
public class SaleResponseFactoryDeserializer implements Serializable, Deserializer<SaleRequestFactory> {
    @Override
    public SaleRequestFactory deserialize(String topic, byte[] data)
    {
        SaleRequestFactory saleRequestFactory = null;
        try
        {
            ByteArrayInputStream bis = new ByteArrayInputStream(data);
            ObjectInputStream in = new ObjectInputStream(bis);
            saleRequestFactory = (SaleRequestFactory) in.readObject();
            in.close();
        }
        catch (IOException | ClassNotFoundException e)
        {
            throw new RuntimeException("Unhandled", e);
        }
        return saleRequestFactory;
    }
}
我想根据对象类型发送和接收不同的序列化Java对象。例如,有时
SaleRequestFactory
和接收
SaleResponseFactory
或发送
AuthRequestFactory
和接收
AuthResponseFactory
。是否可以使用一个主题发送和接收不同的Java对象


完整示例

这是可能的,但每个对象类型需要两个单独的生产者工厂。或者使用ByteArraySerializer并自己序列化对象(相当于Gary的答案)

如果您确实希望正确地反序列化对象，则对消费者也是如此。否则，您将使用ByteArrayDeserializer（同样，相当于Gary展示的反序列化器）。由于Java无法仅凭字节判断序列化对象流里的对象类型，您应该在记录中包含额外的元数据（例如消息头，或可解析的特定键）来确定如何反序列化数据，然后自己调用相应的反序列化方法。


总的来说,我建议重新评估为什么需要将不同类型的记录放在一个主题中,或者查看其他消息格式,包括CloudEvents规范,或者使用Avro/Protobuf/polymorphic JSON类型,这将更好地与Kafka以外的客户机配合使用。这是可能的,但需要两种不同的格式每种对象类型的序列化工厂。或者一个带有ByteArraySerializer并自己序列化对象的工厂(相当于Gary的答案)

如果您确实希望正确地反序列化对象，则对消费者也是如此。否则，您将使用ByteArrayDeserializer（同样，相当于Gary展示的反序列化器）。由于Java无法仅凭字节判断序列化对象流里的对象类型，您应该在记录中包含额外的元数据（例如消息头，或可解析的特定键）来确定如何反序列化数据，然后自己调用相应的反序列化方法。


总的来说，我建议重新评估为什么需要将不同类型的记录放在一个主题中，或者查看其他消息格式，包括CloudEvents规范，或者使用Avro/Protobuf/多态JSON类型，这将更好地与Kafka以外的客户端配合使用。下面是一个使用Boot的自动配置基础结构bean的示例：

@SpringBootApplication
public class So65866763Application {

    public static void main(String[] args) {
        SpringApplication.run(So65866763Application.class, args);
    }

    @Bean
    public ApplicationRunner runner(KafkaTemplate<String, Object> template) {
        return args -> {
            template.send("so65866763", new Foo());
            template.send("so65866763", new Bar());
        };
    }

    @Bean
    public NewTopic topic() {
        return TopicBuilder.name("so65866763").partitions(1).replicas(1).build();
    }
}
class Foo implements Serializable {
}

class Bar implements Serializable {
}

@Component
@KafkaListener(id = "so65866763", topics = "so65866763")
class Listener {

    @KafkaHandler
    void fooListener(Foo foo) {
        System.out.println("In fooListener: " + foo);
    }

    @KafkaHandler
    void barListener(Bar bar) {
        System.out.println("In barListener: " + bar);
    }
}
public class JavaSerializer implements Serializer<Object> {

    @Override
    public byte[] serialize(String topic, Object data) {
        return null;
    }

    @Override
    public byte[] serialize(String topic, Headers headers, Object data) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
            oos.writeObject(data);
            return baos.toByteArray();
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}
public class JavaDeserializer implements Deserializer<Object> {

    @Override
    public Object deserialize(String topic, byte[] data) {
        return null;
    }

    @Override
    public Object deserialize(String topic, Headers headers, byte[] data) {
        ByteArrayInputStream bais = new ByteArrayInputStream(data);
        try (ObjectInputStream ois = new ObjectInputStream(bais)) {
            return ois.readObject();
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        catch (ClassNotFoundException e) {
            throw new IllegalStateException(e);
        }
    }
}

使用
Object
作为值类型-下面是一个使用Boot的自动配置基础结构bean的示例

@springboot应用程序
公共类SO65866763应用程序{
公共静态void main(字符串[]ar
    @Configuration
public class KafkaProducerConfig {

    @Value(value = "${kafka.bootstrapAddress}")
    private String bootstrapAddress;

    /**
     * Producer factory for {@code SaleRequestFactory} payloads, using the
     * custom Java-serialization value serializer.
     */
    @Bean
    public ProducerFactory<String, SaleRequestFactory> saleRequestFactoryProducerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, SaleRequestFactorySerializer.class);
        return new DefaultKafkaProducerFactory<>(configProps);
    }

    /**
     * Plain String-to-String producer factory.
     *
     * <p>FIX: the original configured only the bootstrap servers. Kafka's
     * {@code ProducerConfig} has no default for {@code key.serializer} /
     * {@code value.serializer}, so creating a producer from this factory
     * failed at runtime with a ConfigException.
     */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(configProps);
    }

    /** Template for fire-and-forget SaleRequestFactory sends. */
    @Bean
    public KafkaTemplate<String, SaleRequestFactory> saleRequestFactoryKafkaTemplate() {
        return new KafkaTemplate<>(saleRequestFactoryProducerFactory());
    }

    /** Template for plain String messages. */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    /**
     * Request/reply template: sends SaleRequestFactory requests on "tp-sale"
     * and consumes SaleResponseFactory replies with the dedicated
     * "tp-sale.reply" consumer group so replies are not stolen by the
     * regular listeners.
     */
    @Bean
    public ReplyingKafkaTemplate<String, SaleRequestFactory, SaleResponseFactory> replyKafkaTemplate(ProducerFactory<String, SaleRequestFactory> producerFactory, ConcurrentKafkaListenerContainerFactory<String, SaleResponseFactory> factory) {
        ConcurrentMessageListenerContainer<String, SaleResponseFactory> kafkaMessageListenerContainer = factory.createContainer("tp-sale");
        kafkaMessageListenerContainer.getContainerProperties().setGroupId("tp-sale.reply");
        return new ReplyingKafkaTemplate<>(producerFactory, kafkaMessageListenerContainer);
    }
}
 @RestController
@RequestMapping("/checkout")
public class CheckoutController {

    private TransactionService transactionService;
    private KafkaTemplate<String, SaleRequestFactory> saleRequestFactoryKafkaTemplate;
    private ReplyingKafkaTemplate<String, SaleRequestFactory, SaleResponseFactory> requestReplyKafkaTemplate;
    private static String topic = "tp-sale";

    @Autowired
    public CheckoutController(ValidationMessage validationMessage, TransactionService transactionService,
                              KafkaTemplate<String, SaleRequestFactory> saleRequestFactoryKafkaTemplate,
                              ReplyingKafkaTemplate<String, SaleRequestFactory, SaleResponseFactory> requestReplyKafkaTemplate){
        // NOTE(review): validationMessage is injected but never stored or used —
        // confirm it can be removed from the constructor.
        this.transactionService = transactionService;
        this.saleRequestFactoryKafkaTemplate = saleRequestFactoryKafkaTemplate;
        this.requestReplyKafkaTemplate = requestReplyKafkaTemplate;
    }

    /**
     * Persists an IN_PROGRESS transaction, then performs a Kafka
     * request/reply round trip: sends a SaleRequestFactory on "tp-sale" and
     * blocks (max 10s each) for the send acknowledgement and the
     * SaleResponseFactory reply.
     *
     * FIX: the handler was declared {@code private}; Spring MVC request-mapped
     * handler methods must be {@code public} to be invoked reliably.
     *
     * @throws ExecutionException   if the send or reply future fails
     * @throws InterruptedException if interrupted while waiting
     * @throws TimeoutException     if no ack/reply arrives within 10 seconds
     */
    @PostMapping("test")
    public void performPayment() throws ExecutionException, InterruptedException, TimeoutException {

        Transaction transaction = new Transaction();
        transaction.setStatus(PaymentTransactionStatus.IN_PROGRESS.getText());

        // Persisted before the Kafka round trip; the returned entity is
        // currently unused — presumably kept for a later correlation step.
        Transaction insertedTransaction = transactionService.save(transaction);

        SaleRequestFactory obj = new SaleRequestFactory();
        obj.setId(100);

        // Use the shared topic constant instead of repeating the "tp-sale" literal.
        ProducerRecord<String, SaleRequestFactory> record = new ProducerRecord<>(topic, obj);
        RequestReplyFuture<String, SaleRequestFactory, SaleResponseFactory> replyFuture = requestReplyKafkaTemplate.sendAndReceive(record);
        // Bound both waits so a dead broker cannot hang the request thread forever.
        SendResult<String, SaleRequestFactory> sendResult = replyFuture.getSendFuture().get(10, TimeUnit.SECONDS);
        ConsumerRecord<String, SaleResponseFactory> consumerRecord = replyFuture.get(10, TimeUnit.SECONDS);

        SaleResponseFactory value = consumerRecord.value();
        System.out.println("!!!!!!!!!!!! " + value.getUnique_id());
    }
}
@EnableKafka
@Configuration
public class KafkaConsumerConfig {

    @Value(value = "${kafka.bootstrapAddress}")
    private String bootstrapAddress;

    private String groupId = "test";

    /** Base consumer properties shared by every consumer this config creates. */
    private Map<String, Object> baseConsumerProperties() {
        Map<String, Object> consumerProps = new HashMap<>();
        consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, SaleResponseFactoryDeserializer.class);
        return consumerProps;
    }

    /** Factory for String-keyed consumers whose values are SaleResponseFactory objects. */
    @Bean
    public ConsumerFactory<String, SaleResponseFactory> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(baseConsumerProperties());
    }

    /** Listener container factory backed by the consumer factory above. */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, SaleResponseFactory> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, SaleResponseFactory> containerFactory =
                new ConcurrentKafkaListenerContainerFactory<>();
        containerFactory.setConsumerFactory(consumerFactory());
        return containerFactory;
    }
}
@Component
public class ProcessingSaleListener {

    private static String topic = "tp-sale";

    /**
     * Consumes SaleRequestFactory messages from "tp-sale" and returns a
     * SaleResponseFactory reply.
     *
     * FIX: added {@code @SendTo}. Without it the returned value is silently
     * discarded, so the client's ReplyingKafkaTemplate.sendAndReceive(...)
     * never gets a reply and times out. A bare {@code @SendTo} routes the
     * reply to the topic named in the request's REPLY_TOPIC header, which
     * ReplyingKafkaTemplate populates.
     *
     * @param tf      deserialized request payload
     * @param headers record headers (includes the reply-topic/correlation metadata)
     * @return the reply sent back to the requester
     */
    @KafkaListener(topics = "tp-sale")
    @SendTo
    public SaleResponseFactory process(@Payload SaleRequestFactory tf, @Headers MessageHeaders headers) throws Exception {

        System.out.println(tf.getId());

        SaleResponseFactory resObj = new SaleResponseFactory();
        resObj.setUnique_id("123123");

        return resObj;
    }
}
import java.io.Serializable;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;

/**
 * Request payload sent on the "tp-sale" topic via Java native serialization
 * (see SaleRequestFactorySerializer). Lombok generates getters, setters, a
 * no-arg and all-args constructor, and a toBuilder-enabled builder.
 */
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@Builder(toBuilder = true)
public class SaleRequestFactory implements Serializable {

    // Pinned so serialized bytes stay compatible across recompiles.
    // NOTE(review): identical UID to SaleResponseFactory — harmless for
    // distinct classes, but confirm it was intentional and not a copy-paste.
    private static final long serialVersionUID = 1744050117179344127L;
    
    // Business identifier of the sale request (set to 100 in CheckoutController).
    private int id;

}
import org.apache.kafka.common.serialization.Serializer;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;

public class SaleRequestFactorySerializer implements Serializable, Serializer<SaleRequestFactory> {

    /**
     * Serializes a SaleRequestFactory to bytes using Java native serialization.
     *
     * <p>FIX: the original closed only the ByteArrayOutputStream (a no-op) and
     * never flushed/closed the ObjectOutputStream, whose internal block-data
     * buffer could leave the written object partially (or not at all) in the
     * byte array. try-with-resources closes the ObjectOutputStream, which
     * flushes everything before {@code toByteArray()} is read.
     *
     * @param topic topic the record is bound for (unused)
     * @param data  object to serialize; {@code null} yields {@code null}
     *              (Kafka tombstone convention)
     * @return the serialized bytes, or {@code null} for null input
     * @throws RuntimeException wrapping any IOException from serialization
     */
    @Override
    public byte[] serialize(String topic, SaleRequestFactory data)
    {
        if (data == null)
        {
            return null;
        }
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (ObjectOutputStream outputStream = new ObjectOutputStream(out))
        {
            outputStream.writeObject(data);
        }
        catch (IOException e)
        {
            throw new RuntimeException("Unhandled", e);
        }
        return out.toByteArray();
    }
}
import java.io.Serializable;
import java.time.LocalDateTime;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;

/**
 * Reply payload returned on the "tp-sale" exchange via Java native
 * serialization (see SaleResponseFactoryDeserializer). Lombok generates
 * getters, setters, constructors, and a toBuilder-enabled builder.
 */
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@Builder(toBuilder = true)
public class SaleResponseFactory implements Serializable {

    // Pinned so serialized bytes stay compatible across recompiles.
    // NOTE(review): identical UID to SaleRequestFactory — confirm intentional.
    private static final long serialVersionUID = 1744050117179344127L;

    // Correlation/identifier string for the processed sale ("123123" in the listener).
    private String unique_id;
}
import org.apache.kafka.common.serialization.Deserializer;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;

public class SaleResponseFactoryDeserializer implements Serializable, Deserializer<SaleResponseFactory> {

    /**
     * Deserializes a SaleResponseFactory from Java-serialized bytes.
     *
     * <p>FIX: the original implemented {@code Deserializer<SaleRequestFactory>}
     * and cast to SaleRequestFactory, contradicting both this class's name and
     * KafkaConsumerConfig, which registers it for
     * {@code ConsumerFactory<String, SaleResponseFactory>} — the reply object
     * would blow up with a ClassCastException when read. Also uses
     * try-with-resources and treats {@code null} data as a tombstone.
     *
     * @param topic topic the record came from (unused)
     * @param data  serialized bytes; {@code null} yields {@code null}
     * @return the deserialized SaleResponseFactory, or {@code null}
     * @throws RuntimeException wrapping IOException/ClassNotFoundException
     */
    @Override
    public SaleResponseFactory deserialize(String topic, byte[] data)
    {
        if (data == null)
        {
            return null;
        }
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(data)))
        {
            return (SaleResponseFactory) in.readObject();
        }
        catch (IOException | ClassNotFoundException e)
        {
            throw new RuntimeException("Unhandled", e);
        }
    }
}
spring.kafka.consumer.auto-offset-reset=earliest

spring.kafka.producer.value-serializer=com.example.demo.JavaSerializer
spring.kafka.consumer.value-deserializer=com.example.demo.JavaDeserializer
In fooListener: com.example.demo.Foo@331ca660
In barListener: com.example.demo.Bar@26f54288