Warning: file_get_contents(/data/phpspider/zhask/data//catemap/2/apache-kafka/3.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Java 卡夫卡制作者失败-意外错误代码:87_Java_Apache Kafka_Avro_Kafka Producer Api - Fatal编程技术网

Java 卡夫卡制作者失败-意外错误代码:87

Java 卡夫卡制作者失败-意外错误代码:87,java,apache-kafka,avro,kafka-producer-api,Java,Apache Kafka,Avro,Kafka Producer Api,试图向卡夫卡发送Avro格式的消息并使用它。直到一些研究添加了Thread.sleep(16000)以便制作人等待消息,它才发送消息。然而,它又停止了工作。它是org.apache.kafka.common.protocol.Errors-意外错误代码:87。无法向主题生成消息 有什么建议吗?下面是我的代码 public class AvroAutomationTest3IT { private static final Logger LOGGER = LoggerFactory.ge

试图向卡夫卡发送Avro格式的消息并使用它。直到一些研究添加了Thread.sleep(16000)以便制作人等待消息,它才发送消息。然而,它又停止了工作。它是
org.apache.kafka.common.protocol.Errors-意外错误代码:87。无法向主题生成消息

有什么建议吗?下面是我的代码

public class AvroAutomationTest3IT {

    private static final Logger LOGGER = LoggerFactory.getLogger(AvroAutomationTest3IT.class);
    private static Properties producerProps;
    private final String topic = "topic.one";

    String schemaPath = "src/test/resources/automation-tests/sample-avro.avsc";

    // subject convention is "<topic-name>-value"
    String subject = topic + "-value";

    // avsc json string.
    String schema = null;

    // kafka broker list.
    private static String brokers = "xxx:9093";
    // schema registry url.
    private static String registry = "xxx:8081";

    private static Gson gson = new GsonBuilder().setPrettyPrinting().serializeNulls().create();

    @BeforeAll
    public static void setUp() throws IOException {
        producerProps = new Properties();
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
        producerProps.put("acks", "1");
        producerProps.put("reconnect.backoff.ms", "5000");
        producerProps.put("retry.backoff.ms", "1000");
        producerProps.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, registry);
        //producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.IntegerSerializer");
        producerProps.put(KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
        // producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "io.confluent.kafka.serializers.KafkaAvroSerializer");
        //  producerProps.put(VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        producerProps.put(CLIENT_ID_CONFIG, "AvroProducer");
        // producerProps.put(ProducerConfig.ACKS_CONFIG, "0");
        // producerProps.put(ProducerConfig.RETRIES_CONFIG, "0");

        //configure the KafkaAvroSerializer
        producerProps.put(VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
        producerProps.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384");
        producerProps.put(ProducerConfig.LINGER_MS_CONFIG, "1");
        producerProps.put(ProducerConfig.BUFFER_MEMORY_CONFIG, "33554432");

        //consumer properties
        producerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
        producerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        producerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        producerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "NewConsumer");

        producerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        producerProps.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 10);

        //sslConfig
        producerProps.put("security.protocol", "SSL");
        producerProps.put("ssl.truststore.location", "C:/Users/xx/truststore.jks");
        producerProps.put("ssl.truststore.password", "expeditors");
        producerProps.put("ssl.keystore.location", "C:/Users/xx/xx.jks");
        producerProps.put("ssl.keystore.password", "xxx");
        producerProps.put("ssl.key.password", "xxx");

    }

    @Test
         public void avroTest() throws Exception {
        sendMessage();
        Thread.sleep(16000);

        readMessage();


    }

    public void readMessage() {
        KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(producerProps);
        consumer.subscribe(Collections.singletonList(topic));
        try {
            ConsumerRecords<String, byte[]> records = consumer.poll(Duration.ofMillis(15000));

            // assertEquals(2, records.count(), "Expected 2 record");
            for (ConsumerRecord<String, byte[]> record : records) {
                try {
                    JsonElement el = this.parseAvroMessage(topic, record.value());

                    System.out.printf("offset = %d, value = %s\n", record.offset(), el);
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            consumer.commitAsync();
            consumer.close(Duration.ofMillis(3000));
        }
    }

    private JsonElement parseAvroMessage(String topic, byte[] value) {
        HashMap<String, String> configs = new HashMap<>();
        configs.put("schema.registry.url", registry);

        KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
        deserializer.configure(configs, true);
        Object obj = deserializer.deserialize(topic, value);
        return gson.fromJson(obj.toString(), JsonElement.class);
    }

    public void sendMessage() throws IOException {

        // construct kafka producer.
        Producer<Integer, GenericRecord> producer = new KafkaProducer<Integer, GenericRecord>(producerProps);


        // message key.
        // int userIdInt = 1;
        // message value, avro generic record.
        GenericRecord record = this.buildRecord();

        // send avro message to the topic page-view-event.
        producer.send(new ProducerRecord<Integer, GenericRecord>("visibility.avro.topic.source.one", null, record));
        // producer.flush();
    }

    public GenericRecord buildRecord() throws IOException {

        // avsc json string.
        String schemaString = null;

        FileInputStream inputStream = new FileInputStream(schemaPath);
        try {
            schemaString = IOUtils.toString(inputStream, StandardCharsets.UTF_8);
        } finally {
            inputStream.close();
        }

        // avro schema.
        Schema schema = new Schema.Parser().parse(schemaString);

        GenericRecord metadata = new GenericData.Record(schema.getField("metadata").schema());
        metadata.put("version", "1");
        metadata.put("eventName", "event.name");

        GenericRecord data = new GenericData.Record(schema.getField("data").schema());

        data.put("name", "Bob");
        data.put("age", 25);

        GenericRecord record = new GenericData.Record(schema);
        record.put("metadata", metadata);
        record.put("data", data);

        return record;
    }

}
公共类AvroAutomationTest3IT{
私有静态最终记录器Logger=LoggerFactory.getLogger(AvroAutomationTest3IT.class);
私有静态属性producerProps;
私有最终字符串topic=“topic.one”;
String schemaPath=“src/test/resources/automation tests/sample avro.avsc”;
//主题约定为“-值”
字符串主题=主题+“-value”;
//avsc json字符串。
字符串模式=null;
//卡夫卡经纪人名单。
私有静态字符串代理=“xxx:9093”;
//架构注册表url。
私有静态字符串注册表=“xxx:8081”;
私有静态Gson Gson=new GsonBuilder().setPrettyPrinting().serializeNulls().create();
@以前
public static void setUp()引发IOException{
producerProps=新属性();
producerProps.put(ProducerConfig.BOOTSTRAP\u SERVERS\u CONFIG,brokers);
生产商产品投入(“acks”、“1”);
producerProps.put(“重新连接.退避.ms”,“5000”);
producerProps.put(“retry.backoff.ms”,“1000”);
producerProps.put(AbstractKafkaAvroSerDeConfig.SCHEMA\u REGISTRY\u URL\u CONFIG,REGISTRY);
//producerProps.put(ProducerConfig.KEY\u SERIALIZER\u CLASS\u CONFIG,“org.apache.kafka.common.serialization.IntegerSerializer”);
put(KEY_SERIALIZER_CLASS_CONFIG,IntegerSerializer.CLASS.getName());
//producerProps.put(ProducerConfig.VALUE\u SERIALIZER\u CLASS\u CONFIG,“io.confluent.kafka.serializers.KafkaAvroSerializer”);
//producerProps.put(VALUE_SERIALIZER_CLASS_CONFIG,StringSerializer.CLASS.getName());
producerProps.put(客户端ID配置,“AvroProducer”);
//producerProps.put(ProducerConfig.ACKS_CONFIG,“0”);
//producerProps.put(ProducerConfig.RETRIES_CONFIG,“0”);
//配置KafkaAvroSerializer
producerProps.put(VALUE_SERIALIZER_CLASS_CONFIG,KafkaAvroSerializer.CLASS.getName());
producerProps.put(ProducerConfig.BATCH\u SIZE\u CONFIG,“16384”);
producerProps.put(ProducerConfig.LINGER\u MS\u CONFIG,“1”);
producerProps.put(ProducerConfig.BUFFER\u MEMORY\u CONFIG,“33554432”);
//消费者财产
producerProps.put(ConsumerConfig.BOOTSTRAP\u SERVERS\u CONFIG,brokers);
producerProps.put(ConsumerConfig.KEY\u反序列化程序\u CLASS\u配置,byteArraydSerializer.CLASS.getName());
producerProps.put(ConsumerConfig.VALUE\u反序列化程序\u CLASS\u配置,byteArraydSerializer.CLASS.getName());
producerProps.put(ConsumerConfig.GROUP_ID_CONFIG,“NewConsumer”);
producerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,“最早”);
producerProps.put(ConsumerConfig.MAX\u POLL\u RECORDS\u CONFIG,10);
//sslConfig
producerProps.put(“security.protocol”、“SSL”);
producerProps.put(“ssl.truststore.location”,“C:/Users/xx/truststore.jks”);
producerProps.put(“ssl.truststore.password”、“expeditors”);
producerProps.put(“ssl.keystore.location”,“C:/Users/xx/xx.jks”);
producerProps.put(“ssl.keystore.password”、“xxx”);
producerProps.put(“ssl.key.password”、“xxx”);
}
@试验
public void avroTest()引发异常{
sendMessage();
睡眠(16000);
readMessage();
}
public void readMessage(){
卡夫卡消费者=新卡夫卡消费者(生产商运营);
consumer.subscribe(Collections.singletonList(主题));
试一试{
ConsumerRecords记录=consumer.poll(持续时间为15000百万);
//assertEquals(2,records.count(),“预期的2条记录”);
对于(消费者记录:记录){
试一试{
JsonElement el=this.parseAvroMessage(主题,record.value());
System.out.printf(“偏移量=%d,值=%s\n”,record.offset(),el);
}捕获(例外情况除外){
例如printStackTrace();
}
}
}捕获(例外e){
e、 printStackTrace();
}最后{
consumer.commitAsync();
消费结束(持续时间3000百万);
}
}
私有JsonElement parseAvroMessage(字符串主题,字节[]值){
HashMap configs=新的HashMap();
configs.put(“schema.registry.url”,registry);
Kafkavrodeserializer反序列化器=新的Kafkavrodeserializer();
反序列化器.configure(configs,true);
Object obj=反序列化程序。反序列化(主题,值);
返回gson.fromJson(obj.toString(),jsoneElement.class);
}
public void sendMessage()引发IOException{
//构建卡夫卡制作人。
制作人=新卡夫卡制作人(producerProps);
//消息键。
//int userIdInt=1;
//消息值,avro通用记录。
GenericRecord=this.buildRecord();
//向主题页面查看事件发送avro消息。
send(newproducerrecord(“visibility.avro.topic.source.one”,null,record));
//producer.flush();
}
公共GenericRecord buildRecord()引发IOException{
//avsc json字符串。
字符串schematring=null;
FileInputStream inputStream=新的FileInputStream(schemaPath);
试一试{
schematstring=IOUtils.toString(inputStream,StandardCharsets.UTF_8);
}最后{
inputStream.close();
}
//avro模式。
Schema Schema=newschema.Parser().parse(schemaString);
GenericRecord metadata=新的GenericData.Record(schema.getField(“metadata”).schema());
梅塔达特