Java 生产者程序中的kafka网络处理器错误(ArrayIndexOutOfBoundsException:18)

Java 生产者程序中的kafka网络处理器错误(ArrayIndexOutOfBoundsException:18),java,api,apache-kafka,hadoop2,kafka-producer-api,Java,Api,Apache Kafka,Hadoop2,Kafka Producer Api,我有卡夫卡制作人Api程序,我是卡夫卡本身的新手。下面的代码从一个API获取数据并将消息发送到kafka主题 package kafka_Demo; import java.util.Properties; import java.io.BufferedReader; import java.io.InputStream; import java.io.InputStreamReader; import org.apache.kafka.clients.producer.*; import j

我有卡夫卡制作人Api程序,我是卡夫卡本身的新手。下面的代码从一个API获取数据并将消息发送到kafka主题

package kafka_Demo;

import java.util.Properties;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.apache.kafka.clients.producer.*;
import java.net.URL;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class HttpBasicAuth {

    /**
     * Pages through a JIRA REST search endpoint 20 issues at a time and
     * forwards every response line to the Kafka topic "test" under key "a11",
     * sending an extra "\n" record as a page separator.
     *
     * <p>Fixes over the original: {@code content.close()} was called outside
     * the {@code if (content != null)} guard (NPE when auth fails), the
     * {@code BufferedReader} was never closed, and the producer leaked if an
     * exception was thrown before {@code producer.close()}.
     */
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        // NOTE(review): "zookeeper.connect" is not a config of the new
        // (org.apache.kafka.clients) producer API and is ignored by it;
        // kept only for parity with the original. Safe to delete.
        props.put("zookeeper.connect", "localhost:2181");
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources guarantees the producer is flushed/closed even
        // when an exception escapes the loop (original only closed it on the
        // happy path).
        try (Producer<String, String> producer = new KafkaProducer<>(props)) {
            Json_read count = new Json_read();
            URL url = new URL("https://alm.sysbiz.org/rest/api/2/search?jql=project=ALG&maxResults=0");
            long totalTickets = count.ticketCount(url);
            Alm_authentication alm = new Alm_authentication();

            for (long startAt = 0; startAt <= totalTickets; startAt += 20) {
                url = new URL("https://alm.sysbiz.org/rest/api/2/search?jql=project=ALG&expand=changelog&startAt=" + startAt + "&maxResults=20");
                InputStream content = (InputStream) alm.performAuth(url);
                if (content != null) {
                    // Closing the reader also closes the underlying stream;
                    // the original closed content outside this null check and
                    // could throw NullPointerException on a failed auth.
                    // NOTE(review): InputStreamReader uses the platform default
                    // charset here, as the original did — confirm the endpoint
                    // returns UTF-8 and pass an explicit charset if so.
                    try (BufferedReader in = new BufferedReader(new InputStreamReader(content))) {
                        String line;
                        while ((line = in.readLine()) != null) {
                            // .get() blocks until the broker acks, preserving
                            // the original's synchronous-send behavior.
                            producer.send(new ProducerRecord<>("test", "a11", line)).get();
                        }
                    }
                }
                // Page separator record, sent whether or not auth succeeded
                // (matches original control flow).
                producer.send(new ProducerRecord<>("test", "a11", "\n"));
            }
        } catch (Exception e) {
            // Broad catch retained from the original: this is a top-level
            // demo main with no recovery strategy.
            e.printStackTrace();
        }
    }

}
kafka_演示包;
导入java.util.Properties;
导入java.io.BufferedReader;
导入java.io.InputStream;
导入java.io.InputStreamReader;
导入org.apache.kafka.clients.producer.*;
导入java.net.URL;
导入org.apache.kafka.clients.producer.KafkaProducer;
导入org.apache.kafka.clients.producer.ProducerRecord;
公共类HttpBasicAuth{
公共静态void main(字符串[]args){
试一试{
Properties props=新属性();
put(“bootstrap.servers”,“localhost:9092”);
props.put(“zookeeper.connect”,“localhost:2181”);
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put(“buffer.memory”,33554432);
put(“key.serializer”、“org.apache.kafka.common.serialization.StringSerializer”);
put(“value.serializer”、“org.apache.kafka.common.serialization.StringSerializer”);
制作人=新卡夫卡制作人(道具);
Json_read count=新的Json_read();
URL=新URL(“https://alm.sysbiz.org/rest/api/2/search?jql=project=ALG&maxResults=0");
长总票数=count.ticketCount(url);
Alm_身份验证Alm=新的Alm_身份验证();

这个错误是由于 Kafka 版本不匹配造成的。
请确保机器上的 kafka 客户端 jar 版本与 kafka 服务器版本一致。

我也遇到过类似的问题:即使 Eclipse 工程和 Kafka 使用的是同一版本的 jar,错误仍然出现。后来我找到了根本原因——机器上为 Solr 配置安装了另一个版本的 ZooKeeper,并且它的库被加入了环境变量的 classpath。因此启动 Kafka 自带的 ZooKeeper 时实际引用的是那个旧库。把 ZooKeeper 的 classpath 条目删掉、改用 Kafka 自带的 ZooKeeper 之后,问题解决了。:)

ERROR Processor got uncaught exception. (kafka.network.Processor)
java.lang.ArrayIndexOutOfBoundsException: 18
    at org.apache.kafka.common.protocol.ApiKeys.forId(ApiKeys.java:68)
    at org.apache.kafka.common.requests.AbstractRequest.getRequest(AbstractRequest.java:39)
    at kafka.network.RequestChannel$Request.<init>(RequestChannel.scala:79)
    at kafka.network.Processor$$anonfun$run$11.apply(SocketServer.scala:426)
    at kafka.network.Processor$$anonfun$run$11.apply(SocketServer.scala:421)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
    at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
    at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
    at kafka.network.Processor.run(SocketServer.scala:421)
    at java.lang.Thread.run(Thread.java:748)
java.util.concurrent.ExecutionException: org.apache.kafka.common.errors.TimeoutException: Failed to update metadata after 60000 ms.
    at org.apache.kafka.clients.producer.KafkaProducer$FutureFailure.<init>(KafkaProducer.java:1124)
    at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:823)
    at org.apache.kafka.clients.producer.KafkaProducer.send(KafkaProducer.java:760)
    at org.apache.kafka.clients.producer.KafkaProducer.send(KafkaProducer.java:648)
    at kafka_Demo.HttpBasicAuth.main(HttpBasicAuth.java:40)
Caused by: org.apache.kafka.common.errors.TimeoutException: Failed to update metadata after 60000 ms.