Java ApacheKafka模式注册表抛出RestClientException

Java ApacheKafka模式注册表抛出RestClientException,java,mysql,apache-kafka,avro,mysqlbinlog,Java,Mysql,Apache Kafka,Avro,Mysqlbinlog,我有两个几乎相似的卡夫卡应用程序。它们都会侦听binlog以查看两个表的更改。我的问题是其中一个工作正常,但当尝试启动第二个时,我收到以下异常 org.apache.kafka.common.errors.SerializationException: Error registering Avro schema: {"type":"record","name":"Key","namespace":

我有两个几乎相似的卡夫卡应用程序。它们都会侦听binlog以查看两个表的更改。我的问题是其中一个工作正常,但当尝试启动第二个时,我收到以下异常

org.apache.kafka.common.errors.SerializationException: Error registering Avro schema: {"type":"record","name":"Key","namespace":"mysql.company.payments","fields":[{"name":"id","type":"long"}],"connect.name":"mysql.company.payments.Key"} Caused by: io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException: Unexpected character ('<' (code 60)): expected a valid value (number, String, array, object, 'true', 'false' or 'null')
另一个正在工作的应用程序具有完全相同的avro文件,但表名(付款)已被替换。 这两个应用程序都从同一台服务器运行,并连接到同一个Kafka群集。 我使用maven插件基于avro文件创建java类。成功创建了类Key.class

以下是我的应用程序中的两个重要类:

主类和流类
你在github或gitlab上有这个应用程序吗?@AliBenZarrouk 不幸的是,我不允许将其放在公共仓库上。这两个应用程序之间的唯一区别是表的名称吗?@AliBenZarrouk 是的,早期阶段是这样,但后来它们在业务逻辑上明显不同。这是相似的吗?
{
  "type": "record",
  "name": "Key",
  "namespace": "mysql.company.payments",
  "fields": [
    {
      "name": "id",
      "type": "long"
    }
  ],
  "connect.name": "mysql.company.payments.Key"
}


import com.company.util.Configs;
import error.PaymentSerializationException;
import io.confluent.kafka.serializers.KafkaAvroSerializerConfig;
import io.confluent.kafka.streams.serdes.avro.GenericAvroSerde;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import payment.PaymentUpdateListener;

import java.util.Properties;

/**
 * Entry point for the payments Kafka Streams application.
 *
 * <p>Wires the payment-update topology via {@link PaymentUpdateListener},
 * configures the Streams client (Avro serdes backed by the Schema Registry),
 * starts it, and installs a shutdown hook so the client closes cleanly on JVM
 * exit.
 */
public class PaymentsMain {

    // Streams configuration shared across the class; populated once in main().
    static Properties properties;

    /**
     * Boots the application: builds the topology, starts the Kafka Streams
     * client, and registers a shutdown hook that closes it.
     */
    public static void main(String[] args) {
        final StreamsBuilder topology = new StreamsBuilder();
        properties = configProperties();

        // watchForPaymentUpdate returns the same builder it was given,
        // now carrying the payment stream definition.
        final KafkaStreams streams =
                new KafkaStreams(watchForPaymentUpdate(topology).build(), properties);
        streams.start();
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }

    /** Attaches the payment-update stream to the given builder and returns it. */
    private static StreamsBuilder watchForPaymentUpdate(StreamsBuilder topology) {
        new PaymentUpdateListener(topology).start();
        return topology;
    }

    /**
     * Assembles the Kafka Streams configuration: Schema Registry URL and
     * bootstrap servers from external config, generic Avro serdes for both
     * keys and values, and a custom production exception handler.
     */
    private static Properties configProperties() {
        final Properties props = new Properties();

        props.put(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG,
                Configs.getConfig("schemaRegistryUrl"));
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "payment-kafka");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,
                Configs.getConfig("bootstrapServerUrl"));
        props.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000);
        props.put(StreamsConfig.STATE_DIR_CONFIG, "/tmp/state_dir");
        props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, "3");
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, GenericAvroSerde.class);
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, GenericAvroSerde.class);
        props.put(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG, "DEBUG");
        props.put(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG,
                PaymentSerializationException.class);

        return props;
    }
}
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;

/**
 * Wires the payments binlog topic into a Streams topology.
 *
 * <p>Reads {@code mysql.company.payments} with the payment key/envelope serdes
 * and forwards every record unchanged to {@code kafka-consumer.payment}.
 */
public class PaymentUpdateListener {

    private final StreamsBuilder topologyBuilder;

    public PaymentUpdateListener(StreamsBuilder builder) {
        this.topologyBuilder = builder;
    }

    /** Pipes records from the payments binlog topic straight to the consumer topic. */
    public void start() {
        topologyBuilder
                .stream(
                        "mysql.company.payments",
                        Consumed.with(
                                PaymentSerde.getGenericKeySerde(),
                                PaymentSerde.getEnvelopeSerde()))
                .to("kafka-consumer.payment");
    }
}