
Building a jar for a Java Kafka Streams application


I am developing a Java Kafka Streams application. I use the

mvn clean package

command to build the final jar file. But the build process never completes; it gets stuck at this state:

2019-07-09 13:58:18.991 INFO 12689 --- [-StreamThread-1] org.apache.kafka.streams.KafkaStreams : stream-client [wordcount-live-test-51e167c1-08d3-4b23-b5aa-97f9e40bc5e7] State transition from REBALANCING to RUNNING

How can I get the final output as a jar?
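
For context, the log line above is printed by the Streams application itself once it reaches the RUNNING state, which suggests the application is being started during the build (for example by a test bound to the package phase) and then blocks on latch.await() in the code below. A minimal sketch of a workaround, assuming the hang really is caused by tests run during packaging:

mvn clean package -DskipTests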

       Properties streamsConfiguration = new Properties();
       streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-live-test");
       streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka-2.c.charming-opus-833.internal:9092,kafka-0.c.charming-opus-833.internal:9092,kafka-1.c.charming-opus-833.internal:9092");
      // streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
       streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
       streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
       streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000);
       streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
       streamsConfiguration.put("schema.registry.url", "http://35.184.181.97:8089/");


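       // Serde setup: the Avro value serde must be configured with the Schema Registry URL before use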
       final Map<String, String> serdeConfig = Collections.singletonMap("schema.registry.url", "http://35.184.181.97:8089/");

       final Serde<String> stringSerde = new Serdes.StringSerde();

       final Serde<GenericRecord> valueGenericAvroSerde = new GenericAvroSerde();
       valueGenericAvroSerde.configure(serdeConfig, false);

       final StreamsBuilder builder = new StreamsBuilder();
       final KStream<String, GenericRecord> stream = builder.stream("testtrip", Consumed.with(stringSerde, valueGenericAvroSerde));


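       // Split the input stream into two branches by the Avro schema name of each record:
       // index 0 = TripCreated events, index 1 = TripCompleted events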
       KStream<String, GenericRecord> tripStream[] = stream.branch(
               (k, v) -> (v.getSchema().getName().equals("TripCreated")),
               (k, v) -> (v.getSchema().getName().equals("TripCompleted"))
       );

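       // Convert each GenericRecord to its specific type by re-parsing its JSON form with Jackson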
       KStream<String, TripCompleted> tripCompletedKStream = tripStream[1].mapValues(
               (v) -> {
                   System.out.println("message received to trip Completed stream");
                   ObjectMapper objectMapper = new ObjectMapper();
                   try {
                       TripCompleted tc = objectMapper.readValue(v.toString(), TripCompleted.class);
                       return tc;
                   } catch (IOException e) {
                       e.printStackTrace();
                   }
                   return null;
               }
       );

       KStream<String, TripCreated> tripCreatedKStream = tripStream[0].mapValues(
               (v) -> {
                   System.out.println("message received to trip Created stream");
                   ObjectMapper objectMapper = new ObjectMapper();
                   try {
                       TripCreated tc = objectMapper.readValue(v.toString(), TripCreated.class);
                       return tc;
                   } catch (IOException e) {
                       e.printStackTrace();
                   }
                   return null;
               }
       );

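       // Collapse each event stream into a KTable that keeps only the latest record per trip key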
       KTable<String, TripCompleted> tripCompletedTable = tripCompletedKStream.groupByKey().reduce(
               (k, v) -> {
                   System.out.println("trip completed message received");
                   return v;
               }
       );

       KTable<String, TripCreated> tripCreatedTable = tripCreatedKStream.groupByKey().reduce(
               (k, v) -> {
                   System.out.println("trip created message received");
                   return v;
               }
       );

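       // Left-join created trips with (possibly missing) completed trips and assemble the output record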
       KTable<String, Materialized> joinedTable = tripCreatedTable.leftJoin(tripCompletedTable, (tcre, tcom) -> {
           long millis = System.currentTimeMillis();
           UUID uuid = UUID.randomUUID();
           System.out.println(tcre.toString());
           System.out.println(tcom == null ? "" : tcom.toString());
           Body body = Body.newBuilder()
                   .setBookedBy(tcre.getBody().getBookedBy())
                   .setDriverId(tcom == null ? 0 : tcom.getBody().getDriverId())
                   .setPassengerId(tcom == null ? 0 : tcom.getBody().getPassengerId())
                   .setModule(tcre.getBody().getModule())
                   .setTripId(tcre.getBody().getTripId())
                   .setVehicleType(tcre.getBody().getVehicleType())
                   .build();
           Materialized m = Materialized.newBuilder()
                   .setCreatedAt(millis)
                   .setExpiry(1234)
                   .setId(uuid.toString())
                   .setType("java_materialized_test")
                   .setVersion(1)
                   .setBody(body)
                   .setTraceInfo(setTraceInfo())
                   .build();
           return m;
       });



      // joinedTable.foreach((k, v) -> System.out.println("value:" + v.toString()));
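       // Publish the joined table to the output topic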
       joinedTable.to("test_materialized");

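       // Build the topology, start the Streams client, and block the main thread until shutdown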
       final Topology topology = builder.build();

       System.out.println("Topology:" + topology.describe());

       final KafkaStreams streams = new KafkaStreams(topology, streamsConfiguration);

       final CountDownLatch latch = new CountDownLatch(1);

       Runtime.getRuntime().addShutdownHook(new Thread() {
           @Override
           public void run() {
               streams.close();
               latch.countDown();
           }
       });
       try {
           streams.start();
           latch.await();
       } catch (final Throwable e) {
           System.exit(1);
       }
       System.exit(0);

   }

   private TraceInfo setTraceInfo() {
       TraceId traceId = TraceId.newBuilder()
               .setHigh(123456)
               .setLow(654321)
               .build();
       TraceInfo traceInfo = TraceInfo.newBuilder()
               .setTraceId(traceId)
               .setParentId(123456)
               .setSampled(true)
               .setSpanId(123456)
               .build();
       return traceInfo;
   }