Java 基于事件时间的窗口不触发
我正在使用 Flink 基于事件时间的窗口。但当我发送 Kafka 消息时,程序并没有执行窗口操作。我按照文档说的做了一切,但无法解决问题。如有任何帮助将不胜感激,提前致谢。(标签:java, apache-flink)
public static void main(String[] args) throws Exception {
    // Streaming job: read EventSalesQuantity records from Kafka, aggregate them
    // in 5-second event-time tumbling windows keyed by dealer, and write the
    // (event, count) pairs back to Kafka.
    StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
    environment.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    environment.setParallelism(1);

    Properties props = new Properties();
    props.setProperty("bootstrap.servers", "localhost:9092");
    props.setProperty("group.id", "event-group");

    FlinkKafkaConsumer<EventSalesQuantity> consumer =
            new FlinkKafkaConsumer<EventSalesQuantity>("EventTopic", new EventSerializationSchema(), props);
    DataStream<EventSalesQuantity> eventDataStream = environment.addSource(consumer);

    // Timestamps/watermarks are assigned before keyBy so the single source task
    // emits watermarks for the whole stream.
    KeyedStream<EventSalesQuantity, String> keyedEventStream = eventDataStream
            .assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarksImpl())
            .keyBy(new KeySelector<EventSalesQuantity, String>() {
                @Override
                public String getKey(EventSalesQuantity eventSalesQuantity) throws Exception {
                    return eventSalesQuantity.getDealer();
                }
            });

    // NOTE(review): an event-time window fires only when the watermark passes the
    // window end. The assigner trails the max timestamp by 3.5s, so a window
    // [09:43:00, 09:43:05) is emitted only after an event with timestamp
    // >= 09:43:08.5 arrives. Sending just two close-together messages
    // (09:43:01, 09:43:03) therefore produces no output — send a later event
    // to advance the watermark.
    DataStream<Tuple2<EventSalesQuantity, Integer>> eventSinkStream =
            keyedEventStream.timeWindow(Time.seconds(5)).aggregate(new AggregateImpl());

    eventSinkStream.addSink(new FlinkKafkaProducer<Tuple2<EventSalesQuantity, Integer>>(
            "localhost:9092", "SinkEventTopic", new EventSinkSerializationSchema()));
    eventSinkStream.print();

    environment.execute();
}
}
public class AssignerWithPeriodicWatermarksImpl implements AssignerWithPeriodicWatermarks<EventSalesQuantity> {
private final long maxOutOfOrderness = 3500;
private long currentMaxTimestamp;
@Override
public long extractTimestamp(EventSalesQuantity element, long previousElementTimestamp) {
    // Parse the event's transaction date into epoch milliseconds; this value
    // becomes the record's event-time timestamp.
    long eventTime = DateUtils.getDateFromString(element.getTransactionDate()).getTime();
    // Remember the largest timestamp seen so far — it drives watermark generation.
    if (eventTime > currentMaxTimestamp) {
        currentMaxTimestamp = eventTime;
    }
    return eventTime;
}
@Override
public Watermark getCurrentWatermark() {
    // The watermark trails the highest observed timestamp by the allowed
    // lateness, so events up to maxOutOfOrderness ms out of order are still
    // assigned to their windows.
    long watermarkTimestamp = currentMaxTimestamp - maxOutOfOrderness;
    return new Watermark(watermarkTimestamp);
}
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
    environment.getConfig();
    environment.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    environment.setParallelism(1);
    Properties props = new Properties();
    props.setProperty("bootstrap.servers", "localhost:9092");
    props.setProperty("group.id", "event-group");
    FlinkKafkaConsumer<EventSalesQuantity> consumer = new FlinkKafkaConsumer<EventSalesQuantity>("EventTopic", new EventSerializationSchema(), props);
    DataStream<EventSalesQuantity> eventDataStream = environment.addSource(consumer);
    KeyedStream<EventSalesQuantity, String> keyedEventStream = eventDataStream.assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarksImpl())
        .keyBy(new KeySelector<EventSalesQuantity, String>() {
            @Override
            public String getKey(EventSalesQuantity eventSalesQuantity) throws Exception {
                return eventSalesQuantity.getDealer();
            }
        });
    DataStream<Tuple2<EventSalesQuantity, Integer>> eventSinkStream = keyedEventStream.timeWindow(Time.seconds(5)).aggregate(new AggregateImpl());
    eventSinkStream.addSink(new FlinkKafkaProducer<Tuple2<EventSalesQuantity, Integer>>("localhost:9092", "SinkEventTopic", new EventSinkSerializationSchema()));
    eventSinkStream.print();
    environment.execute();
}

public class AssignerWithPeriodicWatermarksImpl implements AssignerWithPeriodicWatermarks<EventSalesQuantity> {
    private final long maxOutOfOrderness = 3500;
    private long currentMaxTimestamp;

    @Override
    public long extractTimestamp(EventSalesQuantity element, long previousElementTimestamp) {
        long timestamp = DateUtils.getDateFromString(element.getTransactionDate()).getTime();
        currentMaxTimestamp = Math.max(timestamp, currentMaxTimestamp);
        return timestamp;
    }

    @Override
    public Watermark getCurrentWatermark() {
        // return the watermark as current highest timestamp minus the out-of-orderness bound
        return new Watermark(currentMaxTimestamp - maxOutOfOrderness);
    }
"2019-06-21T09:43:01"
"2019-06-21T09:43:03"
我发送了两条带有这些时间戳的消息,但没有得到任何输出。