Apache Kafka: why does Flink CEP keep waiting for a new entry before catching the pattern?

Tags: apache-kafka, apache-flink, flink-streaming, flink-cep

I am building a Flink CEP application that reads its data from Kafka. When I try to catch a pattern, the sink operation does not fire if no further data arrives afterwards. For example, I want A -> B -> C as the pattern, and the data coming from Kafka is A, B, C. However, for the sink operation I added in the PatternProcessFunction to run, the data from Kafka has to look like A, B, C, X. How can I solve this? Please help.

Reading from Kafka

DataStream<String> dataStream = env.addSource(KAFKA).assignTimestampsAndWatermarks(WatermarkStrategy
                    .forBoundedOutOfOrderness(Duration.ofSeconds(0))); 
dataStream.print("DS:"); //to see every incoming data
Pattern

Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(
            new SimpleCondition<Event>() {
                @Override
                public boolean filter(Event event) {
                    return event.actionId.equals("2.24");
                }
            }
    ).next("middle").where(
            new SimpleCondition<Event>() {
                @Override
                public boolean filter(Event event) {
                    return event.actionId.equals("2.24");
                }
            }
    ).within(Time.seconds(5));
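
The Event class itself is not shown in the question. Judging from how it is used in the pattern and from the console output further below, it presumably looks roughly like the sketch here; everything beyond the actionId field and the toString format is an assumption.

// Hypothetical reconstruction of the Event POJO used above (not the asker's actual class)
public class Event {
    // the only field referenced in the question's code
    public String actionId;

    // Flink POJOs need a public no-argument constructor
    public Event() {}

    public Event(String actionId) {
        this.actionId = actionId;
    }

    @Override
    public String toString() {
        // matches the {ActionID='2.24'} form seen in the output below
        return "{ActionID='" + actionId + "'}";
    }
}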
CEP and Sink

PatternStream<Event> patternStream = CEP.pattern(eventStringKeyedStream, pattern);
patternStream.process(new PatternProcessFunction<Event, Event>() {
    @Override
    public void processMatch(Map<String, List<Event>> map, Context context, Collector<Event> collector) throws Exception {
        collector.collect(map.get("start").get(0));
    }
}).print(); // or a sink function
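
eventStringKeyedStream is not defined in the snippets above. Presumably it is derived from the raw Kafka stream by parsing each JSON line into an Event and keying the result; the sketch below is only a guess at that missing step, and both the parsing logic and the choice of actionId as the key are assumptions.

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.KeyedStream;

// Hypothetical reconstruction of the missing step: parse lines such as
// {"ActionID":"2.24"} into Event objects and key the stream for CEP.pattern.
KeyedStream<Event, String> eventStringKeyedStream = dataStream
        .map(new MapFunction<String, Event>() {
            @Override
            public Event map(String json) {
                Event event = new Event();
                // naive extraction of the ActionID value; a real job would use a JSON library
                event.actionId = json.replaceAll(".*\"ActionID\"\\s*:\\s*\"([^\"]+)\".*", "$1");
                return event;
            }
        })
        .keyBy(new KeySelector<Event, String>() {
            @Override
            public String getKey(Event event) {
                return event.actionId;
            }
        });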
My console output

DS::2> {"ActionID":"2.24"}

DS::2> {"ActionID":"2.24"}

DS::2> {"ActionID":"2.25"}

4> {ActionID='2.24'}

I was expecting

DS::2> {"ActionID":"2.24"}

DS::2> {"ActionID":"2.24"}

4> {ActionID='2.24'}

So why does it produce the result only when one more record arrives after the pattern's conditions have been met, rather than at the moment the pattern is satisfied? Please help me.

EDIT

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.functions.PatternProcessFunction;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.List;
import java.util.Map;

public class EventTimePattern {

public static void main(String[] args) throws Exception {

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<String> input = env.socketTextStream("localhost",9999)
            .map(new MapFunction<String, Tuple2<String, Long>>() {
                @Override
                public Tuple2<String, Long> map (String value) throws Exception {
                    String[] fields = value.split(",");
                    if (fields.length == 2) {
                        return new Tuple2<String, Long>(
                                fields[0] ,
                                Long.parseLong(fields[1]));
                    }
                    return null;
                }
            })

           /* env.fromElements(
                    Tuple2.of("A", 5L),
                    Tuple2.of("A", 10L)
            )*/
                    .assignTimestampsAndWatermarks(
                            WatermarkStrategy
                                    .<Tuple2<String, Long>>forBoundedOutOfOrderness(Duration.ofMillis(0))
                                    .withTimestampAssigner((event, timestamp) -> event.f1))
                    .map(event -> event.f0);

    Pattern<String, ?> pattern =
            Pattern.<String>begin("start")
                    .where(
                            new SimpleCondition<String>() {

                                @Override
                                public boolean filter(String value) throws Exception {
                                    return value.equals("A");
                                }
                            })
                    .next("end")
                    .where(
                            new SimpleCondition<String>() {

                                @Override
                                public boolean filter(String value) throws Exception {
                                    return value.equals("A");
                                }
                            })
                    .within(Time.seconds(5));
    input.print("I");

    DataStream<String> result =
            CEP.pattern(input, pattern)
                    .process(new PatternProcessFunction<String, String>() {
                        @Override
                        public void processMatch(
                                Map<String, List<String>> map,
                                Context context,
                                Collector<String> out) throws Exception {

                            StringBuilder builder = new StringBuilder();

                            builder.append(map.get("start").get(0))
                                    .append(",")
                                    .append(map.get("end").get(0));

                            out.collect(builder.toString());
                        }
                    });

    result.print();

    env.execute();
}
}
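
For reference, the class above parses each socket line as value,timestamp, where the second field (in milliseconds) becomes the event timestamp. A hypothetical test session, feeding the socket opened with nc -lk 9999, could use input such as:

A,1000
A,2000

The two records are one second apart in event time, so they fall inside the within(Time.seconds(5)) window and together form a start/end match for the pattern.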

I am unable to reproduce your issue. Below is a similar example that works fine (I used Flink 1.12.2):

import org.apache.flink.api.common.eventtime.WatermarkStrategy;