Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/three.js/2.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Apache flink Flink CEP未打印结果_Apache Flink_Flink Streaming_Flink Cep - Fatal编程技术网

Apache flink Flink CEP未打印结果

Apache flink Flink CEP未打印结果,apache-flink,flink-streaming,flink-cep,Apache Flink,Flink Streaming,Flink Cep,如果使用Flink CEP库找到Hello和world,我将尝试打印一个字符串。我的来源是Kafka,使用控制台生成器输入数据。那部分正在发挥作用。我可以打印出我在主题中输入的内容。然而,它不会打印出我最后的信息“世界太美好了!”。它甚至不会打印出它输入了lambda。下面是课堂 package kafka; import org.apache.flink.cep.CEP; import org.apache.flink.cep.PatternStream; import org.apache

我想在使用 Flink CEP 库匹配到 Hello 和 World 时打印一个字符串。我的数据源是 Kafka，用控制台生产者（console producer）输入数据。这一部分是正常工作的——我在主题中输入的内容都能打印出来。然而，它始终不会打印出最后那条消息 "The world is so nice!"，甚至连进入 lambda 的日志都没有打印。该类的代码如下：

package kafka;

import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
import org.apache.flink.util.Collector;

import java.util.Map;
import java.util.Properties;

/**
 * Created by crackerman on 9/16/16.
*/
public class WordCount {

/**
 * Reads strings from the Kafka topic "complexString" and uses Flink CEP to
 * print an alert when an event containing "Hello" is later followed by an
 * event containing "World".
 *
 * @param args unused
 * @throws Exception if the Flink job fails to start or execute
 */
public static void main(String[] args) throws Exception {

    Properties properties = new Properties();
    properties.put("bootstrap.servers", "localhost:9092");
    properties.put("zookeeper.connect", "localhost:2181");
    properties.put("group.id", "test");
    StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
    // BUG FIX: removed see.setStreamTimeCharacteristic(TimeCharacteristic.EventTime).
    // In event-time mode Flink only makes progress when timestamps and
    // watermarks are assigned to the stream; this job never assigns any, so
    // the CEP operator never fired and no output was produced. The default
    // processing-time mode works here. (If event time is actually required,
    // call assignTimestampsAndWatermarks(...) on the source stream instead.)

    DataStream<String> src = see.addSource(new FlinkKafkaConsumer08<>("complexString",
                                                                      new SimpleStringSchema(),
                                                                      properties));

    // Echo every raw record so the Kafka hookup can be verified.
    src.print();

    // Match an event containing "Hello", followed (not necessarily
    // immediately) by an event containing "World".
    Pattern<String, String> pattern = Pattern.<String>begin("first")
            .where(evt -> evt.contains("Hello"))
            .followedBy("second")
            .where(evt -> evt.contains("World"));

    PatternStream<String> patternStream = CEP.pattern(src, pattern);

    DataStream<String> alerts = patternStream.flatSelect(
            (Map<String, String> in, Collector<String> out) -> {
                System.out.println("Made it to the lambda");
                String first = in.get("first");
                String second = in.get("second");
                System.out.println("First: " + first);
                System.out.println("Second: " + second);

                // NOTE(review): the pattern matches with contains(), but this
                // guard uses equals(), so records like "Hello there" match the
                // pattern yet never emit an alert — confirm that exact-match
                // is intended here.
                if (first.equals("Hello") && second.equals("World")) {
                    out.collect("The world is so nice!");
                }
            });

    alerts.print();

    see.execute();
}

}
package kafka;

import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
import org.apache.flink.util.Collector;

import java.util.Map;
import java.util.Properties;

/**
 * Created by crackerman on 9/16/16.
 */
public class WordCount {

    public static void main(String[] args) throws Exception {

        Properties properties = new Properties();
        properties.put("bootstrap.servers", "localhost:9092");
        properties.put("zookeeper.connect", "localhost:2181");
        properties.put("group.id", "test");
        StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
        see.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        DataStream<String> src = see.addSource(new FlinkKafkaConsumer08<>("complexString",
                                                                          new SimpleStringSchema(),
                                                                          properties));
        src.print();

        Pattern<String, String> pattern = Pattern.<String>begin("first")
                .where(evt -> evt.contains("Hello"))
                .followedBy("second")
                .where(evt -> evt.contains("World"));

        PatternStream<String> patternStream = CEP.pattern(src, pattern);

        DataStream<String> alerts = patternStream.flatSelect(
                (Map<String, String> in, Collector<String> out) -> {
                    System.out.println("Made it to the lambda");
                    String first = in.get("first");
                    String second = in.get("second");
                    System.out.println("First: " + first);
                    System.out.println("Second: " + second);

                    if (first.equals("Hello") && second.equals("World")) {
                        out.collect("The world is so nice!");
                    }
                });

        alerts.print();

        see.execute();
    }
}
任何帮助都将不胜感激


谢谢

问题如下

 see.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

如果删除了,它将按照我预期的方式工作。

您是否为数据流分配了时间戳和水印？事件时间（event time）模式只有在您告诉 Flink 每条记录的时间戳、以及时间推进到了哪里（即水印）之后才能工作；没有这些信息，Flink 无法按事件时间处理数据。如果数据可能乱序到达，或者有基于时间的操作（例如窗口），就需要事件时间来得到一致的结果。——我确实没有分配时间戳和水印，现在已经补上并实现了这一点。感谢你的说明！