Java 带列表的 Apache Flink 模式条件

我写了一个 CEP 模式,并有一个条件列表(规则从 JSON 文件读取)。数据(JSON)来自 Kafka 服务器,我想用这个条件列表过滤数据,但它不起作用。我该怎么做?我也不确定是否需要使用 keyedStream 以及如何输出警报(Alert)。Flink 能这样工作吗?主程序如下:
package cep_kafka_eample.cep_kafka;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonParser;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.SlidingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.streaming.util.serialization.JSONDeserializationSchema;
import util.AlarmPatterns;
import util.Rules;
import util.TypeProperties;
import java.io.FileReader;
import java.util.*;
public class MainClass {
public static void main( String[] args ) throws Exception
{
ObjectMapper mapper = new ObjectMapper();
JsonParser parser = new JsonParser();
Object obj = parser.parse(new FileReader(
"c://new 5.json"));
JsonArray array = (JsonArray)obj;
Gson googleJson = new Gson();
List<Rules> ruleList = new ArrayList<>();
for(int i = 0; i< array.size() ; i++) {
Rules jsonObjList = googleJson.fromJson(array.get(i), Rules.class);
ruleList.add(jsonObjList);
}
//apache kafka properties
Properties properties = new Properties();
properties.setProperty("zookeeper.connect", "localhost:2181");
properties.setProperty("bootstrap.servers", "localhost:9092");
//starting flink
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.enableCheckpointing(1000).setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
//get kafka values
FlinkKafkaConsumer010<ObjectNode> myConsumer = new FlinkKafkaConsumer010<>("demo", new JSONDeserializationSchema(),
properties);
List<Pattern<ObjectNode,?>> patternList = new ArrayList<>();
DataStream<ObjectNode> dataStream = env.addSource(myConsumer);
dataStream.windowAll(SlidingProcessingTimeWindows.of(Time.seconds(10), Time.seconds(5)));
DataStream<ObjectNode> keyedStream = dataStream;
//get pattern list, keyeddatastream
for(Rules rules : ruleList){
List<TypeProperties> typePropertiesList = rules.getTypePropList();
for (int i = 0; i < typePropertiesList.size(); i++) {
TypeProperties typeProperty = typePropertiesList.get(i);
if (typeProperty.getGroupType() != null && typeProperty.getGroupType().equals("group")) {
keyedStream = keyedStream.keyBy(
jsonNode -> jsonNode.get(typeProperty.getPropName().toString())
);
}
}
Pattern<ObjectNode,?> pattern = new AlarmPatterns().getAlarmPattern(rules);
patternList.add(pattern);
}
//CEP pattern and alarms
List<DataStream<Alert>> alertList = new ArrayList<>();
for(Pattern<ObjectNode,?> pattern : patternList){
PatternStream<ObjectNode> patternStream = CEP.pattern(keyedStream, pattern);
DataStream<Alert> alarms = patternStream.select(new PatternSelectFunction<ObjectNode, Alert>() {
private static final long serialVersionUID = 1L;
public Alert select(Map<String, List<ObjectNode>> map) throws Exception {
return new Alert("new message");
}
});
alertList.add(alarms);
}
env.execute("Flink CEP monitoring job");
}
}
package cep_kafka_eample.cep_kafka;
导入com.fasterxml.jackson.databind.ObjectMapper;
导入com.fasterxml.jackson.databind.node.ObjectNode;
导入com.google.gson.gson;
导入com.google.gson.JsonArray;
导入com.google.gson.JsonParser;
导入org.apache.flink.cep.cep;
导入org.apache.flink.cep.PatternSelectFunction;
导入org.apache.flink.cep.PatternStream;
导入org.apache.flink.cep.pattern.pattern;
导入org.apache.flink.streaming.api.TimeCharacteristic;
导入org.apache.flink.streaming.api.datastream.datastream;
导入org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
导入org.apache.flink.streaming.api.windowing.assigners.SlidingProcessingTimeWindows;
导入org.apache.flink.streaming.api.windowing.time.time;
导入org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
导入org.apache.flink.streaming.util.serialization.JSONDeserializationSchema;
导入util.AlarmPatterns;
导入实用规则;
导入util.TypeProperties;
导入java.io.FileReader;
导入java.util.*;
公共类主类{
公共静态void main(字符串[]args)引发异常
{
ObjectMapper mapper=新的ObjectMapper();
JsonParser=新的JsonParser();
Object obj=parser.parse(新文件读取器(
“c://new 5.json”);
JsonArray数组=(JsonArray)obj;
Gson googleJson=new Gson();
列表规则列表=新的ArrayList();
对于(int i=0;ijsonNode.get(typeProperty.getPropName().toString())
);
}
}
模式模式=新的AlarmPatterns().getAlarmPattern(规则);
patternList.add(pattern);
}
//CEP模式和警报
List alertList=新建ArrayList();
用于(图案:图案列表){
PatternStream PatternStream=CEP.pattern(keyedStream,pattern);
DataStream alarms=patternStream.select(新的PatternSelectFunction(){
私有静态最终长serialVersionUID=1L;
公共警报选择(映射)引发异常{
返回新警报(“新消息”);
}
});
警报列表。添加(警报);
}
环境执行(“Flink CEP监控作业”);
}
}
getAlarmPattern:
package util;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.IterativeCondition;
import org.apache.flink.streaming.api.datastream.DataStream;
import com.fasterxml.jackson.databind.node.ObjectNode;
public class AlarmPatterns {

    /**
     * Builds a CEP pattern ("first", repeated {@code rules.getRuleCount()} times)
     * whose condition evaluates the rule's criteria list against each event.
     *
     * Bug fix: the original compared a Jackson {@code JsonNode} directly against
     * the criteria value with {@code equals()}. {@code JsonNode.equals} only
     * matches other JsonNodes, so the comparison was always false and the pattern
     * never matched. The node's text value is compared instead.
     *
     * NOTE(review): the condition returns on the FIRST criterion whose type
     * matches, so later entries in {@code getCriteriaList()} are ignored —
     * preserved from the original; confirm whether criteria should be AND-ed.
     *
     * @param rules rule definition supplying the criteria list and repeat count
     * @return the configured CEP pattern
     */
    public Pattern<ObjectNode, ?> getAlarmPattern(Rules rules) {
        Pattern<ObjectNode, ?> alarmPattern = Pattern.<ObjectNode>begin("first")
                .where(new IterativeCondition<ObjectNode>() {
                    @Override
                    public boolean filter(ObjectNode jsonNodes, Context<ObjectNode> context) throws Exception {
                        for (Criterias criterias : rules.getCriteriaList()) {
                            // Compare text-to-text: JsonNode.equals(non-JsonNode) is always false.
                            String actual = jsonNodes.get(criterias.getPropName()).asText();
                            String expected = String.valueOf(criterias.getCriteriaValue());
                            String type = criterias.getCriteriaType();
                            if (type.equals("equals")) {
                                return actual.equals(expected);
                            } else if (type.equals("greaterThen")) {
                                if (!actual.equals(expected)) {
                                    return false;
                                }
                                return sumMatchedValues(context) > 5;
                            } else if (type.equals("lessThen")) {
                                if (!actual.equals(expected)) {
                                    return false;
                                }
                                return sumMatchedValues(context) < 5;
                            }
                        }
                        // No applicable criterion: event does not match.
                        return false;
                    }

                    /** Sums the "value" field over events already matched for "first". */
                    private int sumMatchedValues(Context<ObjectNode> context) throws Exception {
                        int sum = 0;
                        for (ObjectNode node : context.getEventsForPattern("first")) {
                            sum += node.get("value").asInt();
                        }
                        return sum;
                    }
                }).times(rules.getRuleCount());
        return alarmPattern;
    }
}
package-util;
导入org.apache.flink.cep.pattern.pattern;
导入org.apache.flink.cep.pattern.conditions.IterativeCondition;
导入org.apache.flink.streaming.api.datastream.datastream;
导入com.fasterxml.jackson.databind.node.ObjectNode;
公共类报警模式{
公共模式getAlarmPattern(规则){
//MySimpleConditions条件=新的MySimpleConditions();
Pattern alarmPattern=Pattern.begin(“第一个”)
.where(新的迭代条件(){
@凌驾
公共布尔筛选器(ObjectNode jsonNodes,上下文)引发异常{
for(Criterias Criterias:rules.getCriteriaList()){
if(criterias.getCriteriaType().equals(“equals”)){
返回jsonNodes.get(criterias.getPropName()).equals(criterias.getCriteriaValue());
}else if(criterias.getCriteriaType().equals(“greaterThen”)){
如果(!jsonNodes.get(criterias.getPropName()).equals(criterias.getCriteriaValue()){
返回false;
}
整数计数=0;
对于(ObjectNode节点:context.getEventsForPattern(“第一”)){
count+=node.get(“value”).asInt();
}
返回整数。比较(计数,5)>0;
}else if(criterias.getCriteriaType().equals(“lessThen”)){
如果(!jsonNodes.get(criterias.getPropName()).equals(criterias.getCriteriaValue()){
返回false;
}
整数计数=