Java 在apache Flink中面对Flink连接流中的竞争条件

Java 在apache Flink中面对Flink连接流中的竞争条件,java,concurrency,apache-flink,java.util.concurrent,flink-cep,Java,Concurrency,Apache Flink,Java.util.concurrent,Flink Cep,在flink中实现过程函数时面临竞争条件 连接的流。我有两个共享的缓存映射 正在并行调用的函数processElement1和processElement2 通过两条不同的线 流1-->(发送报价数据) Streams2-->(发送lms(忠诚管理系统数据)) 在TriggerStream类中我使用唯一Id:MemberId作为unique键存储数据,以在缓存中存储查找数据。当数据流入时,我没有得到任何结果 class LRUConcurrentCache<K,V>{ pri

在flink中实现过程函数时面临竞争条件 连接的流。我有两个共享的
缓存映射
正在并行调用的函数
processElement1
processElement2
通过两个不同的线程

流1
-->(发送报价数据)

流2
-->(发送lms(忠诚管理系统数据))

TriggerStream类中
我使用唯一Id:
MemberId
作为
unique键
存储数据,以在缓存中存储查找数据。当数据流入时,我没有得到任何结果

class LRUConcurrentCache<K,V>{

    /**
     * LRU cache backed by an access-ordered LinkedHashMap.
     *
     * <p>All access is funneled through a single monitor (the map itself).
     * With accessOrder=true even {@code get()} mutates the map's internal
     * linked list, so unsynchronized reads are unsafe. Locking on the key
     * object (as the original did) is broken: two distinct key objects never
     * exclude each other, yet unrelated keys still race on the shared map.
     */
    private final Map<K,V> cache;
    private final int maxEntries;

    public LRUConcurrentCache(final int maxEntries) {
        // Bug fix: the original never assigned this.maxEntries, which does
        // not compile (blank final field) — the anonymous subclass only
        // captured the constructor parameter.
        this.maxEntries = maxEntries;
        this.cache = new LinkedHashMap<K,V>(maxEntries, 0.75F, true) {
            private static final long serialVersionUID = -1236481390177598762L;
            @Override
            protected boolean removeEldestEntry(Map.Entry<K,V> eldest){
                // Evict the least-recently-used entry once capacity is exceeded.
                return size() > maxEntries;
            }
        };
    }

    /** Inserts or updates a mapping; may evict the LRU entry. */
    public void put(K key, V value) {
        synchronized(cache) {
            cache.put(key, value);
        }
    }

    /** Returns the cached value (refreshing its recency) or null if absent. */
    public V get(K key) {
        synchronized(cache) {
            return cache.get(key);
        }
    }
}



public class TriggerStream extends CoProcessFunction<IOffer, LMSData, String> {


    private static final long serialVersionUID = 1L;
    LRUCache cache; 
    private String offerNode;
    String updatedValue, retrivedValue;
    Subscriber subscriber;

    TriggerStream(){
        this.cache== new LRUCache(10);
    }



@Override
    public void processElement1(IOffer offer) throws Exception {
        try {
            ObjectMapper mapper = new ObjectMapper();
            mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
            mapper.enableDefaultTyping();
            // TODO Auto-generated method stub
            IOffer latestOffer = offer;

            //Check the subscriber is there or not

            retrivedValue = cache.get(latestOffer.getMemberId().toString());
            if ((retrivedValue == null)) {
                //Subscriber is the class that is used and converted as Json String & then store into map
                Subscriber subscriber = new Subscriber();
                subscriber.setMemberId(latestOffer.getMemberId());
                ArrayList<IOffer> offerList = new ArrayList<IOffer>();
                offerList.add(latestOffer);
                subscriber.setOffers(offerList);
                updatedValue = mapper.writeValueAsString(subscriber);
                cache.set(subscriber.getMemberId().toString(), updatedValue);
            } else {
                Subscriber subscriber = mapper.readValue(retrivedValue, Subscriber.class);
                List<IOffer> offers = subscriber.getOffers();
                offers.add(latestOffer);
                updatedValue= mapper.writeValueAsString(subscriber);
                cache.set(subscriber.getMemberId().toString(), subscriberUpdatedValue);
            }
        } catch (Exception pb) {
            applicationlogger.error("Exception in Offer Loading:"+pb);
            applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");
        }
        applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");

    }

@Override
    public void processElement2(LMSData lms) throws Exception {
        try {
            ObjectMapper mapper = new ObjectMapper();
            mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
            mapper.enableDefaultTyping();
            // TODO Auto-generated method stub

            //Check the subscriber is there or not

            retrivedValue = cache.get(lms.getMemberId().toString());
            if(retrivedValue !=null){
                Subscriber subscriber = mapper.readValue(retrivedValue, Subscriber.class);
                //do some calculations 
                String updatedValue = mapper.writeValueAsString(subscriber);
                //Update value
                cache.set(subscriber.getMemberId().toString(), updatedValue);
            }

        } catch (Exception pb) {
            applicationlogger.error("Exception in Offer Loading:"+pb);
            applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");
        }
        applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");

    }

}   
class LRUConcurrentCache<K,V>{
    private final Map<K,V> cache;
    private final int maxEntries;
    public LRUConcurrentCache(final int maxEntries) {
        this.cache = new LinkedHashMap<K,V>(maxEntries, 0.75F, true) {
            private static final long serialVersionUID = -1236481390177598762L;
            @Override
            protected boolean removeEldestEntry(Map.Entry<K,V> eldest){
                return size() > maxEntries;
            }
        };
    }
    //Why we cant lock on the key
    public void put(K key, V value) {
        synchronized(key) {
            cache.put(key, value);
        }
    }
    //get methode
    public V get(K key) {
        synchronized(key) {
            return cache.get(key);
        }
    }

public class TriggerStream extends CoProcessFunction<IOffer, LMSData, String> {
    private static final long serialVersionUID = 1L;
    LRUCache cache;
    private String offerNode;
    String updatedValue, retrivedValue;
    Subscriber subscriber;
    TriggerStream(){
        this.cache== new LRUCache(10);
    }
    @Override
    public void processElement1(IOffer offer) throws Exception {
        try {
            ObjectMapper mapper = new ObjectMapper();
            mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
            mapper.enableDefaultTyping();
            // TODO Auto-generated method stub
            IOffer latestOffer = offer;
            //Check the subscriber is there or not
            retrivedValue = cache.get(latestOffer.getMemberId().toString());
            if ((retrivedValue == null)) {
                //Subscriber is the class that is used and converted as Json String & then store into map
                Subscriber subscriber = new Subscriber();
                subscriber.setMemberId(latestOffer.getMemberId());
                ArrayList<IOffer> offerList = new ArrayList<IOffer>();
                offerList.add(latestOffer);
                subscriber.setOffers(offerList);
                updatedValue = mapper.writeValueAsString(subscriber);
                cache.set(subscriber.getMemberId().toString(), updatedValue);
            } else {
                Subscriber subscriber = mapper.readValue(retrivedValue, Subscriber.class);
                List<IOffer> offers = subscriber.getOffers();
                offers.add(latestOffer);
                updatedValue = mapper.writeValueAsString(subscriber);
                cache.set(subscriber.getMemberId().toString(), subscriberUpdatedValue);
            }
        } catch (Exception pb) {
            applicationlogger.error("Exception in Offer Loading:"+pb);
            applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");
        }
        applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");
    }
    @Override
    public void processElement2(LMSData lms) throws Exception {
        try {
            ObjectMapper mapper = new ObjectMapper();
            mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
            mapper.enableDefaultTyping();
            // TODO Auto-generated method stub
            //Check the subscriber is there or not
            retrivedValue = cache.get(lms.getMemberId().toString());
            if(retrivedValue != null){
                Subscriber subscriber = mapper.readValue(retrivedValue, Subscriber.class);
                //do some calculations
                String updatedValue = mapper.writeValueAsString(subscriber);
                //Update value
                cache.set(subscriber.getMemberId().toString(), updatedValue);
            }
        } catch (Exception pb) {
            applicationlogger.error("Exception in Offer Loading:"+pb);
            applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");
        }
        applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");
    }
}

Flink不保证CoProcessFunction(或任何其他函数)接收数据的顺序。在分布式并行任务中维护某种确定性顺序的代价太高。


相反,您必须在函数中使用state和可能的计时器来解决这个问题。函数中的
LRUCache
应该保持为state(可能)。否则,它将在失败时丢失。您可以为第一个流和缓冲区记录添加另一个状态,直到第二个流的查找值到达。

是的，我正在couchbase中维护LRUCache状态，因此状态不会丢失。"您可以为第一个流添加另一个状态并缓冲记录，直到第二个流的查找值到达"——我建议将该状态存储在Flink中。否则，每次查找和/或更新都会产生远程调用。此外，如果出现故障，外部状态不会回滚，这意味着您无法实现精确一次(exactly-once)的状态一致性。
class LRUConcurrentCache<K,V>{

    /**
     * LRU cache backed by an access-ordered LinkedHashMap.
     *
     * <p>All access is funneled through a single monitor (the map itself).
     * With accessOrder=true even {@code get()} mutates the map's internal
     * linked list, so unsynchronized reads are unsafe. Locking on the key
     * object (as the original did) is broken: two distinct key objects never
     * exclude each other, yet unrelated keys still race on the shared map.
     */
    private final Map<K,V> cache;
    private final int maxEntries;

    public LRUConcurrentCache(final int maxEntries) {
        // Bug fix: the original never assigned this.maxEntries, which does
        // not compile (blank final field) — the anonymous subclass only
        // captured the constructor parameter.
        this.maxEntries = maxEntries;
        this.cache = new LinkedHashMap<K,V>(maxEntries, 0.75F, true) {
            private static final long serialVersionUID = -1236481390177598762L;
            @Override
            protected boolean removeEldestEntry(Map.Entry<K,V> eldest){
                // Evict the least-recently-used entry once capacity is exceeded.
                return size() > maxEntries;
            }
        };
    }

    /** Inserts or updates a mapping; may evict the LRU entry. */
    public void put(K key, V value) {
        synchronized(cache) {
            cache.put(key, value);
        }
    }

    /** Returns the cached value (refreshing its recency) or null if absent. */
    public V get(K key) {
        synchronized(cache) {
            return cache.get(key);
        }
    }
}



public class TriggerStream extends CoProcessFunction<IOffer, LMSData, String> {


    private static final long serialVersionUID = 1L;
    LRUCache cache; 
    private String offerNode;
    String updatedValue, retrivedValue;
    Subscriber subscriber;

    TriggerStream(){
        this.cache== new LRUCache(10);
    }



@Override
    public void processElement1(IOffer offer) throws Exception {
        try {
            ObjectMapper mapper = new ObjectMapper();
            mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
            mapper.enableDefaultTyping();
            // TODO Auto-generated method stub
            IOffer latestOffer = offer;

            //Check the subscriber is there or not

            retrivedValue = cache.get(latestOffer.getMemberId().toString());
            if ((retrivedValue == null)) {
                //Subscriber is the class that is used and converted as Json String & then store into map
                Subscriber subscriber = new Subscriber();
                subscriber.setMemberId(latestOffer.getMemberId());
                ArrayList<IOffer> offerList = new ArrayList<IOffer>();
                offerList.add(latestOffer);
                subscriber.setOffers(offerList);
                updatedValue = mapper.writeValueAsString(subscriber);
                cache.set(subscriber.getMemberId().toString(), updatedValue);
            } else {
                Subscriber subscriber = mapper.readValue(retrivedValue, Subscriber.class);
                List<IOffer> offers = subscriber.getOffers();
                offers.add(latestOffer);
                updatedValue= mapper.writeValueAsString(subscriber);
                cache.set(subscriber.getMemberId().toString(), subscriberUpdatedValue);
            }
        } catch (Exception pb) {
            applicationlogger.error("Exception in Offer Loading:"+pb);
            applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");
        }
        applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");

    }

@Override
    public void processElement2(LMSData lms) throws Exception {
        try {
            ObjectMapper mapper = new ObjectMapper();
            mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
            mapper.enableDefaultTyping();
            // TODO Auto-generated method stub

            //Check the subscriber is there or not

            retrivedValue = cache.get(lms.getMemberId().toString());
            if(retrivedValue !=null){
                Subscriber subscriber = mapper.readValue(retrivedValue, Subscriber.class);
                //do some calculations 
                String updatedValue = mapper.writeValueAsString(subscriber);
                //Update value
                cache.set(subscriber.getMemberId().toString(), updatedValue);
            }

        } catch (Exception pb) {
            applicationlogger.error("Exception in Offer Loading:"+pb);
            applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");
        }
        applicationlogger.debug("*************************FINISHED OFFER LOADING*******************************");

    }

}